prompt (large_string, length 70–991k) | completion (large_string, length 0–1.02k) |
---|---|
<|file_name|>jsonutils.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
try:
import xmlrpclib
except ImportError:
# NOTE(jd): xmlrpclib is not shipped with Python 3
xmlrpclib = None
import six
from keystoneclient.openstack.common import gettextutils
from keystoneclient.openstack.common import importutils
from keystoneclient.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
_simple_types = (six.string_types + six.integer_types
+ (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
# handle obvious types first - order of basic types determined by running
# full tests on nova project, resulting in the following counts:
# 572754 <type 'NoneType'>
# 460353 <type 'int'>
# 379632 <type 'unicode'>
# 274610 <type 'str'>
# 199918 <type 'dict'>
# 114200 <type 'datetime.datetime'>
# 51817 <type 'bool'>
# 26164 <type 'list'>
# 6491 <type 'float'>
# 283 <type 'tuple'>
# 19 <type 'long'>
if isinstance(value, _simple_types):
return value
if isinstance(value, datetime.datetime):
if convert_datetime:
return timeutils.strtime(value)
else:
return value
# value of itertools.count doesn't get caught by nasty_type_tests
# and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return six.text_type(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,<|fim▁hole|> return dict((k, recursive(v)) for k, v in value.iteritems())
elif isinstance(value, (list, tuple)):
return [recursive(lv) for lv in value]
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if xmlrpclib and isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif isinstance(value, gettextutils.Message):
return value.data
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return recursive(list(value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
elif netaddr and isinstance(value, netaddr.IPAddress):
return six.text_type(value)
else:
if any(test(value) for test in _nasty_type_tests):
return six.text_type(value)
return value
except TypeError:
        # Class objects are tricky since they may have something like
        # __iter__ defined, but it isn't callable as list().
return six.text_type(value)
def dumps(value, default=to_primitive, **kwargs):
return json.dumps(value, default=default, **kwargs)
def loads(s):
return json.loads(s)
def load(s):
return json.load(s)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)<|fim▁end|> | level=level,
max_depth=max_depth)
if isinstance(value, dict): |
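A minimal usage sketch for the wrappers described in the module docstring above (the import path and the exact timestamp format produced by timeutils.strtime() are assumptions; the module lives in the keystoneclient openstack.common tree):

import datetime
from keystoneclient.openstack.common import jsonutils

record = {'name': 'demo', 'created_at': datetime.datetime(2014, 1, 1, 12, 0, 0)}
# json.dumps() alone would raise TypeError on the datetime; the dumps() wrapper
# passes default=to_primitive, so the datetime is converted via timeutils.strtime().
print(jsonutils.dumps(record))
# to_primitive() can also be called directly, e.g. to flatten nested structures
# before handing them to any JSON encoder.
print(jsonutils.to_primitive({'when': [datetime.datetime(2014, 1, 1)]}))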
<|file_name|>Institution.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2015 Unicon (R) Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*******************************************************************************/
/**
*
*/
package org.apereo.lai;
import java.io.Serializable;
<|fim▁hole|> *
*/
public interface Institution extends Serializable {
String getName();
String getKey();
String getSecret();
}<|fim▁end|> | /**
* @author ggilbert |
<|file_name|>update_default_servers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# This script prints a new "servers.json" to stdout.
# It prunes the offline servers from the existing list (note: run with Tor proxy to keep .onions),
# and adds new servers from provided file(s) of candidate servers.
# A file of new candidate servers can be created via e.g.:
# $ ./electrum_ltc/scripts/servers.py > reply.txt
import asyncio
import sys
import json
from electrum_ltc.network import Network
from electrum_ltc.util import create_and_start_event_loop, log_exceptions
from electrum_ltc.simple_config import SimpleConfig
from electrum_ltc import constants<|fim▁hole|>
try:
fname1 = sys.argv[1]
fname2 = sys.argv[2] if len(sys.argv) > 2 else None
except Exception:
print("usage: update_default_servers.py <file1> [<file2>]")
print(" - the file(s) should contain json hostmaps for new servers to be added")
print(" - if two files are provided, their intersection is used (peers found in both).\n"
" file1 should have the newer data.")
sys.exit(1)
def get_newly_added_servers(fname1, fname2=None):
with open(fname1) as f:
res_hostmap = json.loads(f.read())
if fname2 is not None:
with open(fname2) as f:
dict2 = json.loads(f.read())
common_set = set.intersection(set(res_hostmap), set(dict2))
res_hostmap = {k: v for k, v in res_hostmap.items() if k in common_set}
return res_hostmap
# testnet?
#constants.set_testnet()
config = SimpleConfig({'testnet': False})
loop, stopping_fut, loop_thread = create_and_start_event_loop()
network = Network(config)
network.start()
@log_exceptions
async def f():
try:
# prune existing servers
old_servers_all = constants.net.DEFAULT_SERVERS
old_servers_online = await network.prune_offline_servers(constants.net.DEFAULT_SERVERS)
# add new servers
newly_added_servers = get_newly_added_servers(fname1, fname2)
res_servers = {**old_servers_online, **newly_added_servers}
print(json.dumps(res_servers, indent=4, sort_keys=True))
print(f"got reply from {len(old_servers_online)}/{len(old_servers_all)} old servers", file=sys.stderr)
print(f"len(newly_added_servers)={len(newly_added_servers)}. total: {len(res_servers)}", file=sys.stderr)
finally:
stopping_fut.set_result(1)
asyncio.run_coroutine_threadsafe(f(), loop)<|fim▁end|> | |
<|file_name|>setup_verify.go<|end_file_name|><|fim▁begin|>package ovs
import (
"time"
ovs "github.com/docker/libnetwork/drivers/ovs/ovsdbdriver"
"github.com/vishvananda/netlink"
)<|fim▁hole|> var (
maxRetry int = 3
found = false
)
for retry := 1; retry <= maxRetry; retry++ {
_, err := netlink.LinkByName(config.BridgeName)
if err == nil {
found = true
break
}
time.Sleep(2 * time.Second)
}
if !found {
return &ErrNotFoundAfterMaxRetry{maxRetry: maxRetry}
}
return nil
}<|fim▁end|> |
// Check whether the bridge interface exists
func setupVerifyInterface(_ *ovs.OvsdbDriver, config *networkConfiguration) error { |
<|file_name|>v1_0_0.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2006-2012 by OpenLayers Contributors (see authors.txt for
* full list of contributors). Published under the 2-clause BSD license.
* See license.txt in the OpenLayers distribution or repository for the
* full text of the license. */
/**
* @requires OpenLayers/Format/SLD/v1.js
* @requires OpenLayers/Format/Filter/v1_0_0.js
*/
/**
* Class: OpenLayers.Format.SLD.v1_0_0
* Write SLD version 1.0.0.
*
* Inherits from:
* - <OpenLayers.Format.SLD.v1>
*/
OpenLayers.Format.SLD.v1_0_0 = OpenLayers.Class(
OpenLayers.Format.SLD.v1, {
<|fim▁hole|> * Constant: VERSION
* {String} 1.0.0
*/
VERSION: "1.0.0",
/**
* Property: schemaLocation
* {String} http://www.opengis.net/sld
* http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd
*/
schemaLocation: "http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd",
/**
* Constructor: OpenLayers.Format.SLD.v1_0_0
* Instances of this class are not created directly. Use the
* <OpenLayers.Format.SLD> constructor instead.
*
* Parameters:
* options - {Object} An optional object whose properties will be set on
* this instance.
*/
CLASS_NAME: "OpenLayers.Format.SLD.v1_0_0"
});<|fim▁end|> | /**
|
<|file_name|>rational.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ******************************************************************************
#
# Copyright (C) 2008-2010 Olivier Tilloy <[email protected]>
#
# This file is part of the pyexiv2 distribution.
#
# pyexiv2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# pyexiv2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyexiv2; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, 5th Floor, Boston, MA 02110-1301 USA.
#
# Author: Olivier Tilloy <[email protected]>
#
# ******************************************************************************
import unittest
from pyexiv2.utils import Rational
class TestRational(unittest.TestCase):
def test_constructor(self):
r = Rational(2, 1)
self.assertEqual(r.numerator, 2)
self.assertEqual(r.denominator, 1)
self.assertRaises(ZeroDivisionError, Rational, 1, 0)
def test_read_only(self):
r = Rational(3, 4)
try:
r.numerator = 5
except AttributeError:
pass
else:
self.fail('Numerator is not read-only.')
try:
r.denominator = 5
except AttributeError:
pass
else:
self.fail('Denominator is not read-only.')
def test_match_string(self):<|fim▁hole|> self.assertEqual(Rational.match_string('0/3'), (0, 3))
self.assertEqual(Rational.match_string('0/0'), (0, 0))
self.assertRaises(ValueError, Rational.match_string, '+3/5')
self.assertRaises(ValueError, Rational.match_string, '3 / 5')
self.assertRaises(ValueError, Rational.match_string, '3/-5')
self.assertRaises(ValueError, Rational.match_string, 'invalid')
def test_from_string(self):
self.assertEqual(Rational.from_string('4/3'), Rational(4, 3))
self.assertEqual(Rational.from_string('-4/3'), Rational(-4, 3))
self.assertRaises(ValueError, Rational.from_string, '+3/5')
self.assertRaises(ValueError, Rational.from_string, '3 / 5')
self.assertRaises(ValueError, Rational.from_string, '3/-5')
self.assertRaises(ValueError, Rational.from_string, 'invalid')
self.assertRaises(ZeroDivisionError, Rational.from_string, '1/0')
self.assertRaises(ZeroDivisionError, Rational.from_string, '0/0')
def test_to_string(self):
self.assertEqual(str(Rational(3, 5)), '3/5')
self.assertEqual(str(Rational(-3, 5)), '-3/5')
def test_repr(self):
self.assertEqual(repr(Rational(3, 5)), 'Rational(3, 5)')
self.assertEqual(repr(Rational(-3, 5)), 'Rational(-3, 5)')
self.assertEqual(repr(Rational(0, 3)), 'Rational(0, 3)')
def test_to_float(self):
self.assertEqual(Rational(3, 6).to_float(), 0.5)
self.assertEqual(Rational(11, 11).to_float(), 1.0)
self.assertEqual(Rational(-2, 8).to_float(), -0.25)
self.assertEqual(Rational(0, 3).to_float(), 0.0)
def test_equality(self):
r1 = Rational(2, 1)
r2 = Rational(2, 1)
r3 = Rational(8, 4)
r4 = Rational(3, 2)
self.assertEqual(r1, r2)
self.assertEqual(r1, r3)
self.assertNotEqual(r1, r4)<|fim▁end|> | self.assertEqual(Rational.match_string('4/3'), (4, 3))
self.assertEqual(Rational.match_string('-4/3'), (-4, 3)) |
<|file_name|>pipeP.js<|end_file_name|><|fim▁begin|>var assert = require('assert');
var Q = require('q');
var R = require('..');
describe('pipeP', function() {
function a(x) {return x + 'A';}
function b(x) {return x + 'B';}
it('handles promises', function() {
var plusOne = function(a) {return a + 1;};
var multAsync = function(a, b) {return Q.when(a * b);};
return R.pipeP(multAsync, plusOne)(2, 3)
.then(function(result) {
assert.strictEqual(result, 7);
});
});
it('returns a function with arity == leftmost argument', function() {
function a2(x, y) { void y; return 'A2'; }
function a3(x, y) { void y; return Q.when('A2'); }
function a4(x, y) { void y; return 'A2'; }
var f1 = R.pipeP(a, b);<|fim▁hole|> assert.strictEqual(f3.length, a3.length);
var f4 = R.pipeP(a4, b);
assert.strictEqual(f4.length, a4.length);
});
});<|fim▁end|> | assert.strictEqual(f1.length, a.length);
var f2 = R.pipeP(a2, b);
assert.strictEqual(f2.length, a2.length);
var f3 = R.pipeP(a3, b); |
<|file_name|>frontend.js<|end_file_name|><|fim▁begin|>/* Copyright 2018 Onestein
* License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). */
odoo.define('website_lazy_load_image.lazy_image_loader', function (require) {
'use strict';
var Class = require('web.Class');
var mixins = require('web.mixins');
/**
* Handles lazy loading of images.
*/
var LazyImageLoader = Class.extend(mixins.EventDispatcherMixin, {
/**
* The instance of the jQuery lazy loading plugin.
*
* @type {jQuery.lazy}
*/
plugin: null,
/**
* Use this to hook on the onFinishedAll of the lazy loading plugin
* of a specific instance of LazyImageLoader.
*
* @type {jQuery.Deferred}
*/
all_finished: null,
/**
* @class
* @param {String} selector The selector for the elements to lazy load.
*/
init: function (selector) {
mixins.EventDispatcherMixin.init.call(this);
this.all_finished = $.Deferred();
this.plugin = $(selector).data('loaded', false).lazy(
this._getPluginConfiguration()
);
},
/**
* Get the settings for the initialization of the lazy loading plugin.
*
* @private
* @returns {Object} Lazy loading plugin settings
*/
_getPluginConfiguration: function () {
return {
afterLoad: this._afterLoad.bind(this),
beforeLoad: this._beforeLoad.bind(this),
onError: this._onError.bind(this),
onFinishedAll: this._onFinishedAll.bind(this),
chainable: false,
};
},
/**
* Triggered by the beforeLoad event of the lazy loading plugin.
*
* @param {DOMElement} el
* @private
*/
_beforeLoad: function (el) {
this.trigger('beforeLoad', el);
},
/**
* Triggered by the afterLoad event of the lazy loading plugin.
*
* @param {DOMElement} el
* @private
*/
_afterLoad: function (el) {
this.trigger('afterLoad', el);
},
/**
* Triggered by the onError event of the lazy loading plugin.
*
* @param {DOMElement} el
* @private
*/
_onError: function (el) {
this.trigger('onError', el);
},
/**
* Triggered by the onFinished event of the lazy loading plugin.
*
* @private
*/
_onFinishedAll: function () {
this.all_finished.resolve();
this.trigger('onFinishedAll');<|fim▁hole|>
require('web.dom_ready');
var lazy_image_loader = new LazyImageLoader(
'#wrapwrap > main img:not(.lazyload-disable), ' +
'#wrapwrap > footer img:not(.lazyload-disable)'
);
return {
LazyImageLoader: LazyImageLoader,
lazy_image_loader: lazy_image_loader,
};
});<|fim▁end|> | },
}); |
<|file_name|>BaseFragment.java<|end_file_name|><|fim▁begin|>package io.kaif.mobile.app;
import rx.Observable;
import rx.android.app.support.RxFragment;
import rx.android.lifecycle.LifecycleObservable;
import rx.android.schedulers.AndroidSchedulers;
<|fim▁hole|> protected <T> Observable<T> bind(Observable<T> observable) {
return LifecycleObservable.bindFragmentLifecycle(lifecycle(),
observable.observeOn(AndroidSchedulers.mainThread()));
}
}<|fim▁end|> | public class BaseFragment extends RxFragment {
|
<|file_name|>one_device_strategy.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A tf.distribute.Strategy for running on a single device."""
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import input_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute.v1 import input_lib as input_lib_v1
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
# TODO(josh11b): Do we wrap values in types to generate errors if you are
# doing something that won't work with other DistributionStrategy
# implementations?
@tf_export("distribute.OneDeviceStrategy", v1=[])
class OneDeviceStrategy(distribute_lib.Strategy):
"""A distribution strategy for running on a single device.
Using this strategy will place any variables created in its scope on the
specified device. Input distributed through this strategy will be
prefetched to the specified device. Moreover, any functions called via
`strategy.run` will also be placed on the specified device
as well.
Typical usage of this strategy could be testing your code with the
tf.distribute.Strategy API before switching to other strategies which
actually distribute to multiple devices/machines.
For example:
```
strategy = tf.distribute.OneDeviceStrategy(device="/gpu:0")
with strategy.scope():
v = tf.Variable(1.0)
print(v.device) # /job:localhost/replica:0/task:0/device:GPU:0
def step_fn(x):
return x * 2
result = 0
for i in range(10):
result += strategy.run(step_fn, args=(i,))
print(result) # 90
```
"""
def __init__(self, device):
"""Creates a `OneDeviceStrategy`.
Args:
device: Device string identifier for the device on which the variables
should be placed. See class docs for more details on how the device is
used. Examples: "/cpu:0", "/gpu:0", "/device:CPU:0", "/device:GPU:0"
"""
super(OneDeviceStrategy, self).__init__(OneDeviceExtended(self, device))
distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
"OneDeviceStrategy")
def experimental_distribute_dataset(self, dataset, options=None): # pylint: disable=useless-super-delegation
"""Distributes a tf.data.Dataset instance provided via dataset.
In this case, there is only one device, so this is only a thin wrapper
around the input dataset. It will, however, prefetch the input data to the
specified device. The returned distributed dataset can be iterated over
similar to how regular datasets can.
NOTE: Currently, the user cannot add any more transformations to a
distributed dataset.
Example:
```
strategy = tf.distribute.OneDeviceStrategy()
dataset = tf.data.Dataset.range(10).batch(2)
dist_dataset = strategy.experimental_distribute_dataset(dataset)
for x in dist_dataset:
print(x) # [0, 1], [2, 3],...
```
Args:
dataset: `tf.data.Dataset` to be prefetched to device.
options: `tf.distribute.InputOptions` used to control options on how this
dataset is distributed.
Returns:
A "distributed `Dataset`" that the caller can iterate over.
"""
return super(OneDeviceStrategy, self).experimental_distribute_dataset(
dataset, options)
def distribute_datasets_from_function(
self,
dataset_fn, # pylint: disable=useless-super-delegation
options=None):
"""Distributes `tf.data.Dataset` instances created by calls to `dataset_fn`.
`dataset_fn` will be called once for each worker in the strategy. In this
case, we only have one worker and one device so `dataset_fn` is called
once.
The `dataset_fn` should take an `tf.distribute.InputContext` instance where
information about batching and input replication can be accessed:
```
def dataset_fn(input_context):
batch_size = input_context.get_per_replica_batch_size(global_batch_size)
d = tf.data.Dataset.from_tensors([[1.]]).repeat().batch(batch_size)
return d.shard(
input_context.num_input_pipelines, input_context.input_pipeline_id)
inputs = strategy.distribute_datasets_from_function(dataset_fn)
for batch in inputs:
replica_results = strategy.run(replica_fn, args=(batch,))
```
IMPORTANT: The `tf.data.Dataset` returned by `dataset_fn` should have a
per-replica batch size, unlike `experimental_distribute_dataset`, which uses
the global batch size. This may be computed using
`input_context.get_per_replica_batch_size`.
Args:
dataset_fn: A function taking a `tf.distribute.InputContext` instance and
returning a `tf.data.Dataset`.
options: `tf.distribute.InputOptions` used to control options on how this
dataset is distributed.
Returns:
A "distributed `Dataset`", which the caller can iterate over like regular
datasets.
"""
return super(OneDeviceStrategy,
self).distribute_datasets_from_function(dataset_fn, options)
def experimental_local_results(self, value): # pylint: disable=useless-super-delegation
"""Returns the list of all local per-replica values contained in `value`.
In `OneDeviceStrategy`, the `value` is always expected to be a single
value, so the result is just the value in a tuple.
Args:
value: A value returned by `experimental_run()`, `run()`,
`extended.call_for_each_replica()`, or a variable created in `scope`.
Returns:
A tuple of values contained in `value`. If `value` represents a single
value, this returns `(value,).`
"""
return super(OneDeviceStrategy, self).experimental_local_results(value)
def run(self, fn, args=(), kwargs=None, options=None): # pylint: disable=useless-super-delegation
"""Run `fn` on each replica, with the given arguments.
In `OneDeviceStrategy`, `fn` is simply called within a device scope for the
given device, with the provided arguments.
Args:
fn: The function to run. The output must be a `tf.nest` of `Tensor`s.
args: (Optional) Positional arguments to `fn`.
kwargs: (Optional) Keyword arguments to `fn`.
options: (Optional) An instance of `tf.distribute.RunOptions` specifying
the options to run `fn`.
Returns:
Return value from running `fn`.
"""
return super(OneDeviceStrategy, self).run(fn, args, kwargs, options)
def reduce(self, reduce_op, value, axis): # pylint: disable=useless-super-delegation
"""Reduce `value` across replicas.
In `OneDeviceStrategy`, there is only one replica, so if axis=None, value
is simply returned. If axis is specified as something other than None,
such as axis=0, value is reduced along that axis and returned.
Example:
```
t = tf.range(10)
result = strategy.reduce(tf.distribute.ReduceOp.SUM, t, axis=None).numpy()
# result: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
result = strategy.reduce(tf.distribute.ReduceOp.SUM, t, axis=0).numpy()
# result: 45
```
Args:
reduce_op: A `tf.distribute.ReduceOp` value specifying how values should
be combined.
value: A "per replica" value, e.g. returned by `run` to
be combined into a single tensor.
axis: Specifies the dimension to reduce along within each
replica's tensor. Should typically be set to the batch dimension, or
`None` to only reduce across replicas (e.g. if the tensor has no batch
dimension).
Returns:
A `Tensor`.
"""
return super(OneDeviceStrategy, self).reduce(reduce_op, value, axis)
def scope(self): # pylint: disable=useless-super-delegation
"""Returns a context manager selecting this Strategy as current.
Inside a `with strategy.scope():` code block, this thread
will use a variable creator set by `strategy`, and will
enter its "cross-replica context".
In `OneDeviceStrategy`, all variables created inside `strategy.scope()`
will be on `device` specified at strategy construction time.
See example in the docs for this class.
Returns:
A context manager to use for creating variables with this strategy.
"""
return super(OneDeviceStrategy, self).scope()
@tf_export(v1=["distribute.OneDeviceStrategy"]) # pylint: disable=empty-docstring
class OneDeviceStrategyV1(distribute_lib.StrategyV1):
__doc__ = OneDeviceStrategy.__doc__.replace(
"For example:\n ```",
"For example:\n ```\n tf.enable_eager_execution()")
def __init__(self, device):
super(OneDeviceStrategyV1, self).__init__(OneDeviceExtended(self, device))
distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
"OneDeviceStrategy")
__init__.__doc__ = OneDeviceStrategy.__init__.__doc__
# TODO(josh11b): Switch to V2 after callers have been updated to only V2 APIs.
class OneDeviceExtended(distribute_lib.StrategyExtendedV1):
"""Implementation of OneDeviceStrategy."""
def __init__(self, container_strategy, device):
super(OneDeviceExtended, self).__init__(container_strategy)
self._device = device_util.resolve(device)
self._input_device = device_util.get_host_for_device(self._device)
def _input_workers_with_options(self, options=None):
if not options or options.experimental_fetch_to_device:
return input_lib.InputWorkers([(self._input_device, (self._device,))])
else:
return input_lib.InputWorkers([(self._input_device,
(self._input_device,))])
@property
def _input_workers(self):
return self._input_workers_with_options()
def _create_variable(self, next_creator, **kwargs):
colocate_with = kwargs.pop("colocate_with", None)
if colocate_with is None:
with ops.device(self._device):
return next_creator(**kwargs)
elif isinstance(colocate_with, numpy_dataset.SingleDevice):
with ops.device(colocate_with.device):
return next_creator(**kwargs)
else:
with ops.colocate_with(colocate_with):
return next_creator(**kwargs)
def _validate_colocate_with_variable(self, colocate_with_variable):
distribute_utils.validate_colocate(colocate_with_variable, self)
def _make_dataset_iterator(self, dataset):
"""Make iterator from dataset without splitting the batch."""
# Note that split_batch_by argument is not passed because it is always 1 in
# this strategy, and adding it adds unnecessary overhead to the dataset.
return input_lib_v1.DatasetIterator(dataset, self._input_workers,
self._container_strategy())
def _make_input_fn_iterator(
self,
input_fn,
replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
return input_lib_v1.InputFunctionIterator(input_fn, self._input_workers,
[distribute_lib.InputContext()],
self._container_strategy())
def _experimental_make_numpy_dataset(self, numpy_input, session):
return numpy_dataset.one_host_numpy_dataset(
numpy_input, numpy_dataset.SingleDevice(self._input_device), session)
def _broadcast_to(self, tensor, destinations):
del destinations
return tensor
def _experimental_distribute_dataset(self, dataset, options):
# Note that split_batch_by argument is not passed because it is always 1 in
# this strategy, and adding it adds unnecessary overhead to the dataset.
if (options and options.experimental_replication_mode ==
distribute_lib.InputReplicationMode.PER_REPLICA):
raise NotImplementedError(
"InputReplicationMode.PER_REPLICA "
"is only supported in "
"`experimental_distribute_datasets_from_function`."
)
return input_util.get_distributed_dataset(
dataset,
self._input_workers_with_options(options),
self._container_strategy(),
options=options)
def _distribute_datasets_from_function(self, dataset_fn, options):
if (options and options.experimental_replication_mode ==
distribute_lib.InputReplicationMode.PER_REPLICA):
raise NotImplementedError(
"InputReplicationMode.PER_REPLICA "
"is only supported in "
"`experimental_distribute_datasets_from_function` "
"of tf.distribute.MirroredStrategy")
return input_util.get_distributed_datasets_from_function(
dataset_fn,
self._input_workers_with_options(options),
[distribute_lib.InputContext()],
self._container_strategy(),
options=options)
def _experimental_distribute_values_from_function(self, value_fn):
# TODO(b/137795644): This should return a PerReplica value but other
# methods like run in OneDeviceStrategy need to be modified
# to do the same.
return value_fn(distribute_lib.ValueContext())
# TODO(priyag): Deal with OutOfRange errors once b/111349762 is fixed.
def _experimental_run_steps_on_iterator(self, fn, iterator, iterations,
initial_loop_values=None):
if initial_loop_values is None:
initial_loop_values = {}
initial_loop_values = nest.flatten(initial_loop_values)
ctx = input_lib.MultiStepContext()
def body(i, *args):
"""A wrapper around `fn` to create the while loop body."""
del args
fn_result = fn(ctx, iterator.get_next())
flat_last_step_outputs = nest.flatten(ctx.last_step_outputs)
with ops.control_dependencies([fn_result]):
return [i + 1] + flat_last_step_outputs
# We capture the control_flow_context at this point, before we run `fn`
# inside a while_loop. This is useful in cases where we might need to exit
# these contexts and get back to the outer context to do some things, for
# e.g. create an op which should be evaluated only once at the end of the
# loop on the host. One such usage is in creating metrics' value op.
self._outer_control_flow_context = (
ops.get_default_graph()._get_control_flow_context()) # pylint: disable=protected-access
# TODO(priyag): Use max_iterations instead of an explicit counter.
cond = lambda i, *args: i < iterations
i = constant_op.constant(0)
loop_result = control_flow_ops.while_loop(
cond, body, [i] + initial_loop_values, name="",
parallel_iterations=1, back_prop=False, swap_memory=False,
return_same_structure=True)
del self._outer_control_flow_context
ctx.run_op = control_flow_ops.group(loop_result)
# Convert the last_step_outputs from a list to the original dict structure
# of last_step_outputs.
last_step_tensor_outputs = loop_result[1:]
last_step_tensor_outputs_dict = nest.pack_sequence_as(
ctx.last_step_outputs, last_step_tensor_outputs)
ctx._set_last_step_outputs(last_step_tensor_outputs_dict) # pylint: disable=protected-access
return ctx
def _call_for_each_replica(self, fn, args, kwargs):
strategy = self._container_strategy()
with ops.device(self._device), _OneDeviceReplicaContext(strategy):
return fn(*args, **kwargs)
def _reduce_to(self, reduce_op, value, destinations, options):
del reduce_op, destinations, options
return value
def _gather_to_implementation(self, value, destinations, axis, options):
del destinations, axis, options<|fim▁hole|> # The implementations of _update() and _update_non_slot() are identical
# except _update() passes `var` as the first argument to `fn()`.
return self._update_non_slot(var, fn, (var,) + tuple(args), kwargs, group)
def _update_non_slot(self, colocate_with, fn, args, kwargs, group):
del colocate_with
with ops.device(self._device), distribute_lib.UpdateContext(self._device):
result = fn(*args, **kwargs)
if group:
return result
else:
return nest.map_structure(self._local_results, result)
def read_var(self, replica_local_var):
"""Read the aggregate value of a replica-local variable."""
return array_ops.identity(replica_local_var)
def _local_results(self, value):
return (value,)
def value_container(self, value):
return value
def _in_multi_worker_mode(self):
"""Whether this strategy indicates working in multi-worker settings."""
return False
@property
def _num_replicas_in_sync(self):
return 1
@property
def worker_devices(self):
return (self._device,)
@property
def parameter_devices(self):
return (self._device,)
def non_slot_devices(self, var_list):
del var_list
return (self._device,)
@property
def experimental_should_init(self):
return True
@property
def experimental_between_graph(self):
return False
@property
def should_checkpoint(self):
return True
@property
def should_save_summary(self):
return True
# TODO(priyag): Delete this once all strategies use global batch size.
@property
def _global_batch_size(self):
"""Global and per-replica batching are equivalent for OneDeviceStrategy."""
return True
@property
def _support_per_replica_values(self):
return False
def _get_local_replica_id(self, replica_id_in_sync_group):
return replica_id_in_sync_group
class _OneDeviceReplicaContext(distribute_lib.ReplicaContext):
"""ReplicaContext for OneDeviceStrategy."""
def __init__(self, strategy):
distribute_lib.ReplicaContext.__init__(
self, strategy, replica_id_in_sync_group=0)
@property
def devices(self):
return self._strategy.extended.worker_devices<|fim▁end|> | return value
def _update(self, var, fn, args, kwargs, group): |
<|file_name|>CSNHandler.java<|end_file_name|><|fim▁begin|>package service.handler.twostep;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.SignatureException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import message.Operation;
import message.twostep.csn.*;
import service.Config;
import service.Key;
import service.KeyManager;<|fim▁hole|>import service.handler.ConnectionHandler;
import utility.Utils;
/**
*
* @author Scott
*/
public class CSNHandler extends ConnectionHandler {
public static final File ATTESTATION;
private static final ReentrantLock LOCK;
private static int CSN;
static {
ATTESTATION = new File(Config.ATTESTATION_DIR_PATH + "/service-provider/csn");
LOCK = new ReentrantLock();
CSN = 0;
}
public CSNHandler(Socket socket, KeyPair keyPair) {
super(socket, keyPair);
}
@Override
protected void handle(DataOutputStream out, DataInputStream in)
throws SignatureException, IllegalAccessException {
PublicKey clientPubKey = KeyManager.getInstance().getPublicKey(Key.CLIENT);
try {
Request req = Request.parse(Utils.receive(in));
LOCK.lock();
if (!req.validate(clientPubKey)) {
throw new SignatureException("REQ validation failure");
}
String result;
Operation op = req.getOperation();
File file = new File(Config.DATA_DIR_PATH + '/' + op.getPath());
boolean sendFileAfterAck = false;
if (req.getConsecutiveSequenceNumber() == CSN + 1) {
CSN += 1;
switch (op.getType()) {
case UPLOAD:
file = new File(Config.DOWNLOADS_DIR_PATH + '/' + op.getPath());
Utils.receive(in, file);
String digest = Utils.digest(file);
if (op.getMessage().compareTo(digest) == 0) {
result = "ok";
} else {
result = "upload fail";
}
Utils.writeDigest(file.getPath(), digest);
break;
case AUDIT:
file = new File(op.getPath());
result = Utils.readDigest(file.getPath());
sendFileAfterAck = true;
break;
case DOWNLOAD:
result = Utils.readDigest(file.getPath());
sendFileAfterAck = true;
break;
default:
result = "operation type mismatch";
}
} else {
result = "CSN mismatch";
}
Acknowledgement ack = new Acknowledgement(result, req);
ack.sign(keyPair);
Utils.send(out, ack.toString());
if (sendFileAfterAck) {
Utils.send(out, file);
}
Utils.appendAndDigest(ATTESTATION, ack.toString() + '\n');
} finally {
if (LOCK != null) {
LOCK.unlock();
}
}
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
.. module: FSRStools.rraman
:platform: Windows
.. moduleauthor:: Daniel Dietze <[email protected]>
Resonance Raman excitation profile calculation based on the time-domain picture of resonance Raman. See Myers and Mathies in *Biological Applications of Raman Spectroscopy*, Vol. 2, pp. 1-58 (John Wiley and Sons, New York, 1987) for details (referred to as Myers in the following). The code is mainly based on Myers' Fortran 77 code (see Appendix of PhD Thesis of K. M. Spillane, 2011, UC Berkeley for source code).
**Changelog:**
*10-7-2015:*
- Added / modified functions for calculating fluorescence spectra.
- Added a convenience function to calculate Raman spectra from a set of excitation profiles.
- Added some more damping functions and phenomenological support for Stokes shift in simple homogeneous damping function.
*10-21-2015:*
- Some bug fixes concerning the prefactors and the normalization of the fluorescence spectra.
- Fixed a bug regarding the Raman overlaps.
**Example Code**
Here is a short example calculating Myers' *Gedankenmolecule* from Myers and Mathies::
import numpy as np
import FSRStools.rraman as rr
# parameters:
# -----------
# displacements
D = np.array([1.27, 0.3, 0.7, 0.53])
# ground state frequencies
RMg = np.array([1550.0, 1300.0, 1150.0, 1000.0])
# excited state frequencies
RMe = np.array([1550.0, 1300.0, 1150.0, 1000.0])
# electronic zero-zero energy
E0 = 20700.0
# homogeneous linewidth and shape parameter
Gamma = 200.0
halpha = 0
# inhomogeneous linewidth and shape parameter
sig = 400.0
ialpha = 1
# electronic transition dipole length
M = 0.8
# index of refraction of surrounding medium
IOR = 1.0
# time axis parameters for integrations
tmax = 5000
dt = 0.2
# just calculate fundamentals
nquanta = np.identity(len(RMg))
sshift = np.dot(nquanta, RMg)
# calculation part
# ----------------
# create axes
t, wn = rr.getAxes(tmax, dt)
# zero-zero energy and damping
# add here all time domain stuff
TDpart = rr.getHomogeneousDamping(t, Gamma, halpha)
# time dependent overlap integrals
OVLPS = rr.getOverlaps(t, D, RMg, RMe, nquanta)
# calculate cross-sections
sigmaA, sigmaR, kF = rr.getCrossSections(t, wn, E0, OVLPS, sshift, M, IOR, TDpart, sig, ialpha)
..
This file is part of the FSRStools python module.
The FSRStools python module is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The FSRStools python module is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the FSRStools python module. If not, see <http://www.gnu.org/licenses/>.
Copyright 2014, 2015 Daniel Dietze <[email protected]>.
"""
import numpy as np
# some constants
hbar = 5308.880986 #: Planck's constant over 2 pi, hbar, in `cm-1 fs`
c0 = 2.99792458e-5 #: speed of light in `cm / fs`
kB = 0.695 #: Boltzman's constant in `cm-1 / K`
# -------------------------------------------------------------------------------------------------------------------
# some useful functions
def radperfs2wn(w):
"""Angular frequency (rad / fs) to wavenumber (cm-1).
"""
return hbar * w
def wn2radperfs(e):
"""Wavenumber (cm-1) to angular frequency (rad / fs).
"""
return e / hbar
def wn2lambda(w):
"""Convert wavenumber (cm-1) to wavelength (nm).
"""
return 1e7 / w
def lambda2wn(w):
"""Convert wavelength (nm) to wavenumber (cm-1).
"""
return 1e7 / w
def getWnIndex(wn, wn0):
"""Get the index into an array of wavenumbers wn with wavenumber closest to wn0. Use this function for :py:func:`getRamanSpectrum`.
"""
if np.amin(wn) > wn0 or np.amax(wn) < wn0:
print "Warning: wn0 lies outside of wn."
return np.argmin(np.absolute(wn - wn0))
def getAxes(tmax, dt):
"""Create time and frequency axes for the resonance Raman calculations.
:param float tmax: Endpoint for time domain calculation (fs). This value should be high enough to capture the full dephasing.
:param float dt: Increment of time axis (fs). This value should be small enough to capture the highest vibronic feature in the excited state.
:returns: Time axis (fs) and frequency axis (cm-1).
"""
t = np.arange(0, tmax + dt, dt)
numPoints = len(t)
wn = np.arange(numPoints) / (c0 * dt * numPoints)
return t, wn
def molarExtinction2AbsCS(eSpctr, IOR):
"""Convert molar extinction (cm-1 / M) to molecular absorption cross section (A**2 / molec).
    See McHale, Resonance Raman Spectroscopy, Wiley, (2002), p. 545 or Myers & Mathies for details. The absorption cross section in solution has to be scaled by the index of refraction of the medium unless the molar extinction has already been corrected for it.
:param array eSpctr: Extinction spectrum in (cm-1 / M).
:param float IOR: Index of refraction of surrounding solvent / medium.
:returns: Absorption spectrum in units of (A**2 / molec.), same shape as eSpcrt.
"""
return 1e3 * np.log(10.0) * eSpctr / 6.0221e23 * 1e8 * 1e8 / IOR
def diff2absRamanCS(diffRaCS, rho):
"""Convert the differential Raman cross section (A**2/molec sr) to absolute Raman cross section in (A**2 / molec) for a given depolarization ratio rho.
:param float diffRaCS: Differential Raman cross section (A**2/molec sr).
:param float rho: Associated depolarization ratio of this Raman mode.
:returns: Absolute Raman cross section in (A**2 / molec).
"""
return 8.0 * np.pi / 3.0 * (1.0 + 2.0 * rho) / (1.0 + rho) * diffRaCS
def getRamanSpectrum(wn, iEL, RMg, nquanta, sigmaR, dw=10.0, alpha=0):
"""
Convenience function to calculate the Raman spectrum. The spectrum is scattered power per infinitesimal frequency normalized to incident power times molecular density (cm-3) times path length (cm). See Myers, *Chem. Phys.* **180**, 215 (1994), Eq. 7 for details.
:param array wn: Wavenumber axis (Stokes shift, not electronic).
:param int iEL: Index into sigmaR corresponding to the pump energy of the laser.
:param array RMg: Ground state Raman frequencies
:param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0, 1, 2.
:param array sigmaR: Array of M Raman cross sections that have been calculated by :py:func:`getCrossSections` (in A**2 / molec).
:param float dw: Phenomenological FWHM linewidth of the Raman lines in cm-1 (default = 10 cm-1).
:param float alpha: Line shape parameter to be used for the Raman spectrum:
- 1 = Gaussian
- 0 = Lorentzian (default)
:returns: Calculated Raman spectrum (same shape as wn).
"""
spectrum = np.zeros(len(wn))
if iEL < 0 or iEL >= len(sigmaR[0]):
print "Error: iEL is out of range!"
return spectrum
# iterate over all M modes
for i, nM in enumerate(nquanta):
# get frequency of this mode
wR = np.sum(nM * RMg)
# add Lorentzian part of lineshape
spectrum = spectrum + (1.0 - alpha) * sigmaR[i][iEL] * 1e-16 * (dw / (2.0 * np.pi * ((wn - wR)**2 + dw**2 / 4.0)))
# add Gaussian part of lineshape
spectrum = spectrum + alpha * sigmaR[i][iEL] * 1e-16 * ((2.0 * np.sqrt(np.log(2) / np.pi)) / dw * np.exp(-4.0 * np.log(2.0) * (wn - wR)**2 / dw**2))
return spectrum
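# Illustrative usage sketch (not part of the original module): once the cross
# sections have been computed, e.g. with getCrossSections() as in the module
# docstring, getWnIndex() picks the excitation energy and getRamanSpectrum()
# assembles the spectrum. The variable names wn, RMg, nquanta and sigmaR below
# follow the docstring example and are assumptions:
#
#     iEL = getWnIndex(wn, lambda2wn(480.0))   # excitation wavelength of 480 nm
#     spec = getRamanSpectrum(wn, iEL, RMg, nquanta, sigmaR, dw=10.0, alpha=0)
#     # spec has the same shape as wn; alpha=0 gives Lorentzian, alpha=1 Gaussian lines.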
# -----------------------------------------------------------------------------------------------------------------------------------
# time dependent overlap integrals with equal ground and excited state vibrational frequencies
# the t00 overlap does not contain the factors exp(-1j wVIB t) nor exp(-1j E0/hbar t) as these are taken care of when assembling the cross section
# Myers eqs. (37) - (39)
# Delta = displacement in dimensionless coordinates
# eVIB = vibrational frequency (cm-1)
# t = time axis in fs
def t00A(t, Delta, eVIB):
"""Time dependent overlap integral between vibrational ground states of electronic ground and excited state with equal ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eVIB: Vibrational frequency (cm-1).
:returns: 0-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (37) - (39).
"""
# The 0-0 overlap does not contain the factors :math:`e^{-j w_{VIB} t}` nor :math:`e^{-j E_0 / \\hbar t}` as these are taken care of when assembling the cross section.
return np.exp(-Delta**2 / 2.0 * (1.0 - np.exp(-1j * eVIB / hbar * t)))
def t10A(t, Delta, eVIB):
"""Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with equal ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eVIB: Vibrational frequency (cm-1).
:returns: 1-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (37) - (39).
"""
return Delta / np.sqrt(2) * (np.exp(-1j * eVIB / hbar * t) - 1.0) # * t00A(t, Delta, eVIB)
def t20A(t, Delta, eVIB):
"""Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with equal ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eVIB: Vibrational frequency (cm-1).
:returns: 2-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (37) - (39).
"""
return Delta**2 / (2 * np.sqrt(2)) * (np.exp(-1j * eVIB / hbar * t) - 1.0)**2 # * t00A(t, Delta, eVIB)
# -------------------------------------------------------------------------------------------------------------------------------------------------
# same with different frequency in ground and excited state
# Myers eqs. (42) - (44)
# Delta = displacement in dimensionless coordinates
# eg = ground state vibrational frequency (cm-1)
# ee = excited state vibrational frequency (cm-1)
# t = time axis in fs
def t00B(t, Delta, eg, ee):
"""Time dependent overlap integral between vibrational ground states of electronic ground and excited state with different ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eg: Vibrational frequency in the ground state (cm-1).
:param float ee: Vibrational frequency in the excited state (cm-1).
:returns: 0-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (42) - (44).
"""
wg = eg / hbar
we = ee / hbar
swe = np.sin(we * t)
cwe = np.cos(we * t)
pt = we / wg * Delta * swe
qt = Delta * (1 - cwe)
# the log reduces to 0.5 * eg / hbar * t when eg = ee
# this is the factor that is taken out in the t00A case, as it cancels with the exp in the integral later on
# however, np.log returns values such that -pi < arg(log(..)) < pi
gt = 1j / 2.0 * np.log(1j * wg / we * swe + cwe) + pt * (qt - Delta) / 2.0 # skip -E0 t / hbar
# gt = gt + wg * t / 2.0 # add +w t / 2 using ground state frequency as this compensates the -w t / 2.0 term coming from the FFT
# add the following term to recover t00A for eg = ee
gt = gt - 1j / 2.0 * np.log(1j * np.sin(wg * t) + np.cos(wg * t))
at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
a = at + 0.5
pp = pt - 2.0 * 1j * at * qt
gp = 1j * at * qt**2 - pt * qt + gt
return a**(-0.5) * np.exp(-pp**2 / (4.0 * a)) * np.exp(1j * gp)
def t10B(t, Delta, eg, ee):
"""Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with different ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eg: Vibrational frequency in the ground state (cm-1).
:param float ee: Vibrational frequency in the excited state (cm-1).
:returns: 1-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (42) - (44).
"""
wg = eg / hbar
we = ee / hbar
swe = np.sin(we * t)
cwe = np.cos(we * t)
pt = we / wg * Delta * swe
qt = Delta * (1 - cwe)
at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
a = at + 0.5
pp = pt - 2.0 * 1j * at * qt
return 2**(-0.5) * pp / (1j * a) # * t00B(t, Delta, eg, ee)
def t20B(t, Delta, eg, ee):
"""Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with different ground and excited state vibrational frequencies.
:param array t: Time axis in (fs).
:param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
:param float eg: Vibrational frequency in the ground state (cm-1).
:param float ee: Vibrational frequency in the excited state (cm-1).
:returns: 2-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (42) - (44).
"""
wg = eg / hbar
we = ee / hbar
swe = np.sin(we * t)
cwe = np.cos(we * t)
pt = we / wg * Delta * swe
qt = Delta * (1 - cwe)
at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
a = at + 0.5
pp = pt - 2.0 * 1j * at * qt
return -8**(-0.5) * (pp**2 / a**2 + 2. * (1. - 1. / a)) # * t00B(t, Delta, eg, ee)
# ----------------------------------------------------------------------------------------------------------------------------------
# same for linear dissociative excited state surfaces
# Myers eqs. (52) - (54)
# beta = slope of potential energy surface (dV / dq) in cm-1 (q is dimensionless coordinate)
# eVIB = vibrational frequency (cm-1)
def t00D(t, beta, eVIB):
"""Time dependent overlap integral between vibrational ground states of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate.
:param array t: Time axis in (fs).
:param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
:param float eVIB: Vibrational frequency (cm-1).
:returns: 0-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (52) - (54).
<|fim▁hole|>
tmp = (1.0 + 1j * eVIB / hbar * t / 2.0)**(-0.5) * np.exp(-beta**2 * (6 * t**2 + 1j * eVIB / hbar * t**3) / (24 * hbar**2))
    tmp = tmp * np.exp(1j * eVIB / hbar * t / 2.0) # add this term to compensate for the -1j w t / 2 term coming from the FFT
return tmp
def t10D(t, beta, eVIB):
"""Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate.
:param array t: Time axis in (fs).
:param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
:param float eVIB: Vibrational frequency (cm-1).
:returns: 1-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (52) - (54).
"""
return -1j * 2**(-0.5) * (beta * t / hbar) # * t00D(t, beta, eVIB)
def t20D(t, beta, eVIB):
"""Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate.
:param array t: Time axis in (fs).
:param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
:param float eVIB: Vibrational frequency (cm-1).
:returns: 2-0 overlap integral as function of time (same shape as t).
.. seealso:: Myers, Eqs. (52) - (54).
"""
return -2**(-0.5) * (beta**2 * t**2 / (2.0 * hbar**2) - 1j * eVIB / hbar * t / (2.0 + 1j * eVIB / hbar * t)) # * t00D(t, beta, eVIB)
# ---------------------------------------------------------------------------------------------------------------------------------
def getOverlaps(t, D, RMg, RMe, nquanta):
"""Calculate the time dependent overlap integrals / Franck-Condon factors :math:`<i|i(t)>_k` and :math:`<f|i(t)>_k`.
.. versionchanged:: 10-07-2015
Format of return value changed.
:param array t: Time axis in (fs).
:param array D: Array of N normalized displacements of excited state surfaces (deltas), or slope of linear dissociative excited state surface.
:param array RMg: N Raman ground state frequencies (cm-1).
:param array RMe: N Raman excited state frequencies (cm-1) or -1 if excited state surface is dissociative.
:param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0 (no excitation), 1 (fundamental), 2 (first overtone).
:returns: M + 2 - dimensional array containing the Rayleigh, fluorescence and M Raman overlaps.
"""
ovlps = []
N = len(D)
M = nquanta.shape[0]
# Frank-Condon factors <i|i(t)>_k and <f|i(t)>_k
FC0 = []
FC0p = []
FC1 = []
FC2 = []
for i in range(N):
if(RMg[i] == RMe[i]):
FC0.append(t00A(t, D[i], RMg[i]))
FC0p.append(FC0[-1]) # fluorescence overlap is identical to absorption overlap when frequencies are equal
FC1.append(t10A(t, D[i], RMg[i]))
FC2.append(t20A(t, D[i], RMg[i]))
elif(RMe[i] == -1):
FC0.append(t00D(t, D[i], RMg[i]))
FC0p.append(np.zeros(len(t))) # fluorescence is negligible from dissociative surface
FC1.append(t10D(t, D[i], RMg[i]))
FC2.append(t20D(t, D[i], RMg[i]))
else:
FC0.append(t00B(t, D[i], RMg[i], RMe[i]))
FC0p.append(t00B(t, D[i], RMe[i], RMg[i])) # fluorescence overlap has excited state and ground state Raman frequencies switched
FC1.append(t10B(t, D[i], RMg[i], RMe[i]))
FC2.append(t20B(t, D[i], RMg[i], RMe[i]))
# go to numpy array..
FC0 = np.array(FC0)
FC0p = np.array(FC0p)
FC1 = np.array(FC1)
FC2 = np.array(FC2)
# Rayleigh / absorption overlap
oabs = 1.0 + 0.0 * 1j # reuse this term for the raman overlaps
for i in range(N):
oabs = oabs * FC0[i]
ovlps.append(oabs)
# fluorescence overlap
o = 1.0 + 0.0 * 1j
for i in range(N):
o = o * FC0p[i]
ovlps.append(o)
# actual Raman overlaps
for j in range(M):
o = 1.0 * oabs # all raman modes are based on this product and additional terms given by the excited modes
for i in range(N):
if(nquanta[j][i] == 1):
o = o * FC1[i]
elif(nquanta[j][i] == 2):
o = o * FC2[i]
ovlps.append(o)
return ovlps
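# Illustrative note (not part of the original module): each row of nquanta selects
# one Raman line. For three modes, the fundamentals plus one overtone and one
# combination band could be requested as, e.g.,
#
#     nquanta = np.array([[1, 0, 0],
#                         [0, 1, 0],
#                         [0, 0, 1],    # fundamentals, i.e. np.identity(3)
#                         [2, 0, 0],    # first overtone of mode 0
#                         [1, 1, 0]])   # combination band of modes 0 and 1
#
# getOverlaps() then returns the Rayleigh and fluorescence overlaps followed by one
# overlap per row of nquanta, in that order.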
# ---------------------------------------------------------------------------------------------------------------------------------
def getZeroZeroEnergy(t, E0):
"""Calculate the oscillation term in the time domain due to the electronic zero-zero energy E0.
:param array t: Time axis (fs).
:param float E0: Difference between excited and ground state vibrational ground state energies, *zero-zero energy* (cm-1).
"""
return np.exp(-1j * E0 / hbar * t)
# -----------------------------------------------------------------------------------------------------------------------------
# Calculate the damping terms as function of time t.
def getHomogeneousDamping(t, Gamma, alpha=0, lmbda=0):
"""Calculates the damping term arising from the homogeneous linewidth of the electronic transition. Offers phenomenological support for Stokes shift.
.. note:: Added phenomenological Stokes shift to input parameters on 10-12-2015. See for example *New J Phys* **11**, 015001 (2009), Eqs. (1) and (2).
:param array t: Time axis (fs).
    :param float Gamma: Decay rate according to :math:`1 / \\tau` in (cm-1), where :math:`\\tau` is the exponential dephasing time.
:param float alpha: Line shape parameter:
- 1 = Gaussian
- 0 = Lorentzian
:param float lmbda: Phenomenological Stokes shift (cm-1) which is added as imaginary part to g(t). Compared to the Brownian oscillator models, lmbda **is** the observed Stokes shift. (default = 0)
:returns: Damping term in the time domain, :math:`e^{-g(t) - i \lambda t / 2 \hbar}`.
"""
g = alpha * (Gamma**2 / hbar**2 * t**2) + (1 - alpha) * (Gamma / hbar * t) + 1j * lmbda / 2.0 * t / hbar
return np.exp(-g)
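# Worked example (assuming hbar is given in cm-1 fs, as used throughout this module):
# an exponential dephasing time of tau = 50 fs corresponds to Gamma = hbar / tau ~ 5308 / 50 ~ 106 cm-1.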
def getKuboDamping(t, Delta, Lambda):
"""Calculates the damping term using Kubo's *stochastic model*. This model describes the broadening, but does not yield solvent induced Stokes shifts.
:param array t: Time axis (fs).
:param float Delta: Magnitude of solvent energy gap fluctuations (cm-1). This parameter also controls the effective line shape:
- Delta >> Lambda = Lorentzian
- Delta << Lambda = Gaussian
:param float Lambda: Effective frequency of solvent fluctuations (cm-1).
:returns: Damping term in the time domain, :math:`e^{-g(t)}`.
.. seealso:: Myers, *J. Raman. Spectrosc.* **28**, 389 (1997)
"""
return np.exp(-(Delta / Lambda)**2 * (np.exp(-Lambda / hbar * t) + Lambda / hbar * t - 1.0))
def getBrownianDamping(t, kappa, T, egamma, cutoff=1e-6):
"""Calculate the damping term using Mukamel's Brownian oscillator model based on Myers Fortran code. The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
:param array t: Time axis (fs).
:param float kappa: Lineshape parameter:
- kappa >> 1 = Lorentzian,
- kappa << 1 = Gaussian.
:param float T: Temperature in K.
:param float egamma: Electronic homogeneous linewidth (**FWHM**, cm-1).
:param float cutoff: Cutoff for sum over Brownian oscillators. Typically between 1e-6 (default) and 1e-8. Check for convergence by re-running with different values.
:returns: Damping term in the time domain, :math:`e^{-g(t)}`.
.. seealso:: Myers, *J. Raman. Spectrosc.* **28**, 389 (1997)
"""
temp = np.absolute(T)
# ----------------------------------------------------------
# 1: derive Mukamel's parameters from kappa, temp and egamma
# I do not have a reference for this part - it's taken from Myers fortran code
# Boltzmann beta
beta = 1.0 / (kB * temp) # 1/cm-1
# some 'a' parameter (this comes from Myers Fortran program)
a = (2.355 + 1.76 * kappa) / (1.0 + 0.85 * kappa + 0.88 * kappa**2)
# these are Mukamel's parameters in Myers, J. Raman. Spec. 28, 389 (1997), eqs. (35) to (38)
Lambda = kappa * egamma / a # cm-1
lmbda = beta * (Lambda / kappa)**2 / 2.0 # cm-1
# ----------------------------------------------------------
# 2: calculate the sum over n Brownian oscillators
vs = np.zeros(len(t)) # this is the sum over the n oscillators as function of time in (cm-1)**-3
n = 0
while(True):
n = n + 1
vn = 2.0 * np.pi * n / beta # cm-1
vinc = (np.exp(-vn / hbar * t) + vn / hbar * t - 1) / (vn * (vn**2 - Lambda**2))
vs = vs + vinc
if(np.amax(np.absolute(vinc[1:] / vs[1:])) < cutoff): # the first element of vs is always 0
break
# ----------------------------------------------------------
# 3: calculate the damping function g(t)
gexp = np.exp(-Lambda / hbar * t) + Lambda / hbar * t - 1.0 # dimensionless
greal = (lmbda / Lambda) / np.tan(beta * Lambda / 2.0) * gexp # dimensionless
greal = greal + 4.0 * lmbda * Lambda / beta * vs # dimensionless
gimag = -(lmbda / Lambda) * gexp # dimensionless
g = greal + 1j * gimag # dimensionless
return np.exp(-g)
def getBrownianDamping2(t, lmbda, Lambda, T=298.0, cutoff=1e-6):
"""Calculate pure electronic dephasing due to interaction with solvent using frictionally overdamped Brownian oscillator model.
The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
:param array t: Time axis in fs.
:param float lmbda: Solvent contribution to reorganization energy (cm-1).
:param float Lambda: Inverse of characteristic time scale for solvent fluctuations (fs-1).
:param float T: Temperature (K, default = 298 K).
:param float cutoff: Cutoff value for summation over brownian oscillators (default 1e-6).
:returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: This implementation is taken from Kulinowski, *J Phys Chem* **99**, 9017 (1995), Eqs. (10a) to (10d).
"""
beta = 1.0 / (kB * np.absolute(T))
lmb = lmbda / hbar # convert to fs-1
# calculate real part as sum over oscillators
gR = 0.0
i = 1.0
while(1):
nun = 2.0 * np.pi / (hbar * beta) * i # frequency of ith oscillator
dg = (np.exp(-nun * t) + nun * t - 1.0) / (nun * (nun**2 - Lambda**2))
gR = gR + dg
i = i + 1.0
        if np.sum(np.absolute(dg)) / np.sum(np.absolute(gR)) < cutoff:
break
gR = gR * 4.0 * lmb * Lambda / (hbar * beta)
    gR = gR + (lmb / Lambda) / np.tan(hbar * beta * Lambda / 2.0) * (np.exp(-Lambda * t) + Lambda * t - 1.0)  # cot(x) = 1 / tan(x); numpy has no cot
# calculate imaginary part = Stokes shift
gI = -(lmb / Lambda) * (np.exp(-Lambda * t) - 1.0)
# assemble
g = gR + 1j * gI # dimensionless
return np.exp(-g)
def getBrownianDampingSlowMod(t, lmbda, T=298.0):
"""Calculate pure electronic dephasing due to interaction with solvent using frictionally overdamped Brownian oscillator model in the high-temperature and slow-modulation limit.
The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
:param array t: Time axis in fs.
:param float lmbda: Solvent contribution to reorganization energy (cm-1).
:param float T: Temperature (K, default = 298 K).
:returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: This implementation is taken from Kulinowski, *J Phys Chem* **99**, 9017 (1995), Eq. (11).
"""
lmb = lmbda / hbar # convert to fs-1
return np.exp(-(lmb * kB * np.absolute(T) * t**2 / hbar + 1j * lmb * t))
# ---------------------------------------------------------------------------------------------------------------------------------
#
def applyInhomogeneousBroadening(wn, y, sig, alpha=1):
"""Convolute a spectrum with a Gaussian/Lorentzian to account for inhomogeneous broadening.
:param array wn: Frequency axis in same units as sig (cm-1).
:param array y: Input spectrum, same shape as wn.
:param float sig: Width of convolution function in same units as x (standard deviation of Gaussian distribution). Must not be zero.
:param float alpha: Lineshape parameter:
- 1 = Gaussian,
- 0 = Lorentzian.
:returns: Convoluted spectrum (same shape as y).
"""
ck = alpha / (sig * np.sqrt(2 * np.pi)) * np.exp(-(wn - (wn[-1] + wn[0]) / 2.0)**2 / (2.0 * sig**2))
ck += (1 - alpha) * sig / (np.pi * ((wn - (wn[-1] + wn[0]) / 2)**2 + sig**2))
# np.convolve uses a sum, whereas the function we want uses an integral; wn[1] - wn[0] is dwn
return (wn[1] - wn[0]) * np.convolve(y, ck, 'same')
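# Illustrative example (hypothetical values): broaden a computed absorption spectrum
# with a 100 cm-1 wide Gaussian site distribution.
#   sigmaA_broadened = applyInhomogeneousBroadening(wn, sigmaA, 100.0, alpha=1)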
# --------------------------------------------------------------------------------------------------------------------------------
def prefA(eEL, M, IOR, dt):
"""Return the prefactor for the absorption cross section calculation in (A**2 / molec).
:param array eEL: Laser excitation energy in (cm-1). May also be a single float value.
:param float M: Electronic transition dipole length in (A).
:param float IOR: Index of refraction of surrounding solvent / medium.
:param float dt: Time increment used for integration (fs).
:returns: Prefactor for absorption cross section calculation.
.. seealso:: Myers, Eq. (35).
"""
# to convert from esu to SI divide by 4 pi eps0
    # the factor / 2 arises from numpy's normalization of rfft, which is chosen to match the amplitude of fft,
    # so rfft is not completely identical to the half-sided FT integral
return 5.7579e-6 * M**2 * eEL * dt / IOR / 2.0
# -------------------------------------------------------------------------------------------------------------------------------
def prefR(eEL, M, eR, dt):
"""Return the prefactor for the Raman excitation profile calculation (A**2 / molec).
:param array eEL: Laser excitation energies in (cm-1). Can also be a single floating point value.
:param float M: Electronic transition dipole moment in (A).
:param float eR: Stokes shift of the Raman line in (cm-1).
:param float dt: Time increment for the integration (fs).
:returns: The prefactor for the Raman excitation profile calculation.
.. seealso:: Myers, Eq. (34) and following text.
"""
# get energy of stokes shifted photons
eES = eEL - eR
# the 1e-6 is for fs instead of ps in the integral and is consistent with Myers fortran code (it is different however from the 1e4 factor in Valley & Hoffman code!!)
# to convert from esu to SI divide by (4 pi eps0)**2
return 2.0831e-20 * 1e-6 * M**4 * eES**3 * eEL * dt**2
# --------------------------------------------------------------------------------------------------------------------------------
def prefF(eEF, M, IOR, dt):
"""Return the prefactor for the fluorescence efficiency calculation (unitless). See :py:func:`getCrossSections` for more details.
:param array eEF: Fluorescence energy in (cm-1). May also be a single float value.
:param float M: Electronic transition dipole length in (A).
:param float IOR: Index of refraction of surrounding solvent / medium.
:param float dt: Time increment used for integration (fs).
:returns: Prefactor for fluorescence efficiency calculation.
.. seealso:: Myers, *Chem. Phys.* **180**, 215 (1994), Eqs. (6) and (26).
"""
# to convert from esu to SI divide by 4 pi eps0
    # the factor / 2 arises from numpy's normalization of rfft, which is chosen to match the amplitude of fft,
    # so rfft is not completely identical to the half-sided FT integral
return 3.6656e-22 * IOR * M**2 * eEF**3 * dt / 2.0
# ----------------------------------------------------------------------------------------------------------------------------
def getCrossSections(t, wn, E0, ovlps, sshift, M, IOR, damp=1, sig=0, ialpha=1):
"""Calculate the absorption and Raman cross-sections and the fluorescence efficiency. The latter is a unitless quantity which may be used
to calculate the fluorescence rate (=rate of spontaneous emission) by integrating over the frequency axis (see Myers, *Chem. Phys.* **180**, 215 (1994) Eq. 6 and discussion).
.. note:: Changed shape of input parameters and shape of return values on 10-07-2015.
:param array t: Time axis in (fs). This axis is used for the calculation of the zero-zero energy term in the time domain.
:param array wn: Wavenumber axis in (cm-1). Same shape as t.
:param array E0: Zero-zero energy. This function then calculates the time domain part using `getZeroZeroEnergy`.
:param array ovlps: M + 2 Absorption, fluorescence and Raman overlap integrals.
    :param array sshift: Vibrational frequencies of the M Raman modes to calculate (cm-1).
:param float M: Electronic transition dipole length (A).
:param float IOR: Index of refraction of surrounding medium / solvent.
:param array damp: Damping function in the time domain. Same shape as t. Set to 1 if no damping is used (default).
:param float sig: Linewidth for inhomogeneous damping (standard deviation of Gaussian), set to zero if not used (default).
:param float ialpha: Lineshape parameter for inhomogeneous damping:
- 1 = Gaussian (default),
- 0 = Lorentzian.
:returns: Absorption (sigmaA), M Raman cross sections (sigmaR[M]), both in A**2 / mol., and fluorescence efficiency spectrum, kF (arrays have same shape as wn); all as function of excitation wavenumber.
"""
Npoints = len(wn)
dt = t[1] - t[0]
    # calculate zero-zero time domain part
tdpart = getZeroZeroEnergy(t, E0)
# absorption cross section - using the half sided FT (equivalent to rfft)
tmp = np.real(Npoints * np.fft.irfft(ovlps[0] * tdpart * damp, Npoints))
if(sig > 0):
tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
sigmaA = prefA(wn, M, IOR, dt) * tmp
# fluorescence rate / intensity - using half sided FT - similar to absorption
# in order to account for the sign change, the zero-zero energy time domain part and the damping term had to be separated;
# use the tdpart conjugated and change irfft by hfft to get the factor exp(-1j w t)
# numpy does not normalize the forward FFT, so no factor Npoints
tmp = np.real(np.fft.hfft(ovlps[1] * np.conjugate(tdpart) * damp, Npoints))
if(sig > 0):
tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
kF = prefF(wn, M, IOR, dt) * tmp
# Raman cross sections - using a standard FT
sigmaR = []
for i, ovlp in enumerate(ovlps[2:]): # iterate over all lines
tmp = np.absolute(Npoints * np.fft.ifft(ovlp * tdpart * damp, Npoints))**2 # use again the inverse transform to get "exp(1j w t)"
if(sig > 0):
tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
sigmaR.append(prefR(wn, M, sshift[i], dt) * tmp)
return sigmaA, sigmaR, kF<|fim▁end|> | """
|
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package perform
import (
"encoding/json"
"io/ioutil"
"net/http"
"os"
"github.com/monax/compilers/definitions"
"github.com/monax/cli/config"
"github.com/monax/cli/log"
)
// Start the compile server
func StartServer(addrUnsecure, addrSecure, cert, key string) {
log.Warn("Hello I'm the marmots' compilers server")
config.InitMonaxDir()
	if err := os.Mkdir("binaries", 0755); err != nil {
		log.Error("problem creating binaries directory, exiting...")
os.Exit(1)
}
// Routes
http.HandleFunc("/", CompileHandler)
http.HandleFunc("/binaries", BinaryHandler)
// Use SSL ?
log.Debug(cert)
if addrSecure != "" {
log.Debug("Using HTTPS")
log.WithField("=>", addrSecure).Debug("Listening on...")
if err := http.ListenAndServeTLS(addrSecure, cert, key, nil); err != nil {
log.Error("Cannot serve on http port: ", err)
os.Exit(1)
}
}
if addrUnsecure != "" {
log.Debug("Using HTTP")
log.WithField("=>", addrUnsecure).Debug("Listening on...")
if err := http.ListenAndServe(addrUnsecure, nil); err != nil {
log.Error("Cannot serve on http port: ", err)
os.Exit(1)
}
}
}
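// Example (illustrative, not part of the original code): once running, the server
// accepts JSON-encoded definitions.Request payloads on "/" and
// definitions.BinaryRequest payloads on "/binaries", e.g.
//
//	curl -X POST --data @request.json http://localhost:9099/
//
// The port and the request file are assumptions; see the definitions package
// for the authoritative request fields.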
// Main http request handler
// Read request, compile, build response object, write
func CompileHandler(w http.ResponseWriter, r *http.Request) {
resp := compileResponse(w, r)
if resp == nil {
return
}
respJ, err := json.Marshal(resp)
	if err != nil {
		log.Errorln("failed to marshal", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
w.Write(respJ)
}
func BinaryHandler(w http.ResponseWriter, r *http.Request) {
// read the request body
body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		log.Errorln("err on read http request body", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
// unmarshall body into req struct
req := new(definitions.BinaryRequest)
err = json.Unmarshal(body, req)
	if err != nil {
		log.Errorln("err on json unmarshal of request", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
resp := linkBinaries(req)
respJ, err := json.Marshal(resp)
	if err != nil {
		log.Errorln("failed to marshal", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
w.Write(respJ)
}
// read in the files from the request, compile them
func compileResponse(w http.ResponseWriter, r *http.Request) *Response {
// read the request body
body, err := ioutil.ReadAll(r.Body)
if err != nil {
log.Errorln("err on read http request body", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return nil
}
// unmarshall body into req struct
req := new(definitions.Request)
err = json.Unmarshal(body, req)
if err != nil {
log.Errorln("err on json unmarshal of request", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return nil
}
log.WithFields(log.Fields{
"lang": req.Language,
// "script": string(req.Script),
"libs": req.Libraries,
"incl": req.Includes,
}).Debug("New Request")
cached := CheckCached(req.Includes, req.Language)
log.WithField("cached?", cached).Debug("Cached Item(s)")
var resp *Response
// if everything is cached, no need for request
if cached {<|fim▁hole|> resp, err = CachedResponse(req.Includes, req.Language)
if err != nil {
log.Errorln("err during caching response", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return nil
}
} else {
resp = compile(req)
resp.CacheNewResponse(*req)
}
PrintResponse(*resp, false)
return resp
}<|fim▁end|> | |
<|file_name|>glfw_windowing.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A windowing implementation using GLFW.
use windowing::{ApplicationMethods, WindowEvent, WindowMethods};
use windowing::{IdleWindowEvent, ResizeWindowEvent, LoadUrlWindowEvent, MouseWindowEventClass};
use windowing::{ScrollWindowEvent, ZoomWindowEvent, NavigationWindowEvent, FinishedWindowEvent};
use windowing::{QuitWindowEvent, MouseWindowClickEvent, MouseWindowMouseDownEvent, MouseWindowMouseUpEvent};
use windowing::RefreshWindowEvent;
use windowing::{Forward, Back};
use alert::{Alert, AlertMethods};
use extra::time::Timespec;
use extra::time;
use std::libc::c_int;
use std::local_data;
use geom::point::Point2D;
use geom::size::Size2D;
use servo_msg::compositor_msg::{IdleRenderState, RenderState, RenderingRenderState};
use servo_msg::compositor_msg::{FinishedLoading, Blank, Loading, PerformingLayout, ReadyState};
use glfw;
/// A structure responsible for setting up and tearing down the entire windowing system.
pub struct Application;
impl ApplicationMethods for Application {
fn new() -> Application {
// Per GLFW docs it's safe to set the error callback before calling
// glfwInit(), and this way we notice errors from init too.
do glfw::set_error_callback |_error_code, description| {
error!("GLFW error: {:s}", description);
};
glfw::init();
Application
}
}
impl Drop for Application {
fn drop(&mut self) {
drop_local_window();
glfw::terminate();
}
}
/// The type of a window.
pub struct Window {
glfw_window: glfw::Window,
event_queue: @mut ~[WindowEvent],
drag_origin: Point2D<c_int>,
mouse_down_button: @mut Option<glfw::MouseButton>,
mouse_down_point: @mut Point2D<c_int>,
ready_state: ReadyState,
render_state: RenderState,
last_title_set_time: Timespec,
}
impl WindowMethods<Application> for Window {
/// Creates a new window.
fn new(_: &Application) -> @mut Window {
// Create the GLFW window.
let glfw_window = glfw::Window::create(800, 600, "Servo", glfw::Windowed)
.expect("Failed to create GLFW window");
glfw_window.make_context_current();
// Create our window object.
let window = @mut Window {
glfw_window: glfw_window,
event_queue: @mut ~[],
drag_origin: Point2D(0 as c_int, 0),<|fim▁hole|>
ready_state: Blank,
render_state: IdleRenderState,
last_title_set_time: Timespec::new(0, 0),
};
install_local_window(window);
// Register event handlers.
do window.glfw_window.set_framebuffer_size_callback |_win, width, height| {
local_window().event_queue.push(ResizeWindowEvent(width as uint, height as uint))
}
do window.glfw_window.set_refresh_callback |_win| {
local_window().event_queue.push(RefreshWindowEvent)
}
do window.glfw_window.set_key_callback |_win, key, _scancode, action, mods| {
if action == glfw::Press {
local_window().handle_key(key, mods)
}
}
do window.glfw_window.set_mouse_button_callback |win, button, action, _mods| {
let (x, y) = win.get_cursor_pos();
//handle hidpi displays, since GLFW returns non-hi-def coordinates.
let (backing_size, _) = win.get_framebuffer_size();
let (window_size, _) = win.get_size();
let hidpi = (backing_size as f32) / (window_size as f32);
let x = x as f32 * hidpi;
let y = y as f32 * hidpi;
if button == glfw::MouseButtonLeft || button == glfw::MouseButtonRight {
local_window().handle_mouse(button, action, x as i32, y as i32);
}
}
do window.glfw_window.set_scroll_callback |win, x_offset, y_offset| {
let dx = (x_offset as f32) * 30.0;
let dy = (y_offset as f32) * 30.0;
let (x, y) = win.get_cursor_pos();
//handle hidpi displays, since GLFW returns non-hi-def coordinates.
let (backing_size, _) = win.get_framebuffer_size();
let (window_size, _) = win.get_size();
let hidpi = (backing_size as f32) / (window_size as f32);
let x = x as f32 * hidpi;
let y = y as f32 * hidpi;
local_window().event_queue.push(ScrollWindowEvent(Point2D(dx, dy), Point2D(x as i32, y as i32)));
}
window
}
/// Returns the size of the window.
fn size(&self) -> Size2D<f32> {
let (width, height) = self.glfw_window.get_framebuffer_size();
Size2D(width as f32, height as f32)
}
/// Presents the window to the screen (perhaps by page flipping).
fn present(&mut self) {
self.glfw_window.swap_buffers();
}
fn recv(@mut self) -> WindowEvent {
if !self.event_queue.is_empty() {
return self.event_queue.shift()
}
glfw::poll_events();
if self.glfw_window.should_close() {
QuitWindowEvent
} else if !self.event_queue.is_empty() {
self.event_queue.shift()
} else {
IdleWindowEvent
}
}
/// Sets the ready state.
fn set_ready_state(@mut self, ready_state: ReadyState) {
self.ready_state = ready_state;
self.update_window_title()
}
/// Sets the render state.
fn set_render_state(@mut self, render_state: RenderState) {
if self.ready_state == FinishedLoading &&
self.render_state == RenderingRenderState &&
render_state == IdleRenderState {
// page loaded
self.event_queue.push(FinishedWindowEvent);
}
self.render_state = render_state;
self.update_window_title()
}
fn hidpi_factor(@mut self) -> f32 {
let (backing_size, _) = self.glfw_window.get_framebuffer_size();
let (window_size, _) = self.glfw_window.get_size();
(backing_size as f32) / (window_size as f32)
}
}
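// Illustrative event-loop sketch (assumed usage, mirroring how a compositor would
// drive this window; not part of the original file):
//
//     let app: Application = ApplicationMethods::new();
//     let window: @mut Window = WindowMethods::new(&app);
//     loop {
//         match window.recv() {
//             QuitWindowEvent => break,
//             event => { /* forward `event` to the compositor */ }
//         }
//     }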
impl Window {
/// Helper function to set the window title in accordance with the ready state.
fn update_window_title(&mut self) {
let now = time::get_time();
if now.sec == self.last_title_set_time.sec {
return
}
self.last_title_set_time = now;
match self.ready_state {
Blank => {
self.glfw_window.set_title("blank — Servo")
}
Loading => {
self.glfw_window.set_title("Loading — Servo")
}
PerformingLayout => {
self.glfw_window.set_title("Performing Layout — Servo")
}
FinishedLoading => {
match self.render_state {
RenderingRenderState => {
self.glfw_window.set_title("Rendering — Servo")
}
IdleRenderState => {
self.glfw_window.set_title("Servo")
}
}
}
}
}
/// Helper function to handle keyboard events.
fn handle_key(&self, key: glfw::Key, mods: glfw::Modifiers) {
match key {
glfw::KeyEscape => self.glfw_window.set_should_close(true),
glfw::KeyL if mods.contains(glfw::Control) => self.load_url(), // Ctrl+L
glfw::KeyEqual if mods.contains(glfw::Control) => { // Ctrl-+
self.event_queue.push(ZoomWindowEvent(1.1));
}
glfw::KeyMinus if mods.contains(glfw::Control) => { // Ctrl--
self.event_queue.push(ZoomWindowEvent(0.90909090909));
}
glfw::KeyBackspace if mods.contains(glfw::Shift) => { // Shift-Backspace
self.event_queue.push(NavigationWindowEvent(Forward));
}
glfw::KeyBackspace => { // Backspace
self.event_queue.push(NavigationWindowEvent(Back));
}
_ => {}
}
}
/// Helper function to handle a click
fn handle_mouse(&self, button: glfw::MouseButton, action: glfw::Action, x: c_int, y: c_int) {
// FIXME(tkuehn): max pixel dist should be based on pixel density
let max_pixel_dist = 10f64;
let event = match action {
glfw::Press => {
*self.mouse_down_point = Point2D(x, y);
*self.mouse_down_button = Some(button);
MouseWindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32))
}
glfw::Release => {
match *self.mouse_down_button {
None => (),
Some(but) if button == but => {
let pixel_dist = *self.mouse_down_point - Point2D(x, y);
let pixel_dist = ((pixel_dist.x * pixel_dist.x +
pixel_dist.y * pixel_dist.y) as f64).sqrt();
if pixel_dist < max_pixel_dist {
let click_event = MouseWindowClickEvent(button as uint,
Point2D(x as f32, y as f32));
self.event_queue.push(MouseWindowEventClass(click_event));
}
}
Some(_) => (),
}
MouseWindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32))
}
            _ => fail!("I cannot recognize the type of mouse action that occurred. :-(")
};
self.event_queue.push(MouseWindowEventClass(event));
}
/// Helper function to pop up an alert box prompting the user to load a URL.
fn load_url(&self) {
let mut alert: Alert = AlertMethods::new("Navigate to:");
alert.add_prompt();
alert.run();
let value = alert.prompt_value();
if "" == value { // To avoid crashing on Linux.
self.event_queue.push(LoadUrlWindowEvent(~"http://purple.com/"))
} else {
self.event_queue.push(LoadUrlWindowEvent(value))
}
}
}
static TLS_KEY: local_data::Key<@mut Window> = &local_data::Key;
fn install_local_window(window: @mut Window) {
local_data::set(TLS_KEY, window);
}
fn drop_local_window() {
local_data::pop(TLS_KEY);
}
fn local_window() -> @mut Window {
local_data::get(TLS_KEY, |v| *v.unwrap())
}<|fim▁end|> |
mouse_down_button: @mut None,
mouse_down_point: @mut Point2D(0 as c_int, 0), |
<|file_name|>google.js<|end_file_name|><|fim▁begin|>(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)<|fim▁hole|>ga('send', 'pageview');<|fim▁end|> | })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-26989160-4', '3scape.me');
ga('require', 'displayfeatures');
ga('require', 'linkid', 'linkid.js'); |
<|file_name|>corpus_test.cpp<|end_file_name|><|fim▁begin|>// File: corpus_test.cpp
#include "crn_core.h"
#include "corpus_test.h"
#include "crn_find_files.h"
#include "crn_console.h"
#include "crn_image_utils.h"
#include "crn_hash.h"
#include "crn_hash_map.h"
#include "crn_radix_sort.h"
#include "crn_mipmapped_texture.h"
namespace crnlib {
corpus_tester::corpus_tester() {
m_bad_block_img.resize(256, 256);
m_next_bad_block_index = 0;
m_total_bad_block_files = 0;
}
void corpus_tester::print_comparative_metric_stats(const command_line_params& cmd_line_params, const crnlib::vector<image_utils::error_metrics>& stats1, const crnlib::vector<image_utils::error_metrics>& stats2, uint num_blocks_x, uint num_blocks_y) {
num_blocks_y;
crnlib::vector<uint> better_blocks;
crnlib::vector<uint> equal_blocks;
crnlib::vector<uint> worse_blocks;
crnlib::vector<float> delta_psnr;
for (uint i = 0; i < stats1.size(); i++) {
//uint bx = i % num_blocks_x;
//uint by = i / num_blocks_x;
const image_utils::error_metrics& em1 = stats1[i];
const image_utils::error_metrics& em2 = stats2[i];
if (em1.mPeakSNR < em2.mPeakSNR) {
worse_blocks.push_back(i);
delta_psnr.push_back((float)(em2.mPeakSNR - em1.mPeakSNR));
} else if (fabs(em1.mPeakSNR - em2.mPeakSNR) < .001f)
equal_blocks.push_back(i);
else
better_blocks.push_back(i);
}
console::printf("Num worse blocks: %u, %3.3f%%", worse_blocks.size(), worse_blocks.size() * 100.0f / stats1.size());
console::printf("Num equal blocks: %u, %3.3f%%", equal_blocks.size(), equal_blocks.size() * 100.0f / stats1.size());
console::printf("Num better blocks: %u, %3.3f%%", better_blocks.size(), better_blocks.size() * 100.0f / stats1.size());
console::printf("Num equal+better blocks: %u, %3.3f%%", equal_blocks.size() + better_blocks.size(), (equal_blocks.size() + better_blocks.size()) * 100.0f / stats1.size());
if (!cmd_line_params.has_key("nobadblocks")) {
crnlib::vector<uint> indices[2];
indices[0].resize(worse_blocks.size());
indices[1].resize(worse_blocks.size());
uint* pSorted_indices = NULL;
if (worse_blocks.size()) {
pSorted_indices = indirect_radix_sort(worse_blocks.size(), &indices[0][0], &indices[1][0], &delta_psnr[0], 0, sizeof(float), true);
console::printf("List of worse blocks sorted by delta PSNR:");
for (uint i = 0; i < worse_blocks.size(); i++) {
uint block_index = worse_blocks[pSorted_indices[i]];
uint bx = block_index % num_blocks_x;
uint by = block_index / num_blocks_x;
console::printf("%u. [%u,%u] %3.3f %3.3f %3.3f",
i,
bx, by,
stats1[block_index].mPeakSNR,
stats2[block_index].mPeakSNR,
stats2[block_index].mPeakSNR - stats1[block_index].mPeakSNR);
}
}
}
}
void corpus_tester::print_metric_stats(const crnlib::vector<image_utils::error_metrics>& stats, uint num_blocks_x, uint num_blocks_y) {
num_blocks_y;
image_utils::error_metrics best_metrics;
image_utils::error_metrics worst_metrics;
worst_metrics.mPeakSNR = 1e+6f;
vec2I best_loc;
vec2I worst_loc;
utils::zero_object(best_loc);
utils::zero_object(worst_loc);
double psnr_total = 0.0f;
double psnr2_total = 0.0f;
uint num_non_inf = 0;
uint num_inf = 0;
for (uint i = 0; i < stats.size(); i++) {
uint bx = i % num_blocks_x;
uint by = i / num_blocks_x;
<|fim▁hole|> best_loc.set(bx, by);
}
if (em < worst_metrics) {
worst_metrics = em;
worst_loc.set(bx, by);
}
if (em.mPeakSNR < 200.0f) {
psnr_total += em.mPeakSNR;
psnr2_total += em.mPeakSNR * em.mPeakSNR;
num_non_inf++;
} else {
num_inf++;
}
}
console::printf("Number of infinite PSNR blocks: %u", num_inf);
console::printf("Number of non-infinite PSNR blocks: %u", num_non_inf);
if (num_non_inf) {
psnr_total /= num_non_inf;
psnr2_total /= num_non_inf;
double psnr_std_dev = sqrt(psnr2_total - psnr_total * psnr_total);
console::printf("Average Non-Inf PSNR: %3.3f, Std dev: %3.3f", psnr_total, psnr_std_dev);
console::printf("Worst PSNR: %3.3f, Block Location: %i,%i", worst_metrics.mPeakSNR, worst_loc[0], worst_loc[1]);
console::printf("Best Non-Inf PSNR: %3.3f, Block Location: %i,%i", best_metrics.mPeakSNR, best_loc[0], best_loc[1]);
}
}
void corpus_tester::flush_bad_blocks() {
if (!m_next_bad_block_index)
return;
dynamic_string filename(cVarArg, "badblocks_%u.tga", m_total_bad_block_files);
console::printf("Writing bad block image: %s", filename.get_ptr());
image_utils::write_to_file(filename.get_ptr(), m_bad_block_img, image_utils::cWriteFlagIgnoreAlpha);
m_bad_block_img.set_all(color_quad_u8::make_black());
m_total_bad_block_files++;
m_next_bad_block_index = 0;
}
void corpus_tester::add_bad_block(image_u8& block) {
uint num_blocks_x = m_bad_block_img.get_block_width(4);
uint num_blocks_y = m_bad_block_img.get_block_height(4);
uint total_blocks = num_blocks_x * num_blocks_y;
m_bad_block_img.blit((m_next_bad_block_index % num_blocks_x) * 4, (m_next_bad_block_index / num_blocks_x) * 4, block);
m_next_bad_block_index++;
if (m_next_bad_block_index == total_blocks)
flush_bad_blocks();
}
static bool progress_callback(uint percentage_complete, void* pUser_data_ptr) {
static int s_prev_percentage_complete = -1;
pUser_data_ptr;
if (s_prev_percentage_complete != static_cast<int>(percentage_complete)) {
console::progress("%u%%", percentage_complete);
s_prev_percentage_complete = percentage_complete;
}
return true;
}
bool corpus_tester::test(const char* pCmd_line) {
console::printf("Command line:\n\"%s\"", pCmd_line);
static const command_line_params::param_desc param_desc_array[] =
{
{"corpus_test", 0, false},
{"in", 1, true},
{"deep", 0, false},
{"alpha", 0, false},
{"nomips", 0, false},
{"perceptual", 0, false},
{"endpointcaching", 0, false},
{"multithreaded", 0, false},
{"writehybrid", 0, false},
{"nobadblocks", 0, false},
};
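  // Example invocation (hypothetical; the executable name and input paths are assumptions):
  //   crunch -corpus_test -in textures/*.dds -deep -multithreaded -nobadblocks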
command_line_params cmd_line_params;
if (!cmd_line_params.parse(pCmd_line, CRNLIB_ARRAY_SIZE(param_desc_array), param_desc_array, true))
return false;
double total_time1 = 0, total_time2 = 0;
command_line_params::param_map_const_iterator it = cmd_line_params.begin();
for (; it != cmd_line_params.end(); ++it) {
if (it->first != "in")
continue;
if (it->second.m_values.empty()) {
console::error("Must follow /in parameter with a filename!\n");
return false;
}
for (uint in_value_index = 0; in_value_index < it->second.m_values.size(); in_value_index++) {
const dynamic_string& filespec = it->second.m_values[in_value_index];
find_files file_finder;
if (!file_finder.find(filespec.get_ptr(), find_files::cFlagAllowFiles | (cmd_line_params.has_key("deep") ? find_files::cFlagRecursive : 0))) {
console::warning("Failed finding files: %s", filespec.get_ptr());
continue;
}
if (file_finder.get_files().empty()) {
console::warning("No files found: %s", filespec.get_ptr());
return false;
}
const find_files::file_desc_vec& files = file_finder.get_files();
image_u8 o(4, 4), a(4, 4), b(4, 4);
uint first_channel = 0;
uint num_channels = 3;
bool perceptual = cmd_line_params.get_value_as_bool("perceptual", false);
if (perceptual) {
first_channel = 0;
num_channels = 0;
}
console::printf("Perceptual mode: %u", perceptual);
for (uint file_index = 0; file_index < files.size(); file_index++) {
const find_files::file_desc& file_desc = files[file_index];
console::printf("-------- Loading image: %s", file_desc.m_fullname.get_ptr());
image_u8 img;
if (!image_utils::read_from_file(img, file_desc.m_fullname.get_ptr(), 0)) {
console::warning("Failed loading image file: %s", file_desc.m_fullname.get_ptr());
continue;
}
if ((!cmd_line_params.has_key("alpha")) && img.is_component_valid(3)) {
for (uint y = 0; y < img.get_height(); y++)
for (uint x = 0; x < img.get_width(); x++)
img(x, y).a = 255;
img.set_component_valid(3, false);
}
mipmapped_texture orig_tex;
orig_tex.assign(crnlib_new<image_u8>(img));
if (!cmd_line_params.has_key("nomips")) {
mipmapped_texture::generate_mipmap_params genmip_params;
genmip_params.m_srgb = true;
console::printf("Generating mipmaps");
if (!orig_tex.generate_mipmaps(genmip_params, false)) {
console::error("Mipmap generation failed!");
return false;
}
}
console::printf("Compress 1");
mipmapped_texture tex1(orig_tex);
dxt_image::pack_params convert_params;
convert_params.m_endpoint_caching = cmd_line_params.get_value_as_bool("endpointcaching", 0, false);
convert_params.m_compressor = cCRNDXTCompressorCRN;
convert_params.m_quality = cCRNDXTQualityNormal;
convert_params.m_perceptual = perceptual;
convert_params.m_num_helper_threads = cmd_line_params.get_value_as_bool("multithreaded", 0, true) ? (g_number_of_processors - 1) : 0;
convert_params.m_pProgress_callback = progress_callback;
timer t;
t.start();
if (!tex1.convert(PIXEL_FMT_ETC1, false, convert_params)) {
console::error("Texture conversion failed!");
return false;
}
double time1 = t.get_elapsed_secs();
total_time1 += time1;
console::printf("Elapsed time: %3.3f", time1);
console::printf("Compress 2");
mipmapped_texture tex2(orig_tex);
convert_params.m_endpoint_caching = false;
convert_params.m_compressor = cCRNDXTCompressorCRN;
convert_params.m_quality = cCRNDXTQualitySuperFast;
t.start();
if (!tex2.convert(PIXEL_FMT_ETC1, false, convert_params)) {
console::error("Texture conversion failed!");
return false;
}
double time2 = t.get_elapsed_secs();
total_time2 += time2;
console::printf("Elapsed time: %3.3f", time2);
image_u8 hybrid_img(img.get_width(), img.get_height());
for (uint l = 0; l < orig_tex.get_num_levels(); l++) {
image_u8 orig_img, img1, img2;
image_u8* pOrig = orig_tex.get_level(0, l)->get_unpacked_image(orig_img, cUnpackFlagUncook | cUnpackFlagUnflip);
image_u8* pImg1 = tex1.get_level(0, l)->get_unpacked_image(img1, cUnpackFlagUncook | cUnpackFlagUnflip);
image_u8* pImg2 = tex2.get_level(0, l)->get_unpacked_image(img2, cUnpackFlagUncook | cUnpackFlagUnflip);
const uint num_blocks_x = pOrig->get_block_width(4);
const uint num_blocks_y = pOrig->get_block_height(4);
crnlib::vector<image_utils::error_metrics> metrics[2];
for (uint by = 0; by < num_blocks_y; by++) {
for (uint bx = 0; bx < num_blocks_x; bx++) {
pOrig->extract_block(o.get_ptr(), bx * 4, by * 4, 4, 4);
pImg1->extract_block(a.get_ptr(), bx * 4, by * 4, 4, 4);
pImg2->extract_block(b.get_ptr(), bx * 4, by * 4, 4, 4);
image_utils::error_metrics em1;
em1.compute(o, a, first_channel, num_channels);
image_utils::error_metrics em2;
em2.compute(o, b, first_channel, num_channels);
metrics[0].push_back(em1);
metrics[1].push_back(em2);
if (em1.mPeakSNR < em2.mPeakSNR) {
add_bad_block(o);
hybrid_img.blit(bx * 4, by * 4, b);
} else {
hybrid_img.blit(bx * 4, by * 4, a);
}
}
}
if (cmd_line_params.has_key("writehybrid"))
image_utils::write_to_file("hybrid.tga", hybrid_img, image_utils::cWriteFlagIgnoreAlpha);
console::printf("---- Mip level: %u, Total blocks: %ux%u, %u", l, num_blocks_x, num_blocks_y, num_blocks_x * num_blocks_y);
console::printf("Compressor 1:");
print_metric_stats(metrics[0], num_blocks_x, num_blocks_y);
console::printf("Compressor 2:");
print_metric_stats(metrics[1], num_blocks_x, num_blocks_y);
console::printf("Compressor 1 vs. 2:");
print_comparative_metric_stats(cmd_line_params, metrics[0], metrics[1], num_blocks_x, num_blocks_y);
image_utils::error_metrics em;
em.compute(*pOrig, *pImg1, 0, perceptual ? 0 : 3);
em.print("Compressor 1: ");
em.compute(*pOrig, *pImg2, 0, perceptual ? 0 : 3);
em.print("Compressor 2: ");
em.compute(*pOrig, hybrid_img, 0, perceptual ? 0 : 3);
em.print("Best of Both: ");
}
}
} // file_index
}
flush_bad_blocks();
console::printf("Total times: %4.3f vs. %4.3f", total_time1, total_time2);
return true;
}
} // namespace crnlib<|fim▁end|> | const image_utils::error_metrics& em = stats[i];
if ((em.mPeakSNR < 200.0f) && (em > best_metrics)) {
best_metrics = em; |
<|file_name|>broadcasting.py<|end_file_name|><|fim▁begin|>"""
========================
Broadcasting over arrays
========================
The term broadcasting describes how numpy treats arrays with different
shapes during arithmetic operations. Subject to certain constraints,
the smaller array is "broadcast" across the larger array so that they
have compatible shapes. Broadcasting provides a means of vectorizing
array operations so that looping occurs in C instead of Python. It does
this without making needless copies of data and usually leads to
efficient algorithm implementations. There are, however, cases where
broadcasting is a bad idea because it leads to inefficient use of memory
that slows computation.
NumPy operations are usually done element-by-element, which requires two
arrays to have exactly the same shape::
>>> a = np.array([1.0, 2.0, 3.0])
>>> b = np.array([2.0, 2.0, 2.0])
>>> a * b
array([ 2., 4., 6.])
NumPy's broadcasting rule relaxes this constraint when the arrays'
shapes meet certain constraints. The simplest broadcasting example occurs
when an array and a scalar value are combined in an operation:
>>> a = np.array([1.0, 2.0, 3.0])
>>> b = 2.0
>>> a * b
array([ 2., 4., 6.])
The result is equivalent to the previous example where ``b`` was an array.
We can think of the scalar ``b`` being *stretched* during the arithmetic
operation into an array with the same shape as ``a``. The new elements in
``b`` are simply copies of the original scalar. The stretching analogy is
only conceptual. NumPy is smart enough to use the original scalar value
without actually making copies, so that broadcasting operations are as
memory and computationally efficient as possible.
The code in the second example is more efficient than that in the first, since
broadcasting moves less memory around during the multiplication (``b`` is a
scalar rather than an array).
General Broadcasting Rules
==========================
When operating on two arrays, NumPy compares their shapes element-wise.
It starts with the trailing dimensions, and works its way forward. Two
dimensions are compatible when
1) they are equal, or
2) one of them is 1
If these conditions are not met, a
``ValueError: frames are not aligned`` exception is thrown, indicating that
the arrays have incompatible shapes. The size of the resulting array
is the maximum size along each dimension of the input arrays.
Arrays do not need to have the same *number* of dimensions. For example,
if you have a ``256x256x3`` array of RGB values, and you want to scale
each color in the image by a different value, you can multiply the image
by a one-dimensional array with 3 values. Lining up the sizes of the
trailing axes of these arrays according to the broadcast rules, shows that
they are compatible::
Image (3d array): 256 x 256 x 3
Scale (1d array): 3
Result (3d array): 256 x 256 x 3
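For example, scaling each color channel of an image by a different value::

 >>> image = np.random.rand(256, 256, 3)
 >>> scale = np.array([2.0, 0.5, 1.0])
 >>> (image * scale).shape
 (256, 256, 3)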
When either of the dimensions compared is one, the larger of the two is
used. In other words, the smaller of two axes is stretched or "copied"
to match the other.
In the following example, both the ``A`` and ``B`` arrays have axes with
length one that are expanded to a larger size during the broadcast
operation::
A (4d array): 8 x 1 x 6 x 1
B (3d array): 7 x 1 x 5
Result (4d array): 8 x 7 x 6 x 5
Here are some more examples::
A (2d array): 5 x 4
B (1d array): 1
Result (2d array): 5 x 4
A (2d array): 5 x 4
B (1d array): 4
Result (2d array): 5 x 4
A (3d array): 15 x 3 x 5
B (3d array): 15 x 1 x 5
Result (3d array): 15 x 3 x 5
A (3d array): 15 x 3 x 5
B (2d array): 3 x 5
Result (3d array): 15 x 3 x 5
A (3d array): 15 x 3 x 5
B (2d array): 3 x 1
Result (3d array): 15 x 3 x 5
Here are examples of shapes that do not broadcast::
A (1d array): 3
B (1d array): 4 # trailing dimensions do not match
A (2d array): 2 x 1
B (3d array): 8 x 4 x 3 # second from last dimensions mismatch
An example of broadcasting in practice::
>>> x = np.arange(4)
>>> xx = x.reshape(4,1)
>>> y = np.ones(5)
>>> z = np.ones((3,4))
>>> x.shape
(4,)
>>> y.shape
(5,)
>>> x + y
<type 'exceptions.ValueError'>: shape mismatch: objects cannot be broadcast to a single shape
>>> xx.shape
(4, 1)
>>> y.shape
(5,)
>>> (xx + y).shape
(4, 5)
>>> xx + y
array([[ 1., 1., 1., 1., 1.],
[ 2., 2., 2., 2., 2.],
[ 3., 3., 3., 3., 3.],
[ 4., 4., 4., 4., 4.]])
>>> x.shape
(4,)
>>> z.shape
(3, 4)
>>> (x + z).shape
(3, 4)
>>> x + z
array([[ 1., 2., 3., 4.],
[ 1., 2., 3., 4.],<|fim▁hole|>any other outer operation) of two arrays. The following example shows an
outer addition operation of two 1-d arrays::
>>> a = np.array([0.0, 10.0, 20.0, 30.0])
>>> b = np.array([1.0, 2.0, 3.0])
>>> a[:, np.newaxis] + b
array([[ 1., 2., 3.],
[ 11., 12., 13.],
[ 21., 22., 23.],
[ 31., 32., 33.]])
Here the ``newaxis`` index operator inserts a new axis into ``a``,
making it a two-dimensional ``4x1`` array. Combining the ``4x1`` array
with ``b``, which has shape ``(3,)``, yields a ``4x3`` array.
See `this article <http://www.scipy.org/EricsBroadcastingDoc>`_
for illustrations of broadcasting concepts.
"""<|fim▁end|> | [ 1., 2., 3., 4.]])
Broadcasting provides a convenient way of taking the outer product (or |
<|file_name|>sprites.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 *-*
import pygame
from const import *
class MultiSprite(object):
def __init__(self, path, res_x, res_y=None, offX=0, offY=0, gX=0, gY=0):
"""path = file path of the Multi sprite.
res_x and res_y are the X, Y size of each sub sprite.
offX and offY can specify an internal offset which are applied inside of a field (used for char sets).
gX and gY specify global offsets (used for char sets)."""
self.sprite = pygame.image.load(path)
self.res_x = res_x
self.res_y = res_y if res_y else res_x
self.offX = offX
self.offY = offY
self.gX = gX<|fim▁hole|>
def draw2dungeon(self, x, y, target, t_x=SCALE, t_y=SCALE, pX=0, pY=0): # Dont even ask ^^
"""x and y are the position of the subsprite in the MultiSprite.
target is the target surface and
t_x and t_y are the positions to where the subsprite shall be blitted.
		All coordinates are scaled accordingly inside this function.
pX and pY are additional Pixel Offsets because we can"""
# make this a _little_ bit more readable ^^
rx, ry = self.res_x, self.res_y
offX, offY = self.offX, self.offY
gX, gY = self.gX, self.gY
subsprite_rect = (gX+rx*x+pX, gY+ry*y+pY, rx, ry) # square around the sub sprite we want to draw
topleft = (t_x*SCALE+offX, t_y*SCALE+offY) # topleft target coordinates; here goes the subsprite
#print subsprite_rect, topleft
target.blit(self.sprite, topleft, subsprite_rect)
class TileSetMultiSprite(MultiSprite):
def __init__(self, path, res_x, res_y=None):
super(TileSetMultiSprite, self).__init__(path, res_x, res_y)
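# Illustrative usage (hypothetical asset path and coordinates):
#   tiles = TileSetMultiSprite("gfx/tileset.png", 16)
#   tiles.draw2dungeon(2, 5, screen, 3, 4)  # blit tile (2, 5) of the set to grid cell (3, 4)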
class CharSetMultiSprite(MultiSprite):
def __init__(self, path, res_x, res_y=None, offX=0, offY=0, gX=0, gY=0):
super(CharSetMultiSprite, self).__init__(path, res_x, res_y, offX, offY, gX, gY)<|fim▁end|> | self.gY = gY |
<|file_name|>MDMC.py<|end_file_name|><|fim▁begin|># coding=utf-8
from abc import ABCMeta
import logging
from typing import Iterator
import numpy as np
from mdlmc.topo.topology import NeighborTopology
from ..misc.tools import remember_last_element
from ..LMC.output import CovalentAutocorrelation, MeanSquareDisplacement
from ..cython_exts.LMC.PBCHelper import AtomBox
from ..LMC.jumprate_generators import JumpRate
logger = logging.getLogger(__name__)
logging.getLogger("matplotlib").setLevel(logging.WARN)
def get_git_version():
from mdlmc.version_hash import commit_hash, commit_date, commit_message
print("# Hello. I am from commit {}".format(commit_hash))
print("# Commit Date: {}".format(commit_date))
print("# Commit Message: {}".format(commit_message))
class KMCLattice:
"""Implementation of the time-dependent Kinetic Monte Carlo Scheme"""
__show_in_config__ = True
__no_config_parameter__ = ["topology", "atom_box", "jumprate_function"]
def __init__(self, topology: "NeighborTopology", *,
atom_box: "AtomBox",
jumprate_function: "JumpRate",
lattice_size: int,
proton_number: int,
donor_atoms: str,
time_step: float,
extra_atoms: str = None):
"""
Parameters
----------
        topology
lattice_size
proton_number
jumprate_function
donor_atoms:
name of donor / acceptor atoms
extra_atoms:
extra atoms used for the determination of the jump rate
"""
self.topology = topology
self._lattice = self._initialize_lattice(lattice_size, proton_number)
        # Check whether the topology object has the method "take_lattice_reference"
if hasattr(self.topology, "take_lattice_reference"):<|fim▁hole|> self.topology.take_lattice_reference(self._lattice)
self._atom_box = atom_box
self._jumprate_function = jumprate_function
self._donor_atoms = donor_atoms
self._time_step = time_step
self._extra_atoms = extra_atoms
def _initialize_lattice(self, lattice_size, proton_number):
lattice = np.zeros(lattice_size, dtype=np.int32)
lattice[:proton_number] = range(1, proton_number + 1)
np.random.shuffle(lattice)
return lattice
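    # e.g. lattice_size=6, proton_number=2 could yield array([0, 2, 0, 1, 0, 0], dtype=int32)
    # (zero marks an empty site; positive entries are proton indices)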
def __iter__(self) -> Iterator[np.ndarray]:
yield from self.continuous_output()
def continuous_output(self):
current_frame_number = 0
topo = self.topology
lattice = self.lattice
topology_iterator, last_topo = remember_last_element(iter(self.topology))
jumprate_iterator, last_jumprates = remember_last_element(
jumprate_generator(self._jumprate_function, self.lattice, topology_iterator))
sum_of_jumprates = (np.sum(jumpr) for _, _, jumpr in jumprate_iterator)
kmc_routine = self.fastforward_to_next_jump(sum_of_jumprates,
self._time_step)
for f, df, kmc_time in kmc_routine:
current_time = kmc_time
logger.debug("Next jump at time %.2f", current_time)
logger.debug("df = %s; dt = %s", df, kmc_time)
logger.debug("Go to frame %s", f)
for frame in self.topology.get_cached_frames():
yield current_frame_number, current_time, frame
current_frame_number += 1
proton_idx = self.move_proton(*last_jumprates(), lattice)
topo.update_time_of_last_jump(proton_idx, kmc_time)
def move_proton(self, start, dest, jump_rates, lattice):
"""Given the hopping rates between the acceptor atoms, choose a connection randomly and
move the proton."""
start_occupied_destination_free = filter_allowed_transitions(start, dest, lattice)
start = start[start_occupied_destination_free]
dest = dest[start_occupied_destination_free]
jump_rates = jump_rates[start_occupied_destination_free]
cumsum = np.cumsum(jump_rates)
random_draw = np.random.uniform(0, cumsum[-1])
transition_idx = np.searchsorted(cumsum, random_draw)
start_idx = start[transition_idx]
destination_idx = dest[transition_idx]
proton_idx = self._lattice[start_idx]
logger.debug("Particle %s moves from %s to %s", proton_idx, start_idx, destination_idx)
logger.debug("lattice[%s] = %s", destination_idx, self._lattice[destination_idx])
self._lattice[destination_idx] = proton_idx
self._lattice[start_idx] = 0
return proton_idx
@staticmethod
def fastforward_to_next_jump(jumprates, dt):
"""Implements Kinetic Monte Carlo with time-dependent rates.
Parameters
----------
jumprates : generator / iterator
Unit: femtosecond^{-1}
Proton jump rate from an oxygen site to any neighbor
dt : float
Trajectory time step
Returns
-------
frame: int
Frame at which the next event occurs
delta_frame : int
Difference between frame and the index at which the next event occurs
kmc_time : float
Time of the next event
"""
sweep, kmc_time = 0, 0
current_rate = next(jumprates)
while True:
time_selector = -np.log(1 - np.random.random())
# Handle case where time selector is so small that the next frame is not reached
t_trial = time_selector / current_rate
if (kmc_time + t_trial) // dt == kmc_time // dt:
kmc_time += t_trial
delta_frame = 0
else:
delta_t, delta_frame = dt - kmc_time % dt, 1
current_probsum = current_rate * delta_t
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
while next_probsum < time_selector:
delta_frame += 1
current_probsum = next_probsum
next_rate = next(jumprates)
next_probsum = current_probsum + next_rate * dt
rest = time_selector - current_probsum
delta_t += (delta_frame - 1) * dt + rest / next_rate
kmc_time += delta_t
sweep += delta_frame
yield sweep, delta_frame, kmc_time
def xyz_output(self, particle_type: str = "H"):
for f, t, frame in self:
particle_positions = frame[self.donor_atoms][self.occupied_sites]
particle_positions.atom_names = particle_type
yield frame.append(particle_positions)
def observables_output(self, reset_frequency: int, print_frequency: int):
"""
Parameters
----------
        reset_frequency: int
            Number of frames after which the observables (covalent autocorrelation and MSD) are reset
        print_frequency: int
            Number of frames between two consecutive yields of the observables
Returns
-------
"""
kmc_iterator = iter(self)
donor_sites = self.donor_atoms
current_frame_number, current_time, frame = next(kmc_iterator)
autocorr = CovalentAutocorrelation(self.lattice)
msd = MeanSquareDisplacement(frame[donor_sites].atom_positions, self.lattice, self._atom_box)
for current_frame_number, current_time, frame in kmc_iterator:
if current_frame_number % reset_frequency == 0:
autocorr.reset(self.lattice)
msd.reset_displacement()
msd.update_displacement(frame[donor_sites].atom_positions, self.lattice)
if current_frame_number % print_frequency == 0:
auto = autocorr.calculate(self.lattice)
msd_result = msd.msd()
yield current_frame_number, current_time, msd_result, auto
@property
def lattice(self):
return self._lattice
@property
def donor_atoms(self):
# TODO: not needed (?)
return self._donor_atoms
@property
def extra_atoms(self):
return self._extra_atoms
@property
def occupied_sites(self):
return np.where(self._lattice > 0)[0]
def jumprate_generator(jumprate_function, lattice, topology_iterator):
for start, destination, *colvars in topology_iterator:
omega = jumprate_function(*colvars)
# select only jumprates from donors which are occupied
start_occupied_destination_free = filter_allowed_transitions(start, destination, lattice)
omega_allowed = omega[start_occupied_destination_free]
start_allowed = start[start_occupied_destination_free]
destination_allowed = destination[start_occupied_destination_free]
yield start_allowed, destination_allowed, omega_allowed
def filter_allowed_transitions(start, destination, lattice):
lattice_is_occupied = lattice > 0
occupied_sites, = np.where(lattice_is_occupied)
unoccupied_sites, = np.where(~lattice_is_occupied)
occupied_mask = np.in1d(start, occupied_sites)
unoccupied_mask = np.in1d(destination, unoccupied_sites)
start_occupied_destination_free = occupied_mask & unoccupied_mask
return start_occupied_destination_free
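# Illustrative example: with lattice = np.array([1, 0, 2]), start = np.array([0, 1, 2])
# and destination = np.array([1, 0, 1]), only the transitions 0 -> 1 and 2 -> 1 are
# allowed (start occupied, destination free), so the returned mask is [True, False, True].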
class Output(metaclass=ABCMeta):
__show_in_config__ = True
__no_config_parameter__ = ["kmc"]
class XYZOutput(Output):
def __init__(self,
kmc: KMCLattice,
particle_type: str) -> None:
self.kmc = kmc
self.particle_type = particle_type
def __iter__(self):
yield from self.kmc.xyz_output(self.particle_type)
class ObservablesOutput(Output):
def __init__(self,
kmc: KMCLattice,
reset_frequency: int,
print_frequency: int) -> None:
self.kmc = kmc
self.reset_frequency = reset_frequency
self.print_frequency = print_frequency
def __iter__(self):
yield from self.kmc.observables_output(self.reset_frequency, self.print_frequency)<|fim▁end|> | logger.debug("topology has method take_lattice_reference") |
<|file_name|>random_string.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common import password_gen
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
from heat.engine import translation
<|fim▁hole|>
    This is useful for configuring passwords and secrets on services. A random
    string can be generated from specified character sequences, which means
    that all characters will be randomly chosen from the specified sequences,
    or from character classes, e.g. letterdigits, which means that all
    characters will be randomly chosen from the union of ascii letters and
    digits. The output string will be a randomly generated string of the
    specified length (or of length 32, if the length property is not
    specified).
"""
support_status = support.SupportStatus(version='2014.1')
PROPERTIES = (
LENGTH, SEQUENCE, CHARACTER_CLASSES, CHARACTER_SEQUENCES,
SALT,
) = (
'length', 'sequence', 'character_classes', 'character_sequences',
'salt',
)
_CHARACTER_CLASSES_KEYS = (
CHARACTER_CLASSES_CLASS, CHARACTER_CLASSES_MIN,
) = (
'class', 'min',
)
_CHARACTER_SEQUENCES = (
CHARACTER_SEQUENCES_SEQUENCE, CHARACTER_SEQUENCES_MIN,
) = (
'sequence', 'min',
)
ATTRIBUTES = (
VALUE,
) = (
'value',
)
properties_schema = {
LENGTH: properties.Schema(
properties.Schema.INTEGER,
_('Length of the string to generate.'),
default=32,
constraints=[
constraints.Range(1, 512),
]
),
SEQUENCE: properties.Schema(
properties.Schema.STRING,
_('Sequence of characters to build the random string from.'),
constraints=[
constraints.AllowedValues(password_gen.CHARACTER_CLASSES),
],
support_status=support.SupportStatus(
status=support.HIDDEN,
version='5.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
message=_('Use property %s.') % CHARACTER_CLASSES,
version='2014.2'
)
)
),
CHARACTER_CLASSES: properties.Schema(
properties.Schema.LIST,
_('A list of character class and their constraints to generate '
'the random string from.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
CHARACTER_CLASSES_CLASS: properties.Schema(
properties.Schema.STRING,
(_('A character class and its corresponding %(min)s '
'constraint to generate the random string from.')
% {'min': CHARACTER_CLASSES_MIN}),
constraints=[
constraints.AllowedValues(
password_gen.CHARACTER_CLASSES),
],
default=password_gen.LETTERS_DIGITS),
CHARACTER_CLASSES_MIN: properties.Schema(
properties.Schema.INTEGER,
_('The minimum number of characters from this '
'character class that will be in the generated '
'string.'),
default=1,
constraints=[
constraints.Range(1, 512),
]
)
}
),
# add defaults for backward compatibility
default=[{CHARACTER_CLASSES_CLASS: password_gen.LETTERS_DIGITS,
CHARACTER_CLASSES_MIN: 1}]
),
CHARACTER_SEQUENCES: properties.Schema(
properties.Schema.LIST,
_('A list of character sequences and their constraints to '
'generate the random string from.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
CHARACTER_SEQUENCES_SEQUENCE: properties.Schema(
properties.Schema.STRING,
_('A character sequence and its corresponding %(min)s '
'constraint to generate the random string '
'from.') % {'min': CHARACTER_SEQUENCES_MIN},
required=True),
CHARACTER_SEQUENCES_MIN: properties.Schema(
properties.Schema.INTEGER,
_('The minimum number of characters from this '
'sequence that will be in the generated '
'string.'),
default=1,
constraints=[
constraints.Range(1, 512),
]
)
}
)
),
SALT: properties.Schema(
properties.Schema.STRING,
_('Value which can be set or changed on stack update to trigger '
'the resource for replacement with a new random string. The '
'salt value itself is ignored by the random generator.')
),
}
attributes_schema = {
VALUE: attributes.Schema(
_('The random string generated by this resource. This value is '
'also available by referencing the resource.'),
cache_mode=attributes.Schema.CACHE_NONE,
type=attributes.Schema.STRING
),
}
def translation_rules(self, props):
if props.get(self.SEQUENCE):
return [
translation.TranslationRule(
props,
translation.TranslationRule.ADD,
[self.CHARACTER_CLASSES],
[{self.CHARACTER_CLASSES_CLASS: props.get(
self.SEQUENCE),
self.CHARACTER_CLASSES_MIN: 1}]),
translation.TranslationRule(
props,
translation.TranslationRule.DELETE,
[self.SEQUENCE]
)
]
def _generate_random_string(self, char_sequences, char_classes, length):
seq_mins = [
password_gen.special_char_class(
char_seq[self.CHARACTER_SEQUENCES_SEQUENCE],
char_seq[self.CHARACTER_SEQUENCES_MIN])
for char_seq in char_sequences]
char_class_mins = [
password_gen.named_char_class(
char_class[self.CHARACTER_CLASSES_CLASS],
char_class[self.CHARACTER_CLASSES_MIN])
for char_class in char_classes]
return password_gen.generate_password(length,
seq_mins + char_class_mins)
def validate(self):
super(RandomString, self).validate()
char_sequences = self.properties[self.CHARACTER_SEQUENCES]
char_classes = self.properties[self.CHARACTER_CLASSES]
def char_min(char_dicts, min_prop):
if char_dicts:
return sum(char_dict[min_prop] for char_dict in char_dicts)
return 0
length = self.properties[self.LENGTH]
min_length = (char_min(char_sequences, self.CHARACTER_SEQUENCES_MIN) +
char_min(char_classes, self.CHARACTER_CLASSES_MIN))
if min_length > length:
msg = _("Length property cannot be smaller than combined "
"character class and character sequence minimums")
raise exception.StackValidationFailed(message=msg)
def handle_create(self):
char_sequences = self.properties[self.CHARACTER_SEQUENCES] or []
char_classes = self.properties[self.CHARACTER_CLASSES] or []
length = self.properties[self.LENGTH]
random_string = self._generate_random_string(char_sequences,
char_classes,
length)
self.data_set('value', random_string, redact=True)
self.resource_id_set(self.physical_resource_name())
def _resolve_attribute(self, name):
if name == self.VALUE:
return self.data().get(self.VALUE)
def get_reference_id(self):
if self.resource_id is not None:
return self.data().get('value')
else:
return six.text_type(self.name)
def resource_mapping():
return {
'OS::Heat::RandomString': RandomString,
}<|fim▁end|> |
class RandomString(resource.Resource):
"""A resource which generates a random string. |
<|file_name|>test_worker.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division, absolute_import
from numbers import Integral
from operator import add
import os
import shutil
import sys
import traceback
import logging
import re
import pytest
from toolz import pluck
from tornado import gen
from tornado.ioloop import TimeoutError
from distributed.batched import BatchedStream
from distributed.core import rpc, dumps, loads, connect, read, write
from distributed.client import _wait
from distributed.scheduler import Scheduler
from distributed.sizeof import sizeof
from distributed.worker import Worker, error_message, logger
from distributed.utils import ignoring
from distributed.utils_test import (loop, inc, gen_cluster,
slow, slowinc, throws, current_loop, gen_test)
def test_worker_ncores():
from distributed.worker import _ncores
w = Worker('127.0.0.1', 8019)
try:
assert w.executor._max_workers == _ncores
finally:
shutil.rmtree(w.local_dir)
def test_identity():
w = Worker('127.0.0.1', 8019)
ident = w.identity(None)
assert ident['type'] == 'Worker'
assert ident['scheduler'] == ('127.0.0.1', 8019)
assert isinstance(ident['ncores'], int)
assert isinstance(ident['memory_limit'], int)
def test_health():
w = Worker('127.0.0.1', 8019)
d = w.host_health()
assert isinstance(d, dict)
d = w.host_health()
try:
import psutil
except ImportError:
pass
else:
assert 'disk-read' in d
assert 'disk-write' in d
assert 'network-recv' in d
assert 'network-send' in d
@gen_cluster()
def test_worker_bad_args(c, a, b):
aa = rpc(ip=a.ip, port=a.port)
bb = rpc(ip=b.ip, port=b.port)
class NoReprObj(object):
""" This object cannot be properly represented as a string. """
def __str__(self):
raise ValueError("I have no str representation.")
def __repr__(self):
raise ValueError("I have no repr representation.")
response = yield aa.compute(key='x',
function=dumps(NoReprObj),
args=dumps(()),
who_has={})
assert not a.active
assert response['status'] == 'OK'
assert a.data['x']
assert isinstance(response['compute_start'], float)
assert isinstance(response['compute_stop'], float)
assert isinstance(response['thread'], Integral)
def bad_func(*args, **kwargs):
1 / 0
class MockLoggingHandler(logging.Handler):
"""Mock logging handler to check for expected logs."""
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
hdlr = MockLoggingHandler()
old_level = logger.level
logger.setLevel(logging.DEBUG)
logger.addHandler(hdlr)
response = yield bb.compute(key='y',
function=dumps(bad_func),
args=dumps(['x']),
kwargs=dumps({'k': 'x'}),
who_has={'x': [a.address]})
assert not b.active
assert response['status'] == 'error'
# Make sure job died because of bad func and not because of bad
# argument.
assert isinstance(loads(response['exception']), ZeroDivisionError)
if sys.version_info[0] >= 3:
assert any('1 / 0' in line
for line in pluck(3, traceback.extract_tb(
loads(response['traceback'])))
if line)
assert hdlr.messages['warning'][0] == " Compute Failed\n" \
"Function: bad_func\n" \
"args: (< could not convert arg to str >)\n" \
"kwargs: {'k': < could not convert arg to str >}\n"
assert re.match(r"^Send compute response to scheduler: y, " \
"\{.*'args': \(< could not convert arg to str >\), .*" \
"'kwargs': \{'k': < could not convert arg to str >\}.*\}",
hdlr.messages['debug'][0]) or \
re.match("^Send compute response to scheduler: y, " \
"\{.*'kwargs': \{'k': < could not convert arg to str >\}, .*" \
"'args': \(< could not convert arg to str >\).*\}",
hdlr.messages['debug'][0])
logger.setLevel(old_level)
# Now we check that both workers are still alive.
assert not a.active
response = yield aa.compute(key='z',
function=dumps(add),
args=dumps([1, 2]),
who_has={},
close=True)
assert not a.active
assert response['status'] == 'OK'
assert a.data['z'] == 3
assert isinstance(response['compute_start'], float)
assert isinstance(response['compute_stop'], float)
assert isinstance(response['thread'], Integral)
assert not b.active
response = yield bb.compute(key='w',
function=dumps(add),
args=dumps([1, 2]),
who_has={},
close=True)
assert not b.active
assert response['status'] == 'OK'
assert b.data['w'] == 3
assert isinstance(response['compute_start'], float)
assert isinstance(response['compute_stop'], float)
assert isinstance(response['thread'], Integral)
aa.close_streams()
bb.close_streams()
@gen_cluster()
def test_worker(c, a, b):
aa = rpc(ip=a.ip, port=a.port)
bb = rpc(ip=b.ip, port=b.port)
result = yield aa.identity()
assert not a.active
response = yield aa.compute(key='x',
function=dumps(add),
args=dumps([1, 2]),
who_has={},
close=True)
assert not a.active
assert response['status'] == 'OK'
assert a.data['x'] == 3
assert isinstance(response['compute_start'], float)
assert isinstance(response['compute_stop'], float)
assert isinstance(response['thread'], Integral)
response = yield bb.compute(key='y',
function=dumps(add),
args=dumps(['x', 10]),
who_has={'x': [a.address]})
assert response['status'] == 'OK'
assert b.data['y'] == 13
assert response['nbytes'] == sizeof(b.data['y'])
assert isinstance(response['transfer_start'], float)
assert isinstance(response['transfer_stop'], float)
def bad_func():
1 / 0
response = yield bb.compute(key='z',
function=dumps(bad_func),
args=dumps(()),
close=True)
assert not b.active
assert response['status'] == 'error'
assert isinstance(loads(response['exception']), ZeroDivisionError)
if sys.version_info[0] >= 3:
assert any('1 / 0' in line
for line in pluck(3, traceback.extract_tb(
loads(response['traceback'])))
if line)
aa.close_streams()
yield a._close()
assert a.address not in c.ncores and b.address in c.ncores
assert list(c.ncores.keys()) == [b.address]
assert isinstance(b.address, str)
assert b.ip in b.address
assert str(b.port) in b.address
bb.close_streams()
def test_compute_who_has(current_loop):
@gen.coroutine
def f():
s = Scheduler()
s.listen(0)
x = Worker(s.ip, s.port, ip='127.0.0.1')
y = Worker(s.ip, s.port, ip='127.0.0.1')
z = Worker(s.ip, s.port, ip='127.0.0.1')
x.data['a'] = 1
y.data['a'] = 2
yield [x._start(), y._start(), z._start()]
zz = rpc(ip=z.ip, port=z.port)
yield zz.compute(function=dumps(inc),
args=dumps(('a',)),
who_has={'a': [x.address]},
key='b')
assert z.data['b'] == 2
if 'a' in z.data:
del z.data['a']
yield zz.compute(function=dumps(inc),
args=dumps(('a',)),
who_has={'a': [y.address]},
key='c')
assert z.data['c'] == 3
yield [x._close(), y._close(), z._close()]
zz.close_streams()
current_loop.run_sync(f, timeout=5)
@gen_cluster()
def dont_test_workers_update_center(s, a, b):
aa = rpc(ip=a.ip, port=a.port)
response = yield aa.update_data(data={'x': dumps(1), 'y': dumps(2)})
assert response['status'] == 'OK'
assert response['nbytes'] == {'x': sizeof(1), 'y': sizeof(2)}
assert a.data == {'x': 1, 'y': 2}
assert s.who_has == {'x': {a.address},
'y': {a.address}}
assert s.has_what[a.address] == {'x', 'y'}
yield aa.delete_data(keys=['x'], close=True)
assert not s.who_has['x']
assert all('x' not in s for s in c.has_what.values())
aa.close_streams()
@slow
@gen_cluster()
def dont_test_delete_data_with_missing_worker(c, a, b):
bad = '127.0.0.1:9001' # this worker doesn't exist
c.who_has['z'].add(bad)
c.who_has['z'].add(a.address)
c.has_what[bad].add('z')
c.has_what[a.address].add('z')
a.data['z'] = 5
cc = rpc(ip=c.ip, port=c.port)
yield cc.delete_data(keys=['z']) # TODO: this hangs for a while
assert 'z' not in a.data
assert not c.who_has['z']
assert not c.has_what[bad]
assert not c.has_what[a.address]
cc.close_streams()
@gen_cluster()
def test_upload_file(s, a, b):
assert not os.path.exists(os.path.join(a.local_dir, 'foobar.py'))
assert not os.path.exists(os.path.join(b.local_dir, 'foobar.py'))
assert a.local_dir != b.local_dir
aa = rpc(ip=a.ip, port=a.port)
bb = rpc(ip=b.ip, port=b.port)
yield [aa.upload_file(filename='foobar.py', data=b'x = 123'),
bb.upload_file(filename='foobar.py', data='x = 123')]
assert os.path.exists(os.path.join(a.local_dir, 'foobar.py'))
assert os.path.exists(os.path.join(b.local_dir, 'foobar.py'))
def g():
import foobar
return foobar.x
yield aa.compute(function=dumps(g),
key='x')
result = yield aa.get_data(keys=['x'])
assert result == {'x': dumps(123)}
yield a._close()
yield b._close()
aa.close_streams()
bb.close_streams()
assert not os.path.exists(os.path.join(a.local_dir, 'foobar.py'))
@gen_cluster()
def test_upload_egg(s, a, b):
eggname = 'mytestegg-1.0.0-py3.4.egg'
local_file = __file__.replace('test_worker.py', eggname)
assert not os.path.exists(os.path.join(a.local_dir, eggname))
assert not os.path.exists(os.path.join(b.local_dir, eggname))
assert a.local_dir != b.local_dir
aa = rpc(ip=a.ip, port=a.port)
bb = rpc(ip=b.ip, port=b.port)
with open(local_file, 'rb') as f:
payload = f.read()
yield [aa.upload_file(filename=eggname, data=payload),
bb.upload_file(filename=eggname, data=payload)]
assert os.path.exists(os.path.join(a.local_dir, eggname))
assert os.path.exists(os.path.join(b.local_dir, eggname))
def g(x):
import testegg
return testegg.inc(x)
yield aa.compute(function=dumps(g), key='x', args=dumps((10,)))
result = yield aa.get_data(keys=['x'])
assert result == {'x': dumps(10 + 1)}
yield a._close()
yield b._close()
aa.close_streams()
bb.close_streams()
assert not os.path.exists(os.path.join(a.local_dir, eggname))
@gen_cluster()
def test_broadcast(s, a, b):
cc = rpc(ip=s.ip, port=s.port)
results = yield cc.broadcast(msg={'op': 'ping'})
assert results == {a.address: b'pong', b.address: b'pong'}
cc.close_streams()
@gen_test()
def test_worker_with_port_zero():
s = Scheduler()
s.listen(8007)
w = Worker(s.ip, s.port, ip='127.0.0.1')
yield w._start()
assert isinstance(w.port, int)
assert w.port > 1024
@slow
def test_worker_waits_for_center_to_come_up(current_loop):
@gen.coroutine
def f():
w = Worker('127.0.0.1', 8007, ip='127.0.0.1')
yield w._start()
try:
current_loop.run_sync(f, timeout=4)
except TimeoutError:
pass
@gen_cluster()
def test_worker_task(s, a, b):
aa = rpc(ip=a.ip, port=a.port)
yield aa.compute(task=dumps((inc, 1)), key='x', report=False)
assert a.data['x'] == 2
@gen_cluster()
def test_worker_task_data(s, a, b):
aa = rpc(ip=a.ip, port=a.port)
yield aa.compute(task=dumps(2), key='x', report=False)
assert a.data['x'] == 2
@gen_cluster()
def test_worker_task_bytes(s, a, b):
aa = rpc(ip=a.ip, port=a.port)
yield aa.compute(task=dumps((inc, 1)), key='x', report=False)
assert a.data['x'] == 2
yield aa.compute(function=dumps(inc), args=dumps((10,)), key='y',
report=False)
assert a.data['y'] == 11
def test_error_message():
class MyException(Exception):
def __init__(self, a, b):
self.args = (a + b,)
def __str__(self):
return "MyException(%s)" % self.args
msg = error_message(MyException('Hello', 'World!'))
assert 'Hello' in str(msg['exception'])
@gen_cluster()
def test_gather(s, a, b):
b.data['x'] = 1
b.data['y'] = 2
aa = rpc(ip=a.ip, port=a.port)
resp = yield aa.gather(who_has={'x': [b.address], 'y': [b.address]})<|fim▁hole|> assert resp['status'] == 'OK'
assert a.data['x'] == b.data['x']
assert a.data['y'] == b.data['y']
@gen_cluster()
def test_compute_stream(s, a, b):
stream = yield connect(a.ip, a.port)
yield write(stream, {'op': 'compute-stream'})
msgs = [{'op': 'compute-task', 'function': dumps(inc), 'args': dumps((i,)), 'key': 'x-%d' % i}
for i in range(10)]
bstream = BatchedStream(stream, 0)
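    # Submit the ten tasks in two batches of five, checking each response is OK before sending the next batch.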
for msg in msgs[:5]:
yield write(stream, msg)
for i in range(5):
msg = yield read(bstream)
assert msg['status'] == 'OK'
assert msg['key'][0] == 'x'
for msg in msgs[5:]:
yield write(stream, msg)
for i in range(5):
msg = yield read(bstream)
assert msg['status'] == 'OK'
assert msg['key'][0] == 'x'
yield write(stream, {'op': 'close'})
@gen_cluster(client=True, ncores=[('127.0.0.1', 1)])
def test_active_holds_tasks(e, s, w):
future = e.submit(slowinc, 1, delay=0.2)
yield gen.sleep(0.1)
assert future.key in w.active
yield future._result()
assert future.key not in w.active
future = e.submit(throws, 1)
with ignoring(Exception):
yield _wait([future])
assert not w.active
def test_io_loop(loop):
s = Scheduler(loop=loop)
s.listen(0)
assert s.io_loop is loop
w = Worker(s.ip, s.port, loop=loop)
assert w.io_loop is loop
@gen_cluster(client=True, ncores=[])
def test_spill_to_disk(e, s):
np = pytest.importorskip('numpy')
w = Worker(s.ip, s.port, loop=s.loop, memory_limit=1000)
yield w._start()
x = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='x')
yield _wait(x)
y = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='y')
yield _wait(y)
assert set(w.data) == {x.key, y.key}
assert set(w.data.fast) == {x.key, y.key}
z = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='z')
yield _wait(z)
assert set(w.data) == {x.key, y.key, z.key}
assert set(w.data.fast) == {y.key, z.key}
assert set(w.data.slow) == {x.key}
yield x._result()
assert set(w.data.fast) == {x.key, z.key}
assert set(w.data.slow) == {y.key}
@gen_cluster(client=True)
def test_access_key(c, s, a, b):
def f(i):
from distributed.worker import thread_state
return thread_state.key
futures = [c.submit(f, i, key='x-%d' % i) for i in range(20)]
results = yield c._gather(futures)
assert list(results) == ['x-%d' % i for i in range(20)]
@gen_cluster(client=True)
def test_run_dask_worker(c, s, a, b):
def f(dask_worker=None):
return dask_worker.id
response = yield c._run(f)
assert response == {a.address: a.id, b.address: b.id}<|fim▁end|> | |
<|file_name|>wiz_event_append_assistant.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import fields, models, api
from openerp.addons.event_track_assistant._common import\
_convert_to_utc_date, _convert_to_local_date, _convert_time_to_float
date2string = fields.Date.to_string
datetime2string = fields.Datetime.to_string
str2datetime = fields.Datetime.from_string
class WizEventAppendAssistant(models.TransientModel):
_inherit = 'wiz.event.append.assistant'
type_hour = fields.Many2one(
comodel_name='hr.type.hour', string='Type hour')
start_time = fields.Float(string='Start time', default=0.0)
end_time = fields.Float(string='End time', default=0.0)
@api.model
def default_get(self, var_fields):
tz = self.env.user.tz
res = super(WizEventAppendAssistant, self).default_get(var_fields)
res.update({
'start_time': _convert_time_to_float(
_convert_to_utc_date(res.get('min_from_date'), tz=tz), tz=tz),
'end_time': _convert_time_to_float(
_convert_to_utc_date(res.get('max_to_date'), tz=tz), tz=tz),
})
return res
@api.multi
@api.onchange('from_date', 'start_time', 'to_date', 'end_time', 'partner')
def onchange_dates_and_partner(self):
self.ensure_one()
res = super(WizEventAppendAssistant, self).onchange_dates_and_partner()
return res
def revert_dates(self):
tz = self.env.user.tz
super(WizEventAppendAssistant, self).revert_dates()
self.start_time = _convert_time_to_float(_convert_to_utc_date(
self.min_from_date, tz=tz), tz=tz)
self.end_time = _convert_time_to_float(_convert_to_utc_date(
self.max_to_date, tz=tz), tz=tz)
def _update_registration_start_date(self, registration):
super(WizEventAppendAssistant, self)._update_registration_start_date(
registration)
reg_date_start = str2datetime(registration.date_start)
if self.start_time:
wiz_from_date = _convert_to_utc_date(
self.from_date, time=self.start_time, tz=self.env.user.tz)
if wiz_from_date != reg_date_start:
registration.date_start = wiz_from_date
def _update_registration_date_end(self, registration):
super(WizEventAppendAssistant, self)._update_registration_date_end(
registration)
reg_date_end = str2datetime(registration.date_end)
if self.end_time:
wiz_to_date = _convert_to_utc_date(
self.to_date, time=self.end_time, tz=self.env.user.tz)
if wiz_to_date != reg_date_end:
registration.date_end = wiz_to_date
def _prepare_registration_data(self, event):<|fim▁hole|> date_start, time=self.start_time, tz=self.env.user.tz)
date_end = _convert_to_local_date(self.to_date).date()
date_end = _convert_to_utc_date(
date_end, time=self.end_time, tz=self.env.user.tz)
vals.update({
'date_start': event.date_begin
if datetime2string(date_start) < event.date_begin else date_start,
'date_end': event.date_end
if datetime2string(date_end) > event.date_end else date_end,
})
return vals
def _calc_dates_for_search_track(self, from_date, to_date):
super(WizEventAppendAssistant,
self)._calc_dates_for_search_track(from_date, to_date)
from_date = self._prepare_date_for_control(
from_date, time=self.start_time or 0.0)
to_date = self._prepare_date_for_control(
to_date, time=self.end_time or 24.0)
return from_date, to_date<|fim▁end|> | vals = super(WizEventAppendAssistant,
self)._prepare_registration_data(event)
date_start = _convert_to_local_date(self.from_date).date()
date_start = _convert_to_utc_date( |
<|file_name|>shared.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { CommonModule } from '@angular/common';
import { StarComponent } from '../shared/star.component';
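// Shared module: declares StarComponent and re-exports it alongside CommonModule and FormsModule for feature modules.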
@NgModule({
declarations: [
StarComponent
],
imports: [
CommonModule<|fim▁hole|> exports: [
CommonModule,
FormsModule,
StarComponent
]
})
export class SharedModule { }<|fim▁end|> | ], |
<|file_name|>view.py<|end_file_name|><|fim▁begin|># Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any, Dict, List, Optional, Text, Tuple
from django.utils.translation import ugettext as _
from django.db.models import Q
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from zerver.models import UserProfile, get_user_profile_by_email, Realm
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import api_key_only_webhook_view, has_request_variables, REQ
import logging
import re
import ujson
IGNORED_EVENTS = [
'comment_created', # we handle issue_update event instead
'comment_updated', # we handle issue_update event instead
'comment_deleted', # we handle issue_update event instead
]
def guess_zulip_user_from_jira(jira_username, realm):
# type: (Text, Realm) -> Optional[UserProfile]
try:
# Try to find a matching user in Zulip
# We search a user's full name, short name,
# and beginning of email address
user = UserProfile.objects.filter(<|fim▁hole|> realm=realm).order_by("id")[0]
return user
except IndexError:
return None
def convert_jira_markup(content, realm):
# type: (Text, Realm) -> Text
# Attempt to do some simplistic conversion of JIRA
# formatting to Markdown, for consumption in Zulip
# Jira uses *word* for bold, we use **word**
content = re.sub(r'\*([^\*]+)\*', r'**\1**', content)
# Jira uses {{word}} for monospacing, we use `word`
content = re.sub(r'{{([^\*]+?)}}', r'`\1`', content)
# Starting a line with bq. block quotes that line
content = re.sub(r'bq\. (.*)', r'> \1', content)
# Wrapping a block of code in {quote}stuff{quote} also block-quotes it
quote_re = re.compile(r'{quote}(.*?){quote}', re.DOTALL)
content = re.sub(quote_re, r'~~~ quote\n\1\n~~~', content)
# {noformat}stuff{noformat} blocks are just code blocks with no
# syntax highlighting
noformat_re = re.compile(r'{noformat}(.*?){noformat}', re.DOTALL)
content = re.sub(noformat_re, r'~~~\n\1\n~~~', content)
# Code blocks are delineated by {code[: lang]} {code}
code_re = re.compile(r'{code[^\n]*}(.*?){code}', re.DOTALL)
content = re.sub(code_re, r'~~~\n\1\n~~~', content)
# Links are of form: [https://www.google.com] or [Link Title|https://www.google.com]
# In order to support both forms, we don't match a | in bare links
content = re.sub(r'\[([^\|~]+?)\]', r'[\1](\1)', content)
# Full links which have a | are converted into a better markdown link
full_link_re = re.compile(r'\[(?:(?P<title>[^|~]+)\|)(?P<url>.*)\]')
content = re.sub(full_link_re, r'[\g<title>](\g<url>)', content)
# Try to convert a JIRA user mention of format [~username] into a
# Zulip user mention. We don't know the email, just the JIRA username,
# so we naively guess at their Zulip account using this
if realm:
mention_re = re.compile(u'\[~(.*?)\]')
for username in mention_re.findall(content):
# Try to look up username
user_profile = guess_zulip_user_from_jira(username, realm)
if user_profile:
replacement = u"**{}**".format(user_profile.full_name)
else:
replacement = u"**{}**".format(username)
content = content.replace("[~{}]".format(username,), replacement)
return content
def get_in(payload, keys, default=''):
# type: (Dict[str, Any], List[str], Text) -> Any
try:
for key in keys:
payload = payload[key]
except (AttributeError, KeyError, TypeError):
return default
return payload
def get_issue_string(payload, issue_id=None):
# type: (Dict[str, Any], Text) -> Text
# Guess the URL as it is not specified in the payload
# We assume that there is a /browse/BUG-### page
# from the REST url of the issue itself
if issue_id is None:
issue_id = get_issue_id(payload)
base_url = re.match("(.*)\/rest\/api/.*", get_in(payload, ['issue', 'self']))
if base_url and len(base_url.groups()):
return u"[{}]({}/browse/{})".format(issue_id, base_url.group(1), issue_id)
else:
return issue_id
def get_assignee_mention(assignee_email):
# type: (Text) -> Text
if assignee_email != '':
try:
assignee_name = get_user_profile_by_email(assignee_email).full_name
except UserProfile.DoesNotExist:
assignee_name = assignee_email
return u"**{}**".format(assignee_name)
return ''
def get_issue_author(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['user', 'displayName'])
def get_issue_id(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'key'])
def get_issue_title(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'fields', 'summary'])
def get_issue_subject(payload):
# type: (Dict[str, Any]) -> Text
return u"{}: {}".format(get_issue_id(payload), get_issue_title(payload))
def get_sub_event_for_update_issue(payload):
# type: (Dict[str, Any]) -> Text
sub_event = payload.get('issue_event_type_name', '')
if sub_event == '':
if payload.get('comment'):
return 'issue_commented'
elif payload.get('transition'):
return 'issue_transited'
return sub_event
def get_event_type(payload):
# type: (Dict[str, Any]) -> Optional[Text]
event = payload.get('webhookEvent')
if event is None and payload.get('transition'):
event = 'jira:issue_updated'
return event
def add_change_info(content, field, from_field, to_field):
# type: (Text, Text, Text, Text) -> Text
content += u"* Changed {}".format(field)
if from_field:
content += u" from **{}**".format(from_field)
if to_field:
content += u" to {}\n".format(to_field)
return content
def handle_updated_issue_event(payload, user_profile):
# Reassigned, commented, reopened, and resolved events are all bundled
# into this one 'updated' event type, so we try to extract the meaningful
# event that happened
# type: (Dict[str, Any], UserProfile) -> Text
issue_id = get_in(payload, ['issue', 'key'])
issue = get_issue_string(payload, issue_id)
assignee_email = get_in(payload, ['issue', 'fields', 'assignee', 'emailAddress'], '')
assignee_mention = get_assignee_mention(assignee_email)
if assignee_mention != '':
assignee_blurb = u" (assigned to {})".format(assignee_mention)
else:
assignee_blurb = ''
sub_event = get_sub_event_for_update_issue(payload)
if 'comment' in sub_event:
if sub_event == 'issue_commented':
verb = 'added comment to'
elif sub_event == 'issue_comment_edited':
verb = 'edited comment on'
else:
verb = 'deleted comment from'
content = u"{} **{}** {}{}".format(get_issue_author(payload), verb, issue, assignee_blurb)
comment = get_in(payload, ['comment', 'body'])
if comment:
comment = convert_jira_markup(comment, user_profile.realm)
content = u"{}:\n\n\n{}\n".format(content, comment)
else:
content = u"{} **updated** {}{}:\n\n".format(get_issue_author(payload), issue, assignee_blurb)
changelog = get_in(payload, ['changelog'])
if changelog != '':
# Use the changelog to display the changes, whitelist types we accept
items = changelog.get('items')
for item in items:
field = item.get('field')
if field == 'assignee' and assignee_mention != '':
target_field_string = assignee_mention
else:
# Convert a user's target to a @-mention if possible
target_field_string = u"**{}**".format(item.get('toString'))
from_field_string = item.get('fromString')
if target_field_string or from_field_string:
content = add_change_info(content, field, from_field_string, target_field_string)
elif sub_event == 'issue_transited':
from_field_string = get_in(payload, ['transition', 'from_status'])
target_field_string = u'**{}**'.format(get_in(payload, ['transition', 'to_status']))
if target_field_string or from_field_string:
content = add_change_info(content, 'status', from_field_string, target_field_string)
return content
def handle_created_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **created** {} priority {}, assigned to **{}**:\n\n> {}".format(
get_issue_author(payload),
get_issue_string(payload),
get_in(payload, ['issue', 'fields', 'priority', 'name']),
get_in(payload, ['issue', 'fields', 'assignee', 'displayName'], 'no one'),
get_issue_title(payload)
)
def handle_deleted_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **deleted** {}!".format(get_issue_author(payload), get_issue_string(payload))
@api_key_only_webhook_view("JIRA")
@has_request_variables
def api_jira_webhook(request, user_profile,
payload=REQ(argument_type='body'),
stream=REQ(default='jira')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text) -> HttpResponse
event = get_event_type(payload)
if event == 'jira:issue_created':
subject = get_issue_subject(payload)
content = handle_created_issue_event(payload)
elif event == 'jira:issue_deleted':
subject = get_issue_subject(payload)
content = handle_deleted_issue_event(payload)
elif event == 'jira:issue_updated':
subject = get_issue_subject(payload)
content = handle_updated_issue_event(payload, user_profile)
elif event in IGNORED_EVENTS:
return json_success()
else:
if event is None:
if not settings.TEST_SUITE:
message = u"Got JIRA event with None event type: {}".format(payload)
logging.warning(message)
return json_error(_("Event is not given by JIRA"))
else:
if not settings.TEST_SUITE:
logging.warning("Got JIRA event type we don't support: {}".format(event))
return json_success()
check_send_message(user_profile, request.client, "stream", [stream], subject, content)
return json_success()<|fim▁end|> | Q(full_name__iexact=jira_username) |
Q(short_name__iexact=jira_username) |
Q(email__istartswith=jira_username),
is_active=True, |
<|file_name|>autoderef-method-twice-but-not-thrice.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
trait double {
fn double(self: Box<Self>) -> uint;
}
impl double for Box<uint> {
fn double(self: Box<Box<uint>>) -> uint { **self * 2u }
}
pub fn main() {<|fim▁hole|><|fim▁end|> | let x = box box box box box 3u;
assert_eq!(x.double(), 6u);
} |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package me.banxi.androiddemo;<|fim▁hole|>import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
public class MainActivity extends Activity{
public void onCreate(Bundle bundle){
super.onCreate(bundle);
TextView textView = new TextView(this);
textView.setText("Hello,World");
setContentView(textView);
}
}<|fim▁end|> | |
<|file_name|>test_automatic_adaptation.py<|end_file_name|><|fim▁begin|>###############################################################################
# Copyright 2014 Enthought, Inc.
###############################################################################
from traits.adaptation.api import reset_global_adaptation_manager
from traits.api import HasTraits, Instance, List, register_factory, TraitError
from traits.testing.unittest_tools import unittest
class Foo(HasTraits):
pass
class Bar(HasTraits):
pass
def bar_to_foo_adapter(bar):
return Foo()
class FooContainer(HasTraits):
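    # Only the traits declared with adapt='yes' run assigned values through registered adapters.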
not_adapting_foo = Instance(Foo)
adapting_foo = Instance(Foo, adapt='yes')
not_adapting_foo_list = List(Foo)
adapting_foo_list = List(Instance(Foo, adapt='yes'))
class TestAutomaticAdaptation(unittest.TestCase):
#### 'TestCase' protocol ##################################################
def setUp(self):
reset_global_adaptation_manager()
#### Tests ################################################################<|fim▁hole|> foo_container = FooContainer()
# Before a Bar->Foo adapter is registered.
with self.assertRaises(TraitError):
foo_container.not_adapting_foo = bar
with self.assertRaises(TraitError):
foo_container.adapting_foo = bar
# After a Bar->Foo adapter is registered.
register_factory(bar_to_foo_adapter, Bar, Foo)
with self.assertRaises(TraitError):
foo_container.not_adapting_foo = bar
foo_container.adapting_foo = bar
self.assertIsInstance(foo_container.adapting_foo, Foo)
def test_list_trait_automatic_adaptation(self):
bar = Bar()
foo_container = FooContainer()
# Before a Bar->Foo adapter is registered.
with self.assertRaises(TraitError):
foo_container.not_adapting_foo_list = [bar]
with self.assertRaises(TraitError):
foo_container.adapting_foo_list = [bar]
# After a Bar->Foo adapter is registered.
register_factory(bar_to_foo_adapter, Bar, Foo)
with self.assertRaises(TraitError):
foo_container.not_adapting_foo_list = [bar]
foo_container.adapting_foo_list = [bar]
self.assertIsInstance(foo_container.adapting_foo_list[0], Foo)<|fim▁end|> |
def test_instance_trait_automatic_adaptation(self):
bar = Bar() |
<|file_name|>sensorcontrol.cpp<|end_file_name|><|fim▁begin|>#include "sensorcontrol.h"
using namespace oi;
/*!
* \brief SensorControl::SensorControl
* \param station
* \param parent
*/
SensorControl::SensorControl(QPointer<Station> &station, QObject *parent) : QObject(parent), station(station), sensorValid(false){
this->worker = new SensorWorker();
this->connectSensorWorker();
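    // Calls into the worker go through QMetaObject::invokeMethod so they remain safe once the worker is moved to its own thread (see setSensorWorkerThread).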
}
/*!
* \brief SensorControl::~SensorControl
*/
SensorControl::~SensorControl(){
this->disconnectSensorWorker();
}
/*!
* \brief SensorControl::getSensor
* Returns a copy of the current sensor
* \return
*/
Sensor SensorControl::getSensor(){
//get sensor
Sensor sensor;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSensor", Qt::DirectConnection,
Q_RETURN_ARG(Sensor, sensor));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return sensor;
}
/*!
* \brief SensorControl::setSensor
* Sets the current sensor to the given one
* \param sensor
*/
void SensorControl::setSensor(const QPointer<Sensor> &sensor){
//check sensor
if(sensor.isNull()){
return;
}
//check old sensor and add it to the list of used sensors
if(sensorValid){
this->usedSensors.append(this->getSensor());
}
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "setSensor", Qt::DirectConnection,
Q_ARG(QPointer<Sensor>, sensor));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
//set sensor valid
this->sensorValid = true;
}
/*!
* \brief SensorControl::takeSensor
* Returns the current sensor instance. That sensor will no longer be used by the sensor worker
* \return
*/
QPointer<Sensor> SensorControl::takeSensor(){
//check old sensor and add it to the list of used sensors
if(sensorValid){
this->usedSensors.append(this->getSensor());
}
//call method of sensor worker
QPointer<Sensor> sensor(NULL);
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "takeSensor", Qt::DirectConnection,
Q_RETURN_ARG(QPointer<Sensor>, sensor));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
//set sensor invalid
this->sensorValid = false;
return sensor;
}
/*!
* \brief SensorControl::resetSensor
* Disconnects and deletes the current sensor
*/
void SensorControl::resetSensor(){
//check old sensor and add it to the list of used sensors
if(sensorValid){
this->usedSensors.append(this->getSensor());
}
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "resetSensor", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
//set sensor invalid
this->sensorValid = false;
}
/*!
* \brief SensorControl::getUsedSensors
* \return
*/
const QList<Sensor> &SensorControl::getUsedSensors(){
return this->usedSensors;
}
/*!
* \brief SensorControl::setUsedSensors
* \param sensors
*/
void SensorControl::setUsedSensors(const QList<Sensor> &sensors){
this->usedSensors = sensors;
}
/*!
* \brief SensorControl::getStreamFormat
* \return
*/
ReadingTypes SensorControl::getStreamFormat(){
//call method of sensor worker
ReadingTypes type = eUndefinedReading;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getStreamFormat", Qt::DirectConnection,
Q_RETURN_ARG(ReadingTypes, type));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return type;
}
/*!
* \brief SensorControl::setStreamFormat
* \param streamFormat
*/
void SensorControl::setStreamFormat(ReadingTypes streamFormat){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "setStreamFormat", Qt::QueuedConnection,
Q_ARG(ReadingTypes, streamFormat));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::getIsSensorSet
* \return
*/
bool SensorControl::getIsSensorSet(){
return this->sensorValid;
}
/*!
* \brief SensorControl::getIsSensorConnected
* \return
*/
bool SensorControl::getIsSensorConnected(){
//call method of sensor worker
bool isConnected = false;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getIsSensorConnected", Qt::DirectConnection,
Q_RETURN_ARG(bool, isConnected));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return isConnected;
}
/*!
* \brief SensorControl::getIsReadyForMeasurement
* \return
*/
bool SensorControl::getIsReadyForMeasurement(){
//call method of sensor worker
bool isReady = false;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getIsReadyForMeasurement", Qt::DirectConnection,
Q_RETURN_ARG(bool, isReady));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return isReady;
}
/*!
* \brief SensorControl::getIsBusy
* \return
*/
bool SensorControl::getIsBusy(){
//call method of sensor worker
bool isBusy = false;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getIsBusy", Qt::DirectConnection,
Q_RETURN_ARG(bool, isBusy));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return isBusy;
}
/*!
* \brief SensorControl::getSensorStatus
* \return
*/
QMap<QString, QString> SensorControl::getSensorStatus(){
//call method of sensor worker
QMap<QString, QString> status;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSensorStatus", Qt::DirectConnection,
Q_RETURN_ARG(StringStringMap, status));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return status;
}
/*!
* \brief SensorControl::getActiveSensorType
* \return
*/
SensorTypes SensorControl::getActiveSensorType(){
//call method of sensor worker
SensorTypes type;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getActiveSensorType", Qt::DirectConnection,
Q_RETURN_ARG(SensorTypes, type));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return type;
}
/*!
* \brief SensorControl::getSupportedReadingTypes
* \return
*/
QList<ReadingTypes> SensorControl::getSupportedReadingTypes(){
//call method of sensor worker
QList<ReadingTypes> types;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSupportedReadingTypes", Qt::DirectConnection,
Q_RETURN_ARG(QList<ReadingTypes>, types));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return types;
}
/*!
* \brief SensorControl::getSupportedConnectionTypes
* \return
*/
QList<ConnectionTypes> SensorControl::getSupportedConnectionTypes(){
//call method of sensor worker
QList<ConnectionTypes> types;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSupportedConnectionTypes", Qt::DirectConnection,
Q_RETURN_ARG(QList<ConnectionTypes>, types));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return types;
}
/*!
* \brief SensorControl::getSupportedSensorActions
* \return
*/
QList<SensorFunctions> SensorControl::getSupportedSensorActions(){
//call method of sensor worker
QList<SensorFunctions> actions;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSupportedSensorActions", Qt::DirectConnection,
Q_RETURN_ARG(QList<SensorFunctions>, actions));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return actions;
}
/*!
* \brief SensorControl::getSelfDefinedActions
* \return
*/
QStringList SensorControl::getSelfDefinedActions(){
//call method of sensor worker
QStringList actions;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSelfDefinedActions", Qt::DirectConnection,
Q_RETURN_ARG(QStringList, actions));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return actions;
}
/*!
* \brief SensorControl::getSensorConfiguration
* \return
*/
SensorConfiguration SensorControl::getSensorConfiguration(){
//call method of sensor worker
SensorConfiguration sConfig;
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "getSensorConfiguration", Qt::DirectConnection,
Q_RETURN_ARG(SensorConfiguration, sConfig));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
return sConfig;
}
/*!
* \brief SensorControl::setSensorConfiguration
* \param sConfig
*/
void SensorControl::setSensorConfiguration(const SensorConfiguration &sConfig){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "setSensorConfiguration", Qt::QueuedConnection,
Q_ARG(SensorConfiguration, sConfig));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::connectSensor
*/
void SensorControl::connectSensor(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "connectSensor", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::disconnectSensor
*/
void SensorControl::disconnectSensor(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "disconnectSensor", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::measure
* \param geomId
* \param mConfig
*/
void SensorControl::measure(const int &geomId, const MeasurementConfig &mConfig){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "measure", Qt::QueuedConnection,
Q_ARG(int, geomId), Q_ARG(MeasurementConfig, mConfig));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::move
* \param azimuth
* \param zenith
* \param distance
* \param isRelative
* \param measure
* \param geomId
* \param mConfig
*/
void SensorControl::move(const double &azimuth, const double &zenith, const double &distance, const bool &isRelative, const bool &measure, const int &geomId, const MeasurementConfig &mConfig){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "move", Qt::QueuedConnection,
Q_ARG(double, azimuth), Q_ARG(double, zenith), Q_ARG(double, distance),
Q_ARG(bool, isRelative), Q_ARG(bool, measure), Q_ARG(int, geomId),
Q_ARG(MeasurementConfig, mConfig));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::move
* \param x
* \param y
* \param z
* \param measure
* \param geomId
* \param mConfig
*/
void SensorControl::move(const double &x, const double &y, const double &z, const bool &measure, const int &geomId, const MeasurementConfig &mConfig){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "move", Qt::QueuedConnection,
Q_ARG(double, x), Q_ARG(double, y), Q_ARG(double, z),
Q_ARG(bool, measure), Q_ARG(int, geomId), Q_ARG(MeasurementConfig, mConfig));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::initialize
*/
void SensorControl::initialize(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "initialize", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::motorState
*/
void SensorControl::motorState(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "motorState", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::home
*/
void SensorControl::home(){
<|fim▁hole|> }
}
/*!
* \brief SensorControl::toggleSight
*/
void SensorControl::toggleSight(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "toggleSight", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::compensation
*/
void SensorControl::compensation(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "compensation", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::selfDefinedAction
* \param action
*/
void SensorControl::selfDefinedAction(const QString &action){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "selfDefinedAction", Qt::QueuedConnection,
Q_ARG(QString, action));
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
void SensorControl::search(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "search", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::startReadingStream
*/
void SensorControl::startReadingStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "startReadingStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::stopReadingStream
*/
void SensorControl::stopReadingStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "stopReadingStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::startConnectionMonitoringStream
*/
void SensorControl::startConnectionMonitoringStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "startConnectionMonitoringStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::stopConnectionMonitoringStream
*/
void SensorControl::stopConnectionMonitoringStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "stopConnectionMonitoringStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::startStatusMonitoringStream
*/
void SensorControl::startStatusMonitoringStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "startStatusMonitoringStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::stopStatusMonitoringStream
*/
void SensorControl::stopStatusMonitoringStream(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "stopStatusMonitoringStream", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
void SensorControl::finishMeasurement(){
//call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "finishMeasurement", Qt::DirectConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage);
}
}
/*!
* \brief SensorControl::connectSensorWorker
*/
void SensorControl::connectSensorWorker(){
//connect sensor action results
QObject::connect(this->worker, &SensorWorker::commandFinished, this, &SensorControl::commandFinished, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::measurementFinished, this, &SensorControl::measurementFinished, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::measurementDone, this, &SensorControl::measurementDone, Qt::QueuedConnection);
//connect streaming results
QObject::connect(this->worker, &SensorWorker::realTimeReading, this, &SensorControl::realTimeReading, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::realTimeStatus, this, &SensorControl::realTimeStatus, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::connectionLost, this, &SensorControl::connectionLost, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::connectionReceived, this, &SensorControl::connectionReceived, Qt::QueuedConnection);
QObject::connect(this->worker, &SensorWorker::isReadyForMeasurement, this, &SensorControl::isReadyForMeasurement, Qt::QueuedConnection);
//connect sensor messages
QObject::connect(this->worker, &SensorWorker::sensorMessage, this, &SensorControl::sensorMessage, Qt::QueuedConnection);
}
/*!
* \brief SensorControl::disconnectSensorWorker
*/
void SensorControl::disconnectSensorWorker(){
}
void SensorControl::setSensorWorkerThread(QPointer<QThread> t) {
this->worker->moveToThread(t);
}<|fim▁end|> | //call method of sensor worker
bool hasInvoked = QMetaObject::invokeMethod(this->worker, "home", Qt::QueuedConnection);
if(!hasInvoked){
emit this->sensorMessage("Cannot invoke getSensor method of sensor worker", eErrorMessage, eConsoleMessage); |
<|file_name|>if-check-panic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:Number is odd
fn even(x: usize) -> bool {
if x < 2 {
return false;
} else if x == 2 {
return true;
} else {
return even(x - 2);
}
}
fn foo(x: usize) {
if even(x) {
println!("{}", x);
} else {
panic!("Number is odd");
}
}<|fim▁hole|>fn main() {
foo(3);
}<|fim▁end|> | |
<|file_name|>container_debug_adapter.js<|end_file_name|><|fim▁begin|>import Ember from 'ember-metal'; // Ember as namespace
import {
A as emberA,
typeOf,
String as StringUtils,
Namespace,
Object as EmberObject
} from 'ember-runtime';
/**
@module ember
@submodule ember-extension-support
*/
/**
The `ContainerDebugAdapter` helps the container and resolver interface
with tools that debug Ember such as the
[Ember Extension](https://github.com/tildeio/ember-extension)
for Chrome and Firefox.
This class can be extended by a custom resolver implementer
to override some of the methods with library-specific code.
The methods likely to be overridden are:
* `canCatalogEntriesByType`
* `catalogEntriesByType`
The adapter will need to be registered
in the application's container as `container-debug-adapter:main`.
Example:
```javascript
Application.initializer({
name: "containerDebugAdapter",
initialize(application) {
application.register('container-debug-adapter:main', require('app/container-debug-adapter'));
}
});
```
@class ContainerDebugAdapter
@namespace Ember
@extends Ember.Object
@since 1.5.0
@public
*/
export default EmberObject.extend({
/**
The resolver instance of the application
being debugged. This property will be injected
on creation.
@property resolver
@default null
@public
*/
resolver: null,
/**
Returns true if it is possible to catalog a list of available
classes in the resolver for a given type.
@method canCatalogEntriesByType
@param {String} type The type. e.g. "model", "controller", "route".
@return {boolean} whether a list is available for this type.
@public
*/
canCatalogEntriesByType(type) {
if (type === 'model' || type === 'template') {
return false;
}
return true;
},
/**
Returns the available classes a given type.
@method catalogEntriesByType
@param {String} type The type. e.g. "model", "controller", "route".
@return {Array} An array of strings.
@public
*/
catalogEntriesByType(type) {
let namespaces = emberA(Namespace.NAMESPACES);
let types = emberA();
let typeSuffixRegex = new RegExp(`${StringUtils.classify(type)}$`);
namespaces.forEach(namespace => {
if (namespace !== Ember) {
for (let key in namespace) {
if (!namespace.hasOwnProperty(key)) { continue; }
if (typeSuffixRegex.test(key)) {
let klass = namespace[key];<|fim▁hole|> }
}
});
return types;
}
});<|fim▁end|> | if (typeOf(klass) === 'class') {
types.push(StringUtils.dasherize(key.replace(typeSuffixRegex, '')));
}
} |
<|file_name|>test_sepsis.py<|end_file_name|><|fim▁begin|>import pandas as pd
from pandas.io import gbq
def test_sepsis3_one_row_per_stay_id(dataset, project_id):
"""Verifies one stay_id per row of sepsis-3"""
query = f"""
SELECT
COUNT(*) AS n
FROM
(
SELECT stay_id FROM {dataset}.sepsis3 GROUP BY 1 HAVING COUNT(*) > 1
) s <|fim▁hole|> n = df.loc[0, 'n']
assert n == 0, 'sepsis-3 table has more than one row per stay_id'<|fim▁end|> | """
df = gbq.read_gbq(query, project_id=project_id, dialect="standard") |
<|file_name|>coire.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
#
# Copyright: Conor O'Callghan 2016
# Version: v1.1.3
#
# Please feel free to fork this project, modify the code and improve
# it on the github repo https://github.com/brioscaibriste/iarnrod
#
# Powered by TfL Open Data
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import json
import sys
import tempfile
import time
import os
from urllib.request import urlopen
'''
ParseArgs<|fim▁hole|>
'''
def ParseArgs():
# Parse our command line argument for the line name
parser = argparse.ArgumentParser()
parser.add_argument('--line',dest='LineName',help='Specify the London line you want to report on')
args = parser.parse_args()
# Check if the value is blank
Line = (args.LineName)
if not Line:
print ("\nError, you must specify a line name! e.g. --line district\n")
sys.exit(1)
# Convert the line name to lower case for easy comparison
Line = Line.lower()
# If the line isn't in the line list, fail badly
if Line not in ('district','circle','victoria','central','northern',
'bakerloo','hammersmith-city','jubilee','metropolitan',
'piccadilly','waterloo-city','dlr',):
print ("\nError, you have specified " + Line + " as your line. You must specify one of the following: "
"\n\tDistrict"
"\n\tCircle"
"\n\tVictora"
"\n\tCentral"
"\n\tNorthern"
"\n\tPiccadilly"
"\n\tBakerloo"
"\n\thammersmith-city"
"\n\twaterloo-city"
"\n\tDLR"
"\n\tMetropolitan"
"\n\tJubilee\n")
sys.exit(1)
# Convert the tube line back to upper case for nice display
Line = Line.upper()
return Line
'''
RetrieveTFLData
Inputs:
Line - Which line to retrieve information on
Run - Should the data retrieval be run or should the cache file be used
SFileName - The file in which to store the line status cache
This function takes the Line variable (a name of a Transport For London line
name) and polls the TFL API. The function then returns the current line
status for the specified line.
'''
def RetrieveTFLData(Line,Run,SFileName):
# TFL Unified API URL
TFLDataURL = "https://api.tfl.gov.uk/Line/" + Line + ("/Status?detail=False"
"&app_id=&app_key=")
if Run:
# Read all the information from JSON at the specified URL, can be re-done with requests?
        try:
            RawData = urlopen(TFLDataURL).read().decode('utf8')
        except OSError:
            sys.exit("Error, failed to retrieve the data from the TFL website")
TFLData = json.loads(RawData)
# Sanitize the data to get the line status
Scratch = (TFLData[0]['lineStatuses'])
LineStatusData = (Scratch[0]['statusSeverityDescription'])
        # Cache the status in a file
with open(SFileName, 'w+') as SFile:
SFile.write(LineStatusData)
SFile.closed
else:
with open(SFileName, 'r+') as SFile:
LineStatusData = SFile.read()
SFile.closed
return LineStatusData
'''
Throttle
Inputs
PollIntervalMinutes - Polling interval in minutes
Throttling - Should we throttle the connection or not?
TFileName - The file where the timestamp for throttling usage is stored
This function is used to determine whether or not the next run of the retrieval of data should run.
It retrieves the previously run time from a file in /tmp if it exists, if the file does not exist
the run status will return as 1 and the current time stamp will be written into a new file.
If throttling is disabled, the file will be removed from /tmp and run will be set to 1.
'''
def Throttle(PollIntervalMinutes,Throttling,TFileName):
if Throttling == "True":
# Current epoch time
# CurrentStamp = str(time.time()).split('.')[0]
CurrentStamp = int(time.time())
# Does the temporary file exist or not
if os.path.isfile(TFileName):
# Open the temp file and read the time stamp
with open(TFileName, 'r+') as TFile:
TimeFile = TFile.read()
Remainder = CurrentStamp - int(TimeFile)
else:
# Get the current time stamp and write it to the temp file
with open(TFileName, 'w') as TFile:
TFile.write(str(CurrentStamp))
# Set the Remainder high to force the next run
Remainder = 1000000
# If the remainder is less than the poll interval, don't run the command; otherwise run it
if ( Remainder < (PollIntervalMinutes * 60) ):
Run = 0
else:
Run = 1
# Set the command to run and re-write the poll time to file
with open(TFileName, 'w') as TFile:
TFile.write(str(CurrentStamp))
return Run
else:
# Remove the time file if it exists
try:
os.remove(TFileName)
except OSError:
pass
Run = 1
return Run
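# Minimal usage sketch, not part of the original script: wire the helpers
# together to print the current status of the requested line. The poll
# interval, throttling flag and temp-file names below are assumed values.
if __name__ == '__main__':
    LINE = ParseArgs()
    STATUS_FILE = os.path.join(tempfile.gettempdir(), 'iarnrod-status')
    TIME_FILE = os.path.join(tempfile.gettempdir(), 'iarnrod-poll')
    RUN = Throttle(PollIntervalMinutes=5, Throttling="True", TFileName=TIME_FILE)
    STATUS = RetrieveTFLData(LINE, RUN, STATUS_FILE)
    print("Current status of the " + LINE + " line: " + STATUS)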
<|file_name|>easyblock.py<|end_file_name|><|fim▁begin|># #
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Generic EasyBuild support for building and installing software.
The EasyBlock class should serve as a base class for all easyblocks.
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Toon Willems (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Fotis Georgatos (Uni.Lu, NTUA)
"""
import copy
import glob
import inspect
import os
import shutil
import stat
import time
import traceback
from distutils.version import LooseVersion
from vsc.utils import fancylogger
from vsc.utils.missing import get_class_for
import easybuild.tools.environment as env
from easybuild.tools import config, filetools
from easybuild.framework.easyconfig import EASYCONFIGS_PKG_SUBDIR
from easybuild.framework.easyconfig.easyconfig import ITERATE_OPTIONS, EasyConfig, ActiveMNS
from easybuild.framework.easyconfig.easyconfig import get_easyblock_class, get_module_path, resolve_template
from easybuild.framework.easyconfig.parser import fetch_parameters_from_easyconfig
from easybuild.framework.easyconfig.tools import get_paths_for
from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP
from easybuild.tools.build_details import get_build_stats
from easybuild.tools.build_log import EasyBuildError, print_error, print_msg
from easybuild.tools.config import build_option, build_path, get_log_filename, get_repository, get_repositorypath
from easybuild.tools.config import install_path, log_path, package_path, source_paths
from easybuild.tools.environment import restore_env
from easybuild.tools.filetools import DEFAULT_CHECKSUM
from easybuild.tools.filetools import adjust_permissions, apply_patch, convert_name, download_file, encode_class_name
from easybuild.tools.filetools import extract_file, mkdir, move_logs, read_file, rmtree2
from easybuild.tools.filetools import write_file, compute_checksum, verify_checksum
from easybuild.tools.run import run_cmd
from easybuild.tools.jenkins import write_to_xml
from easybuild.tools.module_generator import ModuleGeneratorLua, ModuleGeneratorTcl, module_generator
from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version
from easybuild.tools.modules import ROOT_ENV_VAR_NAME_PREFIX, VERSION_ENV_VAR_NAME_PREFIX, DEVEL_ENV_VAR_NAME_PREFIX
from easybuild.tools.modules import get_software_root, modules_tool
from easybuild.tools.package.utilities import package
from easybuild.tools.repository.repository import init_repository
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
from easybuild.tools.systemtools import det_parallelism, use_group
from easybuild.tools.utilities import remove_unwanted_chars
from easybuild.tools.version import this_is_easybuild, VERBOSE_VERSION, VERSION
BUILD_STEP = 'build'
CLEANUP_STEP = 'cleanup'
CONFIGURE_STEP = 'configure'
EXTENSIONS_STEP = 'extensions'
FETCH_STEP = 'fetch'
MODULE_STEP = 'module'
PACKAGE_STEP = 'package'
PATCH_STEP = 'patch'
PERMISSIONS_STEP = 'permissions'
POSTPROC_STEP = 'postproc'
PREPARE_STEP = 'prepare'
READY_STEP = 'ready'
SANITYCHECK_STEP = 'sanitycheck'
SOURCE_STEP = 'source'
TEST_STEP = 'test'
TESTCASES_STEP = 'testcases'
MODULE_ONLY_STEPS = [MODULE_STEP, PREPARE_STEP, READY_STEP, SANITYCHECK_STEP]
_log = fancylogger.getLogger('easyblock')
class EasyBlock(object):
"""Generic support for building and installing software, base class for actual easyblocks."""
# static class method for extra easyconfig parameter definitions
# this makes it easy to access the information without needing an instance
# subclasses of EasyBlock should call this method with a dictionary
@staticmethod
def extra_options(extra=None):
"""
Extra options method which will be passed to the EasyConfig constructor.
"""
if extra is None:
extra = {}
if not isinstance(extra, dict):
_log.nosupport("Obtained 'extra' value of type '%s' in extra_options, should be 'dict'" % type(extra), '2.0')
return extra
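# Illustrative sketch, not part of the original file: a concrete easyblock
# would typically pass its own parameter definitions through this hook along
# the following lines ('EB_Foo' and 'with_bar' are assumed names, and CUSTOM
# stands for the easyconfig parameter category constant of the framework):
#
#   class EB_Foo(EasyBlock):
#       @staticmethod
#       def extra_options():
#           extra = {'with_bar': [False, "Build with bar support", CUSTOM]}
#           return EasyBlock.extra_options(extra)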
#
# INIT
#
def __init__(self, ec):
"""
Initialize the EasyBlock instance.
@param ec: a parsed easyconfig file (EasyConfig instance)
"""
# keep track of original working directory, so we can go back there
self.orig_workdir = os.getcwd()
# list of patch/source files, along with checksums
self.patches = []
self.src = []
self.checksums = []
# build/install directories
self.builddir = None
self.installdir = None
# extensions
self.exts = None
self.exts_all = None
self.ext_instances = []
self.skip = None
self.module_extra_extensions = '' # extra stuff for module file required by extensions
# modules interface with default MODULEPATH
self.modules_tool = modules_tool()
# module generator
self.module_generator = module_generator(self, fake=True)
# modules footer
self.modules_footer = None
modules_footer_path = build_option('modules_footer')
if modules_footer_path is not None:
self.modules_footer = read_file(modules_footer_path)
# easyconfig for this application
if isinstance(ec, EasyConfig):
self.cfg = ec
else:
raise EasyBuildError("Value of incorrect type passed to EasyBlock constructor: %s ('%s')", type(ec), ec)
# determine install subdirectory, based on module name
self.install_subdir = None
# indicates whether build should be performed in installation dir
self.build_in_installdir = self.cfg['buildininstalldir']
# logging
self.log = None
self.logfile = None
self.logdebug = build_option('debug')
self.postmsg = '' # allow a post message to be set, which can be shown as last output
# list of loaded modules
self.loaded_modules = []
# iterate configure/build/options
self.iter_opts = {}
# sanity check fail error messages to report (if any)
self.sanity_check_fail_msgs = []
# robot path
self.robot_path = build_option('robot_path')
# original module path
self.orig_modulepath = os.getenv('MODULEPATH')
# keep track of initial environment we start in, so we can restore it if needed
self.initial_environ = copy.deepcopy(os.environ)
# initialize logger
self._init_log()
# should we keep quiet?
self.silent = build_option('silent')
# try and use the specified group (if any)
group_name = build_option('group')
if self.cfg['group'] is not None:
self.log.warning("Group spec '%s' is overriding config group '%s'." % (self.cfg['group'], group_name))
group_name = self.cfg['group']
self.group = None
if group_name is not None:
self.group = use_group(group_name)
# generate build/install directories
self.gen_builddir()
self.gen_installdir()
self.log.info("Init completed for application name %s version %s" % (self.name, self.version))
# INIT/CLOSE LOG
def _init_log(self):
"""
Initialize the logger.
"""
if not self.log is None:
return
self.logfile = get_log_filename(self.name, self.version, add_salt=True)
fancylogger.logToFile(self.logfile)
self.log = fancylogger.getLogger(name=self.__class__.__name__, fname=False)
self.log.info(this_is_easybuild())
this_module = inspect.getmodule(self)
self.log.info("This is easyblock %s from module %s (%s)",
self.__class__.__name__, this_module.__name__, this_module.__file__)
def close_log(self):
"""
Shutdown the logger.
"""
self.log.info("Closing log for application name %s version %s" % (self.name, self.version))
fancylogger.logToFile(self.logfile, enable=False)
#
# FETCH UTILITY FUNCTIONS
#
def get_checksum_for(self, checksums, filename=None, index=None):
"""
Obtain checksum for given filename.
@param checksums: a list or tuple of checksums (or None)
@param filename: name of the file to obtain checksum for
@param index: index of file in list
"""
# if checksums are provided as a list or tuple, look up the checksum by the file's index
if isinstance(checksums, (list, tuple)):
if index is not None and index < len(checksums) and (index >= 0 or abs(index) <= len(checksums)):
return checksums[index]
else:
return None
elif checksums is None:
return None
else:
raise EasyBuildError("Invalid type for checksums (%s), should be list, tuple or None.", type(checksums))
def fetch_sources(self, list_of_sources, checksums=None):
"""
Add a list of source files (can be tarballs, isos, urls).
All source files will be checked to see whether they exist (or can be located)
"""
for index, src_entry in enumerate(list_of_sources):
if isinstance(src_entry, (list, tuple)):
cmd = src_entry[1]
source = src_entry[0]
elif isinstance(src_entry, basestring):
cmd = None
source = src_entry
# check if the sources can be located
path = self.obtain_file(source)
if path:
self.log.debug('File %s found for source %s' % (path, source))
self.src.append({
'name': source,
'path': path,
'cmd': cmd,
'checksum': self.get_checksum_for(checksums, filename=source, index=index),
# always set a finalpath
'finalpath': self.builddir,
})
else:
raise EasyBuildError('No file found for source %s', source)
self.log.info("Added sources: %s" % self.src)
def fetch_patches(self, patch_specs=None, extension=False, checksums=None):
"""
Add a list of patches.
All patches will be checked to see whether they exist (or can be located)
"""
if patch_specs is None:
patch_specs = self.cfg['patches']
patches = []
for index, patch_spec in enumerate(patch_specs):
# check if the patches can be located
copy_file = False
suff = None
level = None
if isinstance(patch_spec, (list, tuple)):
if not len(patch_spec) == 2:
raise EasyBuildError("Unknown patch specification '%s', only 2-element lists/tuples are supported!",
str(patch_spec))
patch_file = patch_spec[0]
# this *must* be of type int, nothing else
# no 'isinstance(..., int)', since that would make True/False also acceptable
if type(patch_spec[1]) == int:
level = patch_spec[1]
elif isinstance(patch_spec[1], basestring):
# non-patch files are assumed to be files to copy
if not patch_spec[0].endswith('.patch'):
copy_file = True
suff = patch_spec[1]
else:
raise EasyBuildError("Wrong patch spec '%s', only int/string are supported as 2nd element",
str(patch_spec))
else:
patch_file = patch_spec
path = self.obtain_file(patch_file, extension=extension)
if path:
self.log.debug('File %s found for patch %s' % (path, patch_spec))
patchspec = {
'name': patch_file,
'path': path,
'checksum': self.get_checksum_for(checksums, filename=patch_file, index=index),
}
if suff:
if copy_file:
patchspec['copy'] = suff
else:
patchspec['sourcepath'] = suff
if level is not None:
patchspec['level'] = level
if extension:
patches.append(patchspec)
else:
self.patches.append(patchspec)
else:
raise EasyBuildError('No file found for patch %s', patch_spec)
if extension:
self.log.info("Fetched extension patches: %s" % patches)
return [patch['path'] for patch in patches]
else:
self.log.info("Added patches: %s" % self.patches)
def fetch_extension_sources(self):
"""
Find source file for extensions.
"""
exts_sources = []
self.cfg.enable_templating = False
exts_list = self.cfg['exts_list']
self.cfg.enable_templating = True
for ext in exts_list:
if (isinstance(ext, list) or isinstance(ext, tuple)) and ext:
# expected format: (name, version, options (dict))
ext_name = ext[0]
if len(ext) == 1:
exts_sources.append({'name': ext_name})
else:
ext_version = ext[1]
ext_options = {}
def_src_tmpl = "%(name)s-%(version)s.tar.gz"
if len(ext) == 3:
ext_options = ext[2]
if not isinstance(ext_options, dict):
raise EasyBuildError("Unexpected type (non-dict) for 3rd element of %s", ext)
elif len(ext) > 3:
raise EasyBuildError('Extension specified in unknown format (list/tuple too long)')
ext_src = {
'name': ext_name,
'version': ext_version,
'options': ext_options,
}
checksums = ext_options.get('checksums', None)
if ext_options.get('source_tmpl', None):
fn = resolve_template(ext_options['source_tmpl'], ext_src)
else:
fn = resolve_template(def_src_tmpl, ext_src)
if ext_options.get('nosource', None):
exts_sources.append(ext_src)
else:
source_urls = [resolve_template(url, ext_src) for url in ext_options.get('source_urls', [])]
src_fn = self.obtain_file(fn, extension=True, urls=source_urls)
if src_fn:
ext_src.update({'src': src_fn})
if checksums:
fn_checksum = self.get_checksum_for(checksums, filename=src_fn, index=0)
if verify_checksum(src_fn, fn_checksum):
self.log.info('Checksum for ext source %s verified' % fn)
else:
raise EasyBuildError('Checksum for ext source %s failed', fn)
ext_patches = self.fetch_patches(patch_specs=ext_options.get('patches', []), extension=True)
if ext_patches:
self.log.debug('Found patches for extension %s: %s' % (ext_name, ext_patches))
ext_src.update({'patches': ext_patches})
if checksums:
self.log.debug('Verifying checksums for extension patches...')
for index, ext_patch in enumerate(ext_patches):
checksum = self.get_checksum_for(checksums[1:], filename=ext_patch, index=index)
if verify_checksum(ext_patch, checksum):
self.log.info('Checksum for extension patch %s verified' % ext_patch)
else:
raise EasyBuildError('Checksum for extension patch %s failed', ext_patch)
else:
self.log.debug('No patches found for extension %s.' % ext_name)
exts_sources.append(ext_src)
else:
raise EasyBuildError("Source for extension %s not found.", ext)
elif isinstance(ext, basestring):
exts_sources.append({'name': ext})
else:
raise EasyBuildError("Extension specified in unknown format (not a string/list/tuple)")
return exts_sources
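# For reference, an exts_list entry processed above typically takes one of
# these forms in an easyconfig (illustrative sketch; names/versions assumed):
#   exts_list = [
#       'dateutil',                         # name only
#       ('numpy', '1.9.2', {
#           'source_urls': ['https://pypi.python.org/packages/source/n/numpy/'],
#           'patches': ['numpy-1.9.2_fix.patch'],
#       }),
#   ]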
def obtain_file(self, filename, extension=False, urls=None):
"""
Locate the file with the given name
- searches in different subdirectories of source path
- supports fetching the file from the web if the path is specified as a URL (i.e. starts with "http://" or "ftp://")
"""
srcpaths = source_paths()
# should we download or just try and find it?
if filename.startswith("http://") or filename.startswith("ftp://"):
# URL detected, so let's try and download it
url = filename
filename = url.split('/')[-1]
# figure out where to download the file to
filepath = os.path.join(srcpaths[0], self.name[0].lower(), self.name)
if extension:
filepath = os.path.join(filepath, "extensions")
self.log.info("Creating path %s to download file to" % filepath)
mkdir(filepath, parents=True)
try:
fullpath = os.path.join(filepath, filename)
# only download when it's not there yet
if os.path.exists(fullpath):
self.log.info("Found file %s at %s, no need to download it." % (filename, filepath))
return fullpath
else:
if download_file(filename, url, fullpath):
return fullpath
except IOError, err:
raise EasyBuildError("Downloading file %s from url %s to %s failed: %s", filename, url, fullpath, err)
else:
# try and find file in various locations
foundfile = None
failedpaths = []
# always look first in the dir of the current eb file
ebpath = [os.path.dirname(self.cfg.path)]
# always consider robot + easyconfigs install paths as a fall back (e.g. for patch files, test cases, ...)
common_filepaths = []
if self.robot_path:
common_filepaths.extend(self.robot_path)
common_filepaths.extend(get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR, robot_path=self.robot_path))
for path in ebpath + common_filepaths + srcpaths:
# create list of candidate filepaths
namepath = os.path.join(path, self.name)
letterpath = os.path.join(path, self.name.lower()[0], self.name)
# most likely paths
candidate_filepaths = [
letterpath, # easyblocks-style subdir
namepath, # subdir with software name
path, # directly in directory
]
# see if file can be found at that location
for cfp in candidate_filepaths:
fullpath = os.path.join(cfp, filename)
# also check in 'extensions' subdir for extensions
if extension:
fullpaths = [
os.path.join(cfp, "extensions", filename),
os.path.join(cfp, "packages", filename), # legacy
fullpath
]
else:
fullpaths = [fullpath]
for fp in fullpaths:
if os.path.isfile(fp):
self.log.info("Found file %s at %s" % (filename, fp))
foundfile = os.path.abspath(fp)
break # no need to try further
else:
failedpaths.append(fp)
if foundfile:
break # no need to try other source paths
if foundfile:
return foundfile
else:
# try and download source files from specified source URLs
if urls:
source_urls = urls
else:
source_urls = []
source_urls.extend(self.cfg['source_urls'])
targetdir = os.path.join(srcpaths[0], self.name.lower()[0], self.name)
mkdir(targetdir, parents=True)
for url in source_urls:
if extension:
targetpath = os.path.join(targetdir, "extensions", filename)
else:
targetpath = os.path.join(targetdir, filename)
if isinstance(url, basestring):
if url[-1] in ['=', '/']:
fullurl = "%s%s" % (url, filename)
else:
fullurl = "%s/%s" % (url, filename)
elif isinstance(url, tuple):
# URLs that require a suffix, e.g., SourceForge download links
# e.g. http://sourceforge.net/projects/math-atlas/files/Stable/3.8.4/atlas3.8.4.tar.bz2/download
fullurl = "%s/%s/%s" % (url[0], filename, url[1])
else:
self.log.warning("Source URL %s is of unknown type, so ignoring it." % url)
continue
self.log.debug("Trying to download file %s from %s to %s ..." % (filename, fullurl, targetpath))
downloaded = False
try:
if download_file(filename, fullurl, targetpath):
downloaded = True
except IOError, err:
self.log.debug("Failed to download %s from %s: %s" % (filename, url, err))
failedpaths.append(fullurl)
continue
if downloaded:
# if fetching from source URL worked, we're done
self.log.info("Successfully downloaded source file %s from %s" % (filename, fullurl))
return targetpath
else:
failedpaths.append(fullurl)
raise EasyBuildError("Couldn't find file %s anywhere, and downloading it didn't work either... "
"Paths attempted (in order): %s ", filename, ', '.join(failedpaths))
#
# GETTER/SETTER UTILITY FUNCTIONS
#
@property
def name(self):
"""
Shortcut to get the module name.
"""
return self.cfg['name']
@property
def version(self):
"""
Shortcut to get the module version.
"""
return self.cfg['version']
@property
def toolchain(self):
"""
Toolchain used to build this easyblock
"""
return self.cfg.toolchain
@property
def full_mod_name(self):
"""
Full module name (including subdirectory in module install path)
"""
return self.cfg.full_mod_name
@property
def short_mod_name(self):
"""
Short module name (not including subdirectory in module install path)
"""
return self.cfg.short_mod_name
@property
def moduleGenerator(self):
"""
Module generator (DEPRECATED, use self.module_generator instead).
"""
self.log.nosupport("self.moduleGenerator is replaced by self.module_generator", '2.0')
#
# DIRECTORY UTILITY FUNCTIONS
#
def gen_builddir(self):
"""Generate the (unique) name for the builddir"""
clean_name = remove_unwanted_chars(self.name)
# if the toolchain version starts with a '-', strip it to prevent a '--' in the path name
tcversion = self.toolchain.version.lstrip('-')
lastdir = "%s%s-%s%s" % (self.cfg['versionprefix'], self.toolchain.name, tcversion, self.cfg['versionsuffix'])
builddir = os.path.join(os.path.abspath(build_path()), clean_name, self.version, lastdir)
# make sure build dir is unique if cleanupoldbuild is False or not set
if not self.cfg.get('cleanupoldbuild', False):
uniq_builddir = builddir
suff = 0
while(os.path.isdir(uniq_builddir)):
uniq_builddir = "%s.%d" % (builddir, suff)
suff += 1
builddir = uniq_builddir
self.builddir = builddir
self.log.info("Build dir set to %s" % self.builddir)
def make_builddir(self):
"""
Create the build directory.
"""
if not self.build_in_installdir:
# self.builddir should be already set by gen_builddir()
if not self.builddir:
raise EasyBuildError("self.builddir not set, make sure gen_builddir() is called first!")
self.log.debug("Creating the build directory %s (cleanup: %s)", self.builddir, self.cfg['cleanupoldbuild'])
else:
self.log.info("Changing build dir to %s" % self.installdir)
self.builddir = self.installdir
self.log.info("Overriding 'cleanupoldinstall' (to False), 'cleanupoldbuild' (to True) "
"and 'keeppreviousinstall' because we're building in the installation directory.")
# force cleanup before installation
self.cfg['cleanupoldbuild'] = True
self.cfg['keeppreviousinstall'] = False
# avoid cleanup after installation
self.cfg['cleanupoldinstall'] = False
# always make build dir
self.make_dir(self.builddir, self.cfg['cleanupoldbuild'])
def gen_installdir(self):
"""
Generate the name of the installation directory.
"""
basepath = install_path()
if basepath:
self.install_subdir = ActiveMNS().det_install_subdir(self.cfg)
self.installdir = os.path.join(os.path.abspath(basepath), self.install_subdir)
self.log.info("Install dir set to %s" % self.installdir)
else:
raise EasyBuildError("Can't set installation directory")
def make_installdir(self, dontcreate=None):
"""
Create the installation directory.
"""
self.log.debug("Creating the installation directory %s (cleanup: %s)" % (self.installdir,
self.cfg['cleanupoldinstall']))
if self.build_in_installdir:
self.cfg['keeppreviousinstall'] = True
dontcreate = (dontcreate is None and self.cfg['dontcreateinstalldir']) or dontcreate
self.make_dir(self.installdir, self.cfg['cleanupoldinstall'], dontcreateinstalldir=dontcreate)
def make_dir(self, dir_name, clean, dontcreateinstalldir=False):
"""
Create the directory.
"""
if os.path.exists(dir_name):
self.log.info("Found old directory %s" % dir_name)
if self.cfg['keeppreviousinstall']:
self.log.info("Keeping old directory %s (hopefully you know what you are doing)" % dir_name)
return
elif clean:
try:
rmtree2(dir_name)
self.log.info("Removed old directory %s" % dir_name)
except OSError, err:
raise EasyBuildError("Removal of old directory %s failed: %s", dir_name, err)
else:
try:
timestamp = time.strftime("%Y%m%d-%H%M%S")
backupdir = "%s.%s" % (dir_name, timestamp)
shutil.move(dir_name, backupdir)
self.log.info("Moved old directory %s to %s" % (dir_name, backupdir))
except OSError, err:
raise EasyBuildError("Moving old directory to backup %s %s failed: %s", dir_name, backupdir, err)
if dontcreateinstalldir:
olddir = dir_name
dir_name = os.path.dirname(dir_name)
self.log.info("Cleaning only, no actual creation of %s, only verification/defining of dirname %s" % (olddir, dir_name))
if os.path.exists(dir_name):
return
# if not, create dir as usual
mkdir(dir_name, parents=True)
#
# MODULE UTILITY FUNCTIONS
#
def make_devel_module(self, create_in_builddir=False):
"""
Create a develop module file which sets environment based on the build
Usage: module load name, which loads the module you want to use. $EBDEVELNAME should then be the full path
to the devel module file. So now you can module load $EBDEVELNAME.
WARNING: you cannot unload using $EBDEVELNAME (for now: use module unload `basename $EBDEVELNAME`)
"""
self.log.info("Making devel module...")
# load fake module
fake_mod_data = self.load_fake_module(purge=True)
header = self.module_generator.MODULE_HEADER
if header:
header += '\n'
load_lines = []
# capture all the EBDEVEL vars
# these should be all the dependencies and we should load them
for key in os.environ:
# legacy support
if key.startswith(DEVEL_ENV_VAR_NAME_PREFIX):
if not key.endswith(convert_name(self.name, upper=True)):
path = os.environ[key]
if os.path.isfile(path):
mod_name = path.rsplit(os.path.sep, 1)[-1]
load_lines.append(self.module_generator.load_module(mod_name))
elif key.startswith('SOFTDEVEL'):
self.log.nosupport("Environment variable SOFTDEVEL* being relied on", '2.0')
env_lines = []
for (key, val) in env.get_changes().items():
# check if non-empty string
# TODO: add unset for empty vars?
if val.strip():
env_lines.append(self.module_generator.set_environment(key, val))
if create_in_builddir:
output_dir = self.builddir
else:
output_dir = os.path.join(self.installdir, log_path())
mkdir(output_dir, parents=True)
filename = os.path.join(output_dir, ActiveMNS().det_devel_module_filename(self.cfg))
self.log.debug("Writing devel module to %s" % filename)
txt = ''.join([header] + load_lines + env_lines)
write_file(filename, txt)
# cleanup: unload fake module, remove fake module dir
self.clean_up_fake_module(fake_mod_data)
def make_module_dep(self):
"""
Make the dependencies for the module file.
"""
deps = []
mns = ActiveMNS()
# include load statements for toolchain, either directly or for toolchain dependencies
if self.toolchain.name != DUMMY_TOOLCHAIN_NAME:
if mns.expand_toolchain_load():
mod_names = self.toolchain.toolchain_dep_mods
deps.extend(mod_names)
self.log.debug("Adding toolchain components as module dependencies: %s" % mod_names)
else:
deps.append(self.toolchain.det_short_module_name())
self.log.debug("Adding toolchain %s as a module dependency" % deps[-1])
# include load/unload statements for dependencies
builddeps = self.cfg.builddependencies()
# include 'module load' statements for dependencies in reverse order
for dep in self.toolchain.dependencies:
if not dep in builddeps:
modname = dep['short_mod_name']
self.log.debug("Adding %s as a module dependency" % modname)
deps.append(modname)
else:
self.log.debug("Skipping build dependency %s" % str(dep))
self.log.debug("Full list of dependencies: %s" % deps)
# exclude dependencies that extend $MODULEPATH and form the path to the top of the module tree (if any)
mod_install_path = os.path.join(install_path('mod'), build_option('suffix_modules_path'))
full_mod_subdir = os.path.join(mod_install_path, self.cfg.mod_subdir)
init_modpaths = mns.det_init_modulepaths(self.cfg)
top_paths = [mod_install_path] + [os.path.join(mod_install_path, p) for p in init_modpaths]
excluded_deps = self.modules_tool.path_to_top_of_module_tree(top_paths, self.cfg.short_mod_name,
full_mod_subdir, deps)
deps = [d for d in deps if d not in excluded_deps]
self.log.debug("List of retained dependencies: %s" % deps)
loads = [self.module_generator.load_module(d) for d in deps]
unloads = [self.module_generator.unload_module(d) for d in deps[::-1]]
# Force unloading any other modules
if self.cfg['moduleforceunload']:
return ''.join(unloads) + ''.join(loads)
else:
return ''.join(loads)
def make_module_description(self):
"""
Create the module description.
"""
return self.module_generator.get_description()
def make_module_extra(self):
"""
Sets optional variables (EBROOT, MPI tuning variables).
"""
lines = ['']
# EBROOT + EBVERSION + EBDEVEL
env_name = convert_name(self.name, upper=True)
lines.append(self.module_generator.set_environment(ROOT_ENV_VAR_NAME_PREFIX + env_name, '', relpath=True))
lines.append(self.module_generator.set_environment(VERSION_ENV_VAR_NAME_PREFIX + env_name, self.version))
devel_path = os.path.join(log_path(), ActiveMNS().det_devel_module_filename(self.cfg))
devel_path_envvar = DEVEL_ENV_VAR_NAME_PREFIX + env_name
lines.append(self.module_generator.set_environment(devel_path_envvar, devel_path, relpath=True))
lines.append('\n')
for (key, value) in self.cfg['modextravars'].items():
lines.append(self.module_generator.set_environment(key, value))
for (key, value) in self.cfg['modextrapaths'].items():
if isinstance(value, basestring):
value = [value]
elif not isinstance(value, (tuple, list)):
raise EasyBuildError("modextrapaths dict value %s (type: %s) is not a list or tuple",
value, type(value))
lines.append(self.module_generator.prepend_paths(key, value))
if self.cfg['modloadmsg']:
lines.append(self.module_generator.msg_on_load(self.cfg['modloadmsg']))
if self.cfg['modtclfooter']:
if isinstance(self.module_generator, ModuleGeneratorTcl):
self.log.debug("Including Tcl footer in module: %s", self.cfg['modtclfooter'])
lines.extend([self.cfg['modtclfooter'], '\n'])
else:
self.log.warning("Not including footer in Tcl syntax in non-Tcl module file: %s",
self.cfg['modtclfooter'])
if self.cfg['modluafooter']:
if isinstance(self.module_generator, ModuleGeneratorLua):
self.log.debug("Including Lua footer in module: %s", self.cfg['modluafooter'])
lines.extend([self.cfg['modluafooter'], '\n'])
else:
self.log.warning("Not including footer in Lua syntax in non-Lua module file: %s",
self.cfg['modluafooter'])
for (key, value) in self.cfg['modaliases'].items():
lines.append(self.module_generator.set_alias(key, value))
txt = ''.join(lines)
self.log.debug("make_module_extra added this: %s", txt)
return txt
def make_module_extra_extensions(self):
"""
Sets optional variables for extensions.
"""
# add stuff specific to individual extensions
lines = [self.module_extra_extensions]
# set environment variable that specifies list of extensions
if self.exts_all:
exts_list = ','.join(['%s-%s' % (ext['name'], ext.get('version', '')) for ext in self.exts_all])
env_var_name = convert_name(self.name, upper=True)
lines.append(self.module_generator.set_environment('EBEXTSLIST%s' % env_var_name, exts_list))
return ''.join(lines)
def make_module_footer(self):
"""
Insert a footer section in the modulefile, primarily meant for contextual information
"""
footer = [self.module_generator.comment("Built with EasyBuild version %s" % VERBOSE_VERSION)]
# add extra stuff for extensions (if any)
if self.cfg['exts_list']:
footer.append(self.make_module_extra_extensions())
# include modules footer if one is specified
if self.modules_footer is not None:
self.log.debug("Including specified footer into module: '%s'" % self.modules_footer)
footer.append(self.modules_footer)
return ''.join(footer)
def make_module_extend_modpath(self):
"""
Include prepend-path statements for extending $MODULEPATH.
"""
txt = ''
if self.cfg['include_modpath_extensions']:
top_modpath = install_path('mod')
mod_path_suffix = build_option('suffix_modules_path')
modpath_exts = ActiveMNS().det_modpath_extensions(self.cfg)
self.log.debug("Including module path extensions returned by module naming scheme: %s" % modpath_exts)
full_path_modpath_extensions = [os.path.join(top_modpath, mod_path_suffix, ext) for ext in modpath_exts]
# module path extensions must exist, otherwise loading this module file will fail
for modpath_extension in full_path_modpath_extensions:
mkdir(modpath_extension, parents=True)
txt = self.module_generator.use(full_path_modpath_extensions)
else:
self.log.debug("Not including module path extensions, as specified.")
return txt
def make_module_req(self):
"""
Generate the environment-variables to run the module.
"""
requirements = self.make_module_req_guess()
lines = []
if os.path.isdir(self.installdir):
try:
os.chdir(self.installdir)
except OSError, err:
raise EasyBuildError("Failed to change to %s: %s", self.installdir, err)
lines.append('\n')
for key in sorted(requirements):
for path in requirements[key]:
paths = sorted(glob.glob(path))
if paths:
lines.append(self.module_generator.prepend_paths(key, paths))
try:
os.chdir(self.orig_workdir)
except OSError, err:
raise EasyBuildError("Failed to change back to %s: %s", self.orig_workdir, err)
return ''.join(lines)
def make_module_req_guess(self):
"""
A dictionary of possible directories to look for.
"""
return {
'PATH': ['bin', 'sbin'],
'LD_LIBRARY_PATH': ['lib', 'lib64', 'lib32'],
'LIBRARY_PATH': ['lib', 'lib64', 'lib32'],
'CPATH': ['include'],
'MANPATH': ['man', 'share/man'],
'PKG_CONFIG_PATH': ['lib/pkgconfig', 'share/pkgconfig'],
'ACLOCAL_PATH': ['share/aclocal'],
'CLASSPATH': ['*.jar'],
}
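# Sketch, not in the original file: an easyblock can extend these guesses by
# overriding make_module_req_guess(), e.g. to also pick up a 'share/info'
# directory ('EB_Foo' is an assumed class name):
#
#   class EB_Foo(EasyBlock):
#       def make_module_req_guess(self):
#           guesses = super(EB_Foo, self).make_module_req_guess()
#           guesses['INFOPATH'] = ['share/info']
#           return guesses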
def load_module(self, mod_paths=None, purge=True):
"""
Load module for this software package/version, after purging all currently loaded modules.
"""
# self.full_mod_name might not be set (e.g. during unit tests)
if self.full_mod_name is not None:
if mod_paths is None:
mod_paths = []
all_mod_paths = mod_paths + ActiveMNS().det_init_modulepaths(self.cfg)
mods = [self.full_mod_name]
self.modules_tool.load(mods, mod_paths=all_mod_paths, purge=purge, init_env=self.initial_environ)
else:
self.log.warning("Not loading module, since self.full_mod_name is not set.")
def load_fake_module(self, purge=False):
"""
Create and load fake module.
"""
# take a copy of the current environment before loading the fake module, so we can restore it
env = copy.deepcopy(os.environ)
# create fake module
fake_mod_path = self.make_module_step(fake=True)
# load fake module
self.modules_tool.prepend_module_path(fake_mod_path)
self.load_module(purge=purge)
return (fake_mod_path, env)
def clean_up_fake_module(self, fake_mod_data):
"""
Clean up fake module.
"""
fake_mod_path, env = fake_mod_data
# unload module and remove temporary module directory
# self.full_mod_name might not be set (e.g. during unit tests)
if fake_mod_path and self.full_mod_name is not None:
try:
self.modules_tool.unload([self.full_mod_name])
self.modules_tool.remove_module_path(fake_mod_path)
rmtree2(os.path.dirname(fake_mod_path))
except OSError, err:
raise EasyBuildError("Failed to clean up fake module dir %s: %s", fake_mod_path, err)
elif self.full_mod_name is None:
self.log.warning("Not unloading module, since self.full_mod_name is not set.")
# restore original environment
restore_env(env)
def load_dependency_modules(self):
"""Load dependency modules."""
self.modules_tool.load([ActiveMNS().det_full_module_name(dep) for dep in self.cfg.dependencies()])
#
# EXTENSIONS UTILITY FUNCTIONS
#
def prepare_for_extensions(self):
"""
Hook that is run before extensions are installed (e.g. to set the template)
"""
pass
def skip_extensions(self):
"""
Called when self.skip is True
- use this to detect existing extensions and to remove them from self.exts
- based on initial R version
"""
# disabling templating is required here to support legacy string templates like name/version
self.cfg.enable_templating = False
exts_filter = self.cfg['exts_filter']
self.cfg.enable_templating = True
if not exts_filter or len(exts_filter) == 0:
raise EasyBuildError("Skipping of extensions, but no exts_filter set in easyconfig")
elif isinstance(exts_filter, basestring) or len(exts_filter) != 2:
raise EasyBuildError('exts_filter should be a list or tuple of ("command","input")')
cmdtmpl = exts_filter[0]
cmdinputtmpl = exts_filter[1]
if not self.exts:
self.exts = []
res = []
for ext in self.exts:
name = ext['name']
if 'options' in ext and 'modulename' in ext['options']:
modname = ext['options']['modulename']
else:
modname = name
tmpldict = {
'ext_name': modname,
'ext_version': ext.get('version'),
'src': ext.get('source'),
}
try:
cmd = cmdtmpl % tmpldict
except KeyError, err:
msg = "KeyError occured on completing extension filter template: %s; "
msg += "'name'/'version' keys are no longer supported, should use 'ext_name'/'ext_version' instead"
self.log.nosupport(msg, '2.0')
if cmdinputtmpl:
stdin = cmdinputtmpl % tmpldict
(cmdstdouterr, ec) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, inp=stdin, regexp=False)
else:
(cmdstdouterr, ec) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False)
self.log.info("exts_filter result %s %s", cmdstdouterr, ec)<|fim▁hole|> self.log.debug("exit code: %s, stdout/err: %s" % (ec, cmdstdouterr))
res.append(ext)
else:
self.log.info("Skipping %s" % name)
self.exts = res
#
# MISCELLANEOUS UTILITY FUNCTIONS
#
def guess_start_dir(self):
"""
Return the directory where to start the whole configure/make/make install cycle from
- typically self.src[0]['finalpath']
- start_dir option
-- if abspath: use that
-- else, treat it as subdir for regular procedure
"""
tmpdir = ''
if self.cfg['start_dir']:
tmpdir = self.cfg['start_dir']
if not os.path.isabs(tmpdir):
if len(self.src) > 0 and not self.skip and self.src[0]['finalpath']:
self.cfg['start_dir'] = os.path.join(self.src[0]['finalpath'], tmpdir)
else:
self.cfg['start_dir'] = os.path.join(self.builddir, tmpdir)
try:
os.chdir(self.cfg['start_dir'])
self.log.debug("Changed to real build directory %s" % (self.cfg['start_dir']))
except OSError, err:
raise EasyBuildError("Can't change to real build directory %s: %s", self.cfg['start_dir'], err)
def handle_iterate_opts(self):
"""Handle options relevant during iterated part of build/install procedure."""
# disable templating in this function, since we're messing about with values in self.cfg
self.cfg.enable_templating = False
# handle configure/build/install options that are specified as lists
# set first element to be used, keep track of list in *_list options dictionary
# this will only be done during first iteration, since after that the options won't be lists anymore
suffix = "_list"
sufflen = len(suffix)
for opt in ITERATE_OPTIONS:
# keep track of list, supply first element as first option to handle
if isinstance(self.cfg[opt], (list, tuple)):
self.iter_opts[opt + suffix] = self.cfg[opt] # copy
self.log.debug("Found list for %s: %s" % (opt, self.iter_opts[opt + suffix]))
# pop first element from all *_list options as next value to use
for (lsname, ls) in self.iter_opts.items():
opt = lsname[:-sufflen] # drop '_list' part from name to get option name
if len(self.iter_opts[lsname]) > 0:
self.cfg[opt] = self.iter_opts[lsname].pop(0) # first element will be used next
else:
self.cfg[opt] = '' # empty list => empty option as next value
self.log.debug("Next value for %s: %s" % (opt, str(self.cfg[opt])))
# re-enable templating before self.cfg values are used
self.cfg.enable_templating = True
def det_iter_cnt(self):
"""Determine iteration count based on configure/build/install options that may be lists."""
iter_cnt = max([1] + [len(self.cfg[opt]) for opt in ITERATE_OPTIONS
if isinstance(self.cfg[opt], (list, tuple))])
return iter_cnt
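# Worked example for the iteration handling above (illustrative values only):
# with configopts = ['--enable-a', '--enable-b', '--enable-c'] and all other
# iterated options plain strings, det_iter_cnt() returns 3, and successive
# calls to handle_iterate_opts() pop '--enable-a', '--enable-b' and
# '--enable-c' as the configure options for consecutive iterations.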
#
# STEP FUNCTIONS
#
def check_readiness_step(self):
"""
Verify if all is ok to start build.
"""
# set level of parallelism for build
par = build_option('parallel')
if self.cfg['parallel']:
if par is None:
par = self.cfg['parallel']
self.log.debug("Desired parallelism specified via 'parallel' easyconfig parameter: %s", par)
else:
par = min(int(par), int(self.cfg['parallel']))
self.log.debug("Desired parallelism: minimum of 'parallel' build option/easyconfig parameter: %s", par)
else:
self.log.debug("Desired parallelism specified via 'parallel' build option: %s", par)
self.cfg['parallel'] = det_parallelism(par=par, maxpar=self.cfg['maxparallel'])
self.log.info("Setting parallelism: %s" % self.cfg['parallel'])
# check whether modules are loaded
loadedmods = self.modules_tool.loaded_modules()
if len(loadedmods) > 0:
self.log.warning("Loaded modules detected: %s" % loadedmods)
# do all dependencies have a toolchain version?
self.toolchain.add_dependencies(self.cfg.dependencies())
if not len(self.cfg.dependencies()) == len(self.toolchain.dependencies):
self.log.debug("dep %s (%s)" % (len(self.cfg.dependencies()), self.cfg.dependencies()))
self.log.debug("tc.dep %s (%s)" % (len(self.toolchain.dependencies), self.toolchain.dependencies))
raise EasyBuildError('Not all dependencies have a matching toolchain version')
# check if the application is not loaded at the moment
(root, env_var) = get_software_root(self.name, with_env_var=True)
if root:
raise EasyBuildError("Module is already loaded (%s is set), installation cannot continue.", env_var)
# check if main install needs to be skipped
# - if a current module can be found, skip is ok
# -- this is potentially very dangerous
if self.cfg['skip']:
if self.modules_tool.exist([self.full_mod_name])[0]:
self.skip = True
self.log.info("Module %s found." % self.full_mod_name)
self.log.info("Going to skip actual main build and potential existing extensions. Expert only.")
else:
self.log.info("No module %s found. Not skipping anything." % self.full_mod_name)
def fetch_step(self, skip_checksums=False):
"""
prepare for building
"""
# check EasyBuild version
easybuild_version = self.cfg['easybuild_version']
if not easybuild_version:
self.log.warn("Easyconfig does not specify an EasyBuild-version (key 'easybuild_version')! "
"Assuming the latest version")
else:
if LooseVersion(easybuild_version) < VERSION:
self.log.warn("EasyBuild-version %s is older than the currently running one. Proceed with caution!",
easybuild_version)
elif LooseVersion(easybuild_version) > VERSION:
raise EasyBuildError("EasyBuild-version %s is newer than the currently running one. Aborting!",
easybuild_version)
# fetch sources
if self.cfg['sources']:
self.fetch_sources(self.cfg['sources'], checksums=self.cfg['checksums'])
else:
self.log.info('no sources provided')
# fetch extensions
if len(self.cfg['exts_list']) > 0:
self.exts = self.fetch_extension_sources()
# fetch patches
if self.cfg['patches']:
if isinstance(self.cfg['checksums'], (list, tuple)):
# if checksums are provided as a list, first entries are assumed to be for sources
patches_checksums = self.cfg['checksums'][len(self.cfg['sources']):]
else:
patches_checksums = self.cfg['checksums']
self.fetch_patches(checksums=patches_checksums)
else:
self.log.info('no patches provided')
# compute checksums for all source and patch files
if not skip_checksums:
for fil in self.src + self.patches:
check_sum = compute_checksum(fil['path'], checksum_type=DEFAULT_CHECKSUM)
fil[DEFAULT_CHECKSUM] = check_sum
self.log.info("%s checksum for %s: %s" % (DEFAULT_CHECKSUM, fil['path'], fil[DEFAULT_CHECKSUM]))
# create parent dirs in install and modules path already
# this is required when building in parallel
mod_path_suffix = build_option('suffix_modules_path')
mod_symlink_paths = ActiveMNS().det_module_symlink_paths(self.cfg)
parent_subdir = os.path.dirname(self.install_subdir)
pardirs = [
os.path.join(install_path(), parent_subdir),
os.path.join(install_path('mod'), mod_path_suffix, parent_subdir),
]
for mod_symlink_path in mod_symlink_paths:
pardirs.append(os.path.join(install_path('mod'), mod_symlink_path, parent_subdir))
self.log.info("Checking dirs that need to be created: %s" % pardirs)
for pardir in pardirs:
mkdir(pardir, parents=True)
def checksum_step(self):
"""Verify checksum of sources and patches, if a checksum is available."""
for fil in self.src + self.patches:
ok = verify_checksum(fil['path'], fil['checksum'])
if not ok:
raise EasyBuildError("Checksum verification for %s using %s failed.", fil['path'], fil['checksum'])
else:
self.log.info("Checksum verification for %s using %s passed." % (fil['path'], fil['checksum']))
def extract_step(self):
"""
Unpack the source files.
"""
for src in self.src:
self.log.info("Unpacking source %s" % src['name'])
srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], extra_options=self.cfg['unpack_options'])
if srcdir:
self.src[self.src.index(src)]['finalpath'] = srcdir
else:
raise EasyBuildError("Unpacking source %s failed", src['name'])
def patch_step(self, beginpath=None):
"""
Apply the patches
"""
for patch in self.patches:
self.log.info("Applying patch %s" % patch['name'])
# patch source at specified index (first source if not specified)
srcind = patch.get('source', 0)
# if patch level is specified, use that (otherwise let apply_patch derive patch level)
level = patch.get('level', None)
# determine suffix of source path to apply patch in (if any)
srcpathsuffix = patch.get('sourcepath', patch.get('copy', ''))
# determine whether 'patch' file should be copied rather than applied
copy_patch = 'copy' in patch and not 'sourcepath' in patch
self.log.debug("Source index: %s; patch level: %s; source path suffix: %s; copy patch: %s",
srcind, level, srcpathsuffix, copy_patch)
if beginpath is None:
try:
beginpath = self.src[srcind]['finalpath']
self.log.debug("Determine begin path for patch %s: %s" % (patch['name'], beginpath))
except IndexError, err:
raise EasyBuildError("Can't apply patch %s to source at index %s of list %s: %s",
patch['name'], srcind, self.src, err)
else:
self.log.debug("Using specified begin path for patch %s: %s" % (patch['name'], beginpath))
src = os.path.abspath("%s/%s" % (beginpath, srcpathsuffix))
self.log.debug("Applying patch %s in path %s" % (patch, src))
if not apply_patch(patch['path'], src, copy=copy_patch, level=level):
raise EasyBuildError("Applying patch %s failed", patch['name'])
def prepare_step(self):
"""
Pre-configure step. Sets up the builddir just before starting configure.
"""
# clean environment, undefine any unwanted environment variables that may be harmful
self.cfg['unwanted_env_vars'] = env.unset_env_vars(self.cfg['unwanted_env_vars'])
# prepare toolchain: load toolchain module and dependencies, set up build environment
self.toolchain.prepare(self.cfg['onlytcmod'])
# guess directory to start configure/build/install process in, and move there
self.guess_start_dir()
def configure_step(self):
"""Configure build (abstract method)."""
raise NotImplementedError
def build_step(self):
"""Build software (abstract method)."""
raise NotImplementedError
def test_step(self):
"""Run unit tests provided by software (if any)."""
if self.cfg['runtest']:
self.log.debug("Trying to execute %s as a command for running unit tests...")
(out, _) = run_cmd(self.cfg['runtest'], log_all=True, simple=False)
return out
def stage_install_step(self):
"""
Install in a stage directory before actual installation.
"""
pass
def install_step(self):
"""Install built software (abstract method)."""
raise NotImplementedError
def extensions_step(self, fetch=False):
"""
After make install, run this.
- only if variable len(exts_list) > 0
- optionally: load module that was just created using temp module file
- find source for extensions, in 'extensions' (and 'packages' for legacy reasons)
- run extra_extensions
"""
if len(self.cfg['exts_list']) == 0:
self.log.debug("No extensions in exts_list")
return
# load fake module
fake_mod_data = self.load_fake_module(purge=True)
self.prepare_for_extensions()
if fetch:
self.exts = self.fetch_extension_sources()
self.exts_all = self.exts[:] # retain a copy of all extensions, regardless of filtering/skipping
if self.skip:
self.skip_extensions()
# actually install extensions
self.log.debug("Installing extensions")
exts_defaultclass = self.cfg['exts_defaultclass']
exts_classmap = self.cfg['exts_classmap']
# we really need a default class
if not exts_defaultclass:
self.clean_up_fake_module(fake_mod_data)
raise EasyBuildError("ERROR: No default extension class set for %s", self.name)
# obtain name and module path for default extension class
if hasattr(exts_defaultclass, '__iter__'):
self.log.nosupport("Module path for default class is explicitly defined", '2.0')
elif isinstance(exts_defaultclass, basestring):
# proper way: derive module path from specified class name
default_class = exts_defaultclass
default_class_modpath = get_module_path(default_class, generic=True)
else:
raise EasyBuildError("Improper default extension class specification, should be list/tuple or string.")
# get class instances for all extensions
for ext in self.exts:
self.log.debug("Starting extension %s" % ext['name'])
# always go back to original work dir to avoid running stuff from a dir that no longer exists
os.chdir(self.orig_workdir)
cls, inst = None, None
class_name = encode_class_name(ext['name'])
mod_path = get_module_path(class_name)
# try instantiating extension-specific class
try:
# no error when importing class fails, in case we run into an existing easyblock
# with a similar name (e.g., Perl Extension 'GO' vs 'Go' for which 'EB_Go' is available)
cls = get_easyblock_class(None, name=ext['name'], default_fallback=False, error_on_failed_import=False)
self.log.debug("Obtained class %s for extension %s" % (cls, ext['name']))
if cls is not None:
inst = cls(self, ext)
except (ImportError, NameError), err:
self.log.debug("Failed to use extension-specific class for extension %s: %s" % (ext['name'], err))
# alternative attempt: use class specified in class map (if any)
if inst is None and ext['name'] in exts_classmap:
class_name = exts_classmap[ext['name']]
mod_path = get_module_path(class_name)
try:
cls = get_class_for(mod_path, class_name)
inst = cls(self, ext)
except (ImportError, NameError), err:
raise EasyBuildError("Failed to load specified class %s for extension %s: %s",
class_name, ext['name'], err)
# fallback attempt: use default class
if inst is None:
try:
cls = get_class_for(default_class_modpath, default_class)
self.log.debug("Obtained class %s for installing extension %s" % (cls, ext['name']))
inst = cls(self, ext)
self.log.debug("Installing extension %s with default class %s (from %s)",
ext['name'], default_class, default_class_modpath)
except (ImportError, NameError), err:
raise EasyBuildError("Also failed to use default class %s from %s for extension %s: %s, giving up",
default_class, default_class_modpath, ext['name'], err)
else:
self.log.debug("Installing extension %s with class %s (from %s)" % (ext['name'], class_name, mod_path))
# real work
inst.prerun()
txt = inst.run()
if txt:
self.module_extra_extensions += txt
inst.postrun()
# append so we can make use of it later (in sanity_check_step)
self.ext_instances.append(inst)
# cleanup (unload fake module, remove fake module dir)
self.clean_up_fake_module(fake_mod_data)
def package_step(self):
"""Package installed software (e.g., into an RPM), if requested, using selected package tool."""
if build_option('package'):
pkgtype = build_option('package_type')
pkgdir_dest = os.path.abspath(package_path())
opt_force = build_option('force')
self.log.info("Generating %s package in %s", pkgtype, pkgdir_dest)
pkgdir_src = package(self)
mkdir(pkgdir_dest)
for src_file in glob.glob(os.path.join(pkgdir_src, "*.%s" % pkgtype)):
dest_file = os.path.join(pkgdir_dest, os.path.basename(src_file))
if os.path.exists(dest_file) and not opt_force:
raise EasyBuildError("Unable to copy package %s to %s (already exists).", src_file, dest_file)
else:
self.log.info("Copied package %s to %s", src_file, pkgdir_dest)
shutil.copy(src_file, pkgdir_dest)
else:
self.log.info("Skipping package step (not enabled)")
def post_install_step(self):
"""
Do some postprocessing
- run post install commands if any were specified
"""
if self.cfg['postinstallcmds'] is not None:
# make sure we have a list of commands
if not isinstance(self.cfg['postinstallcmds'], (list, tuple)):
raise EasyBuildError("Invalid value for 'postinstallcmds', should be list or tuple of strings.")
for cmd in self.cfg['postinstallcmds']:
if not isinstance(cmd, basestring):
raise EasyBuildError("Invalid element in 'postinstallcmds', not a string: %s", cmd)
run_cmd(cmd, simple=True, log_ok=True, log_all=True)
def sanity_check_step(self, custom_paths=None, custom_commands=None, extension=False):
"""
Do a sanity check on the installation
- if *any* of the files/subdirectories in the installation directory listed
in sanity_check_paths are non-existent (or empty), the sanity check fails
"""
# supported/required keys for sanity check paths, along with the function used to check the paths
path_keys_and_check = {
'files': lambda fp: os.path.exists(fp), # files must exist
'dirs': lambda dp: os.path.isdir(dp) and os.listdir(dp), # directories must exist and be non-empty
}
# prepare sanity check paths
paths = self.cfg['sanity_check_paths']
if not paths:
if custom_paths:
paths = custom_paths
self.log.info("Using customized sanity check paths: %s" % paths)
else:
paths = {}
for key in path_keys_and_check:
paths.setdefault(key, [])
paths.update({'dirs': ['bin', ('lib', 'lib64')]})
self.log.info("Using default sanity check paths: %s" % paths)
else:
self.log.info("Using specified sanity check paths: %s" % paths)
# check sanity check paths
ks = sorted(paths.keys())
valnottypes = [not isinstance(x, list) for x in paths.values()]
lenvals = [len(x) for x in paths.values()]
req_keys = sorted(path_keys_and_check.keys())
if not ks == req_keys or sum(valnottypes) > 0 or sum(lenvals) == 0:
raise EasyBuildError("Incorrect format for sanity_check_paths (should (only) have %s keys, "
"values should be lists (at least one non-empty)).", ','.join(req_keys))
for key, check_fn in path_keys_and_check.items():
for xs in paths[key]:
if isinstance(xs, basestring):
xs = (xs,)
elif not isinstance(xs, tuple):
raise EasyBuildError("Unsupported type '%s' encountered in %s, not a string or tuple",
key, type(xs))
found = False
for name in xs:
path = os.path.join(self.installdir, name)
if os.path.exists(path):
self.log.debug("Sanity check: found %s %s in %s" % (key[:-1], name, self.installdir))
found = True
break
else:
self.log.debug("Could not find %s %s in %s" % (key[:-1], name, self.installdir))
if not found:
self.sanity_check_fail_msgs.append("no %s of %s in %s" % (key[:-1], xs, self.installdir))
self.log.warning("Sanity check: %s" % self.sanity_check_fail_msgs[-1])
fake_mod_data = None
if not extension:
try:
# unload all loaded modules before loading fake module
# this ensures that loading of dependencies is tested, and avoids conflicts with build dependencies
fake_mod_data = self.load_fake_module(purge=True)
except EasyBuildError, err:
self.sanity_check_fail_msgs.append("loading fake module failed: %s" % err)
self.log.warning("Sanity check: %s" % self.sanity_check_fail_msgs[-1])
# chdir to installdir (better environment for running tests)
if os.path.isdir(self.installdir):
try:
os.chdir(self.installdir)
except OSError, err:
raise EasyBuildError("Failed to move to installdir %s: %s", self.installdir, err)
# run sanity check commands
commands = self.cfg['sanity_check_commands']
if not commands:
if custom_commands:
commands = custom_commands
self.log.info("Using customised sanity check commands: %s" % commands)
else:
commands = []
self.log.info("Using specified sanity check commands: %s" % commands)
for command in commands:
# set command to default. This allows for config files with
# non-tuple commands
if not isinstance(command, tuple):
self.log.debug("Setting sanity check command to default")
command = (None, None)
# Build substitution dictionary
check_cmd = {'name': self.name.lower(), 'options': '-h'}
if command[0] is not None:
check_cmd['name'] = command[0]
if command[1] is not None:
check_cmd['options'] = command[1]
cmd = "%(name)s %(options)s" % check_cmd
out, ec = run_cmd(cmd, simple=False, log_ok=False, log_all=False)
if ec != 0:
self.sanity_check_fail_msgs.append("sanity check command %s exited with code %s (output: %s)" % (cmd, ec, out))
self.log.warning("Sanity check: %s" % self.sanity_check_fail_msgs[-1])
else:
self.log.debug("sanity check command %s ran successfully! (output: %s)" % (cmd, out))
if not extension:
failed_exts = [ext.name for ext in self.ext_instances if not ext.sanity_check_step()]
if failed_exts:
self.sanity_check_fail_msgs.append("sanity checks for %s extensions failed!" % failed_exts)
self.log.warning("Sanity check: %s" % self.sanity_check_fail_msgs[-1])
# cleanup
if fake_mod_data:
self.clean_up_fake_module(fake_mod_data)
# pass or fail
if self.sanity_check_fail_msgs:
raise EasyBuildError("Sanity check failed: %s", ', '.join(self.sanity_check_fail_msgs))
else:
self.log.debug("Sanity check passed!")
def cleanup_step(self):
"""
        Cleanup leftover mess: remove/clean the build directory,
        except when we're building in the installation directory or
        cleanup_builddir is False.
"""
if not self.build_in_installdir and build_option('cleanup_builddir'):
try:
os.chdir(self.orig_workdir) # make sure we're out of the dir we're removing
self.log.info("Cleaning up builddir %s (in %s)" % (self.builddir, os.getcwd()))
rmtree2(self.builddir)
base = os.path.dirname(self.builddir)
# keep removing empty directories until we either find a non-empty one
# or we end up in the root builddir
while len(os.listdir(base)) == 0 and not os.path.samefile(base, build_path()):
os.rmdir(base)
base = os.path.dirname(base)
except OSError, err:
raise EasyBuildError("Cleaning up builddir %s failed: %s", self.builddir, err)
if not build_option('cleanup_builddir'):
self.log.info("Keeping builddir %s" % self.builddir)
env.restore_env_vars(self.cfg['unwanted_env_vars'])
def make_module_step(self, fake=False):
"""
Generate a module file.
"""
modpath = self.module_generator.prepare(fake=fake)
txt = self.make_module_description()
txt += self.make_module_dep()
txt += self.make_module_extend_modpath()
txt += self.make_module_req()
txt += self.make_module_extra()
txt += self.make_module_footer()
mod_filepath = self.module_generator.get_module_filepath(fake=fake)
write_file(mod_filepath, txt)
self.log.info("Module file %s written: %s", mod_filepath, txt)
# only update after generating final module file
if not fake:
self.modules_tool.update()
mod_symlink_paths = ActiveMNS().det_module_symlink_paths(self.cfg)
self.module_generator.create_symlinks(mod_symlink_paths, fake=fake)
if not fake:
self.make_devel_module()
return modpath
def permissions_step(self):
"""
Finalize installation procedure: adjust permissions as configured, change group ownership (if requested).
Installing user must be member of the group that it is changed to.
"""
if self.group is not None:
# remove permissions for others, and set group ID
try:
perms = stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH
adjust_permissions(self.installdir, perms, add=False, recursive=True, group_id=self.group[1],
relative=True, ignore_errors=True)
except EasyBuildError, err:
raise EasyBuildError("Unable to change group permissions of file(s): %s", err)
self.log.info("Successfully made software only available for group %s (gid %s)" % self.group)
if build_option('read_only_installdir'):
# remove write permissions for everyone
perms = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
adjust_permissions(self.installdir, perms, add=False, recursive=True, relative=True, ignore_errors=True)
self.log.info("Successfully removed write permissions recursively for *EVERYONE* on install dir.")
elif build_option('group_writable_installdir'):
# enable write permissions for group
perms = stat.S_IWGRP
adjust_permissions(self.installdir, perms, add=True, recursive=True, relative=True, ignore_errors=True)
self.log.info("Successfully enabled write permissions recursively for group on install dir.")
else:
# remove write permissions for group and other
perms = stat.S_IWGRP | stat.S_IWOTH
adjust_permissions(self.installdir, perms, add=False, recursive=True, relative=True, ignore_errors=True)
self.log.info("Successfully removed write permissions recursively for group/other on install dir.")
def test_cases_step(self):
"""
Run provided test cases.
"""
for test in self.cfg['tests']:
os.chdir(self.orig_workdir)
if os.path.isabs(test):
path = test
else:
for source_path in source_paths():
path = os.path.join(source_path, self.name, test)
if os.path.exists(path):
break
if not os.path.exists(path):
raise EasyBuildError("Test specifies invalid path: %s", path)
try:
self.log.debug("Running test %s" % path)
run_cmd(path, log_all=True, simple=True)
except EasyBuildError, err:
raise EasyBuildError("Running test %s failed: %s", path, err)
def update_config_template_run_step(self):
"""Update the the easyconfig template dictionary with easyconfig.TEMPLATE_NAMES_EASYBLOCK_RUN_STEP names"""
for name in TEMPLATE_NAMES_EASYBLOCK_RUN_STEP:
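            # each entry is assumed to be a (template_name, description) tuple; only the name is used here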
self.cfg.template_values[name[0]] = str(getattr(self, name[0], None))
self.cfg.generate_template_values()
def _skip_step(self, step, skippable):
"""Dedice whether or not to skip the specified step."""
module_only = build_option('module_only')
force = build_option('force')
skip = False
# skip step if specified as individual (skippable) step
if skippable and (self.skip or step in self.cfg['skipsteps']):
self.log.info("Skipping %s step (skip: %s, skipsteps: %s)", step, self.skip, self.cfg['skipsteps'])
skip = True
# skip step when only generating module file
# * still run sanity check without use of force
# * always run ready & prepare step to set up toolchain + deps
elif module_only and not step in MODULE_ONLY_STEPS:
self.log.info("Skipping %s step (only generating module)", step)
skip = True
# allow skipping sanity check too when only generating module and force is used
elif module_only and step == SANITYCHECK_STEP and force:
self.log.info("Skipping %s step because of forced module-only mode", step)
skip = True
else:
self.log.debug("Not skipping %s step (skippable: %s, skip: %s, skipsteps: %s, module_only: %s, force: %s",
step, skippable, self.skip, self.cfg['skipsteps'], module_only, force)
return skip
def run_step(self, step, methods):
"""
Run step, returns false when execution should be stopped
"""
self.log.info("Starting %s step", step)
self.update_config_template_run_step()
for m in methods:
self.log.info("Running method %s part of step %s" % ('_'.join(m.func_code.co_names), step))
m(self)
if self.cfg['stop'] == step:
self.log.info("Stopping after %s step.", step)
raise StopException(step)
@staticmethod
def get_steps(run_test_cases=True, iteration_count=1):
"""Return a list of all steps to be performed."""
def get_step(tag, descr, substeps, skippable, initial=True):
"""Determine step definition based on whether it's an initial run or not."""
substeps = [substep for (always_include, substep) in substeps if (initial or always_include)]
return (tag, descr, substeps, skippable)
# list of substeps for steps that are slightly different from 2nd iteration onwards
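        # each substep is an (always_include, function) pair; False entries only run in the first iteration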
ready_substeps = [
(False, lambda x: x.check_readiness_step()),
(True, lambda x: x.make_builddir()),
(True, lambda x: env.reset_changes()),
(True, lambda x: x.handle_iterate_opts()),
]
ready_step_spec = lambda initial: get_step(READY_STEP, "creating build dir, resetting environment",
ready_substeps, False, initial=initial)
source_substeps = [
(False, lambda x: x.checksum_step()),
(True, lambda x: x.extract_step()),
]
source_step_spec = lambda initial: get_step(SOURCE_STEP, "unpacking", source_substeps, True, initial=initial)
def prepare_step_spec(initial):
"""Return prepare step specification."""
if initial:
substeps = [lambda x: x.prepare_step()]
else:
substeps = [lambda x: x.guess_start_dir()]
return (PREPARE_STEP, 'preparing', substeps, False)
install_substeps = [
(False, lambda x: x.stage_install_step()),
(False, lambda x: x.make_installdir()),
(True, lambda x: x.install_step()),
]
install_step_spec = lambda initial: get_step('install', "installing", install_substeps, True, initial=initial)
        # format for step specifications: (step_name, description, list of substep functions, skippable)
# core steps that are part of the iterated loop
patch_step_spec = (PATCH_STEP, 'patching', [lambda x: x.patch_step()], True)
configure_step_spec = (CONFIGURE_STEP, 'configuring', [lambda x: x.configure_step()], True)
build_step_spec = (BUILD_STEP, 'building', [lambda x: x.build_step()], True)
test_step_spec = (TEST_STEP, 'testing', [lambda x: x.test_step()], True)
# part 1: pre-iteration + first iteration
steps_part1 = [
(FETCH_STEP, 'fetching files', [lambda x: x.fetch_step()], False),
ready_step_spec(True),
source_step_spec(True),
patch_step_spec,
prepare_step_spec(True),
configure_step_spec,
build_step_spec,
test_step_spec,
install_step_spec(True),
]
# part 2: iterated part, from 2nd iteration onwards
# repeat core procedure again depending on specified iteration count
# not all parts of all steps need to be rerun (see e.g., ready, prepare)
steps_part2 = [
ready_step_spec(False),
source_step_spec(False),
patch_step_spec,
prepare_step_spec(False),
configure_step_spec,
build_step_spec,
test_step_spec,
install_step_spec(False),
] * (iteration_count - 1)
# part 3: post-iteration part
steps_part3 = [
(EXTENSIONS_STEP, 'taking care of extensions', [lambda x: x.extensions_step()], False),
(POSTPROC_STEP, 'postprocessing', [lambda x: x.post_install_step()], True),
(SANITYCHECK_STEP, 'sanity checking', [lambda x: x.sanity_check_step()], False),
(CLEANUP_STEP, 'cleaning up', [lambda x: x.cleanup_step()], False),
(MODULE_STEP, 'creating module', [lambda x: x.make_module_step()], False),
(PERMISSIONS_STEP, 'permissions', [lambda x: x.permissions_step()], False),
(PACKAGE_STEP, 'packaging', [lambda x: x.package_step()], False),
]
        # full list of steps, including iterated steps
steps = steps_part1 + steps_part2 + steps_part3
if run_test_cases:
steps.append((TESTCASES_STEP, 'running test cases', [
lambda x: x.load_module(),
lambda x: x.test_cases_step(),
], False))
return steps
def run_all_steps(self, run_test_cases):
"""
Build and install this software.
run_test_cases (bool): run tests after building (e.g.: make test)
"""
if self.cfg['stop'] and self.cfg['stop'] == 'cfg':
return True
steps = self.get_steps(run_test_cases=run_test_cases, iteration_count=self.det_iter_cnt())
print_msg("building and installing %s..." % self.full_mod_name, self.log, silent=self.silent)
try:
for (step_name, descr, step_methods, skippable) in steps:
if self._skip_step(step_name, skippable):
print_msg("%s [skipped]" % descr, self.log, silent=self.silent)
else:
print_msg("%s..." % descr, self.log, silent=self.silent)
self.run_step(step_name, step_methods)
except StopException:
pass
        # return True for successful build (or stopped build)
return True
def build_and_install_one(ecdict, init_env):
"""
Build the software
    @param ecdict: dictionary containing parsed easyconfig + metadata
@param init_env: original environment (used to reset environment)
"""
silent = build_option('silent')
spec = ecdict['spec']
rawtxt = ecdict['ec'].rawtxt
name = ecdict['ec']['name']
print_msg("processing EasyBuild easyconfig %s" % spec, log=_log, silent=silent)
# restore original environment
_log.info("Resetting environment")
filetools.errors_found_in_log = 0
restore_env(init_env)
cwd = os.getcwd()
# load easyblock
easyblock = build_option('easyblock')
if not easyblock:
easyblock = fetch_parameters_from_easyconfig(rawtxt, ['easyblock'])[0]
try:
app_class = get_easyblock_class(easyblock, name=name)
app = app_class(ecdict['ec'])
_log.info("Obtained application instance of for %s (easyblock: %s)" % (name, easyblock))
except EasyBuildError, err:
print_error("Failed to get application instance for %s (easyblock: %s): %s" % (name, easyblock, err.msg),
silent=silent)
# application settings
stop = build_option('stop')
if stop is not None:
_log.debug("Stop set to %s" % stop)
app.cfg['stop'] = stop
skip = build_option('skip')
if skip is not None:
_log.debug("Skip set to %s" % skip)
app.cfg['skip'] = skip
# build easyconfig
errormsg = '(no error)'
# timing info
start_time = time.time()
try:
run_test_cases = not build_option('skip_test_cases') and app.cfg['tests']
result = app.run_all_steps(run_test_cases=run_test_cases)
except EasyBuildError, err:
first_n = 300
errormsg = "build failed (first %d chars): %s" % (first_n, err.msg[:first_n])
_log.warning(errormsg)
result = False
ended = "ended"
# make sure we're back in original directory before we finish up
os.chdir(cwd)
# successful build
if result:
if app.cfg['stop']:
ended = "STOPPED"
if app.builddir is not None:
new_log_dir = os.path.join(app.builddir, config.log_path())
else:
new_log_dir = os.path.dirname(app.logfile)
else:
new_log_dir = os.path.join(app.installdir, config.log_path())
if build_option('read_only_installdir'):
# temporarily re-enable write permissions for copying log/easyconfig to install dir
adjust_permissions(new_log_dir, stat.S_IWUSR, add=True, recursive=False)
# collect build stats
_log.info("Collecting build stats...")
buildstats = get_build_stats(app, start_time, build_option('command_line'))
_log.info("Build stats: %s" % buildstats)
try:
# upload spec to central repository
currentbuildstats = app.cfg['buildstats']
repo = init_repository(get_repository(), get_repositorypath())
if 'original_spec' in ecdict:
block = det_full_ec_version(app.cfg) + ".block"
repo.add_easyconfig(ecdict['original_spec'], app.name, block, buildstats, currentbuildstats)
repo.add_easyconfig(spec, app.name, det_full_ec_version(app.cfg), buildstats, currentbuildstats)
repo.commit("Built %s" % app.full_mod_name)
del repo
except EasyBuildError, err:
_log.warn("Unable to commit easyconfig to repository: %s", err)
success = True
succ = "successfully"
summary = "COMPLETED"
# cleanup logs
app.close_log()
log_fn = os.path.basename(get_log_filename(app.name, app.version))
application_log = os.path.join(new_log_dir, log_fn)
move_logs(app.logfile, application_log)
try:
newspec = os.path.join(new_log_dir, "%s-%s.eb" % (app.name, det_full_ec_version(app.cfg)))
# only copy if the files are not the same file already (yes, it happens)
if os.path.exists(newspec) and os.path.samefile(spec, newspec):
_log.debug("Not copying easyconfig file %s to %s since files are identical" % (spec, newspec))
else:
shutil.copy(spec, newspec)
_log.debug("Copied easyconfig file %s to %s" % (spec, newspec))
except (IOError, OSError), err:
print_error("Failed to copy easyconfig %s to %s: %s" % (spec, newspec, err))
if build_option('read_only_installdir'):
# take away user write permissions (again)
adjust_permissions(new_log_dir, stat.S_IWUSR, add=False, recursive=False)
# build failed
else:
success = False
summary = "FAILED"
build_dir = ''
if app.builddir:
build_dir = " (build directory: %s)" % (app.builddir)
succ = "unsuccessfully%s: %s" % (build_dir, errormsg)
# cleanup logs
app.close_log()
application_log = app.logfile
print_msg("%s: Installation %s %s" % (summary, ended, succ), log=_log, silent=silent)
# check for errors
if filetools.errors_found_in_log > 0:
print_msg("WARNING: %d possible error(s) were detected in the "
"build logs, please verify the build." % filetools.errors_found_in_log,
_log, silent=silent)
if app.postmsg:
print_msg("\nWARNING: %s\n" % app.postmsg, _log, silent=silent)
print_msg("Results of the build can be found in the log file %s" % application_log, _log, silent=silent)
del app
return (success, application_log, errormsg)
def get_easyblock_instance(ecdict):
"""
Get an instance for this easyconfig
    @param ecdict: dictionary containing a parsed easyconfig (the 'ec' key holds the EasyConfig instance)
returns an instance of EasyBlock (or subclass thereof)
"""
spec = ecdict['spec']
rawtxt = ecdict['ec'].rawtxt
name = ecdict['ec']['name']
# handle easyconfigs with custom easyblocks
# determine easyblock specification from easyconfig file, if any
easyblock = fetch_parameters_from_easyconfig(rawtxt, ['easyblock'])[0]
app_class = get_easyblock_class(easyblock, name=name)
return app_class(ecdict['ec'])
def build_easyconfigs(easyconfigs, output_dir, test_results):
"""Build the list of easyconfigs."""
build_stopped = {}
apploginfo = lambda x, y: x.log.info(y)
def perform_step(step, obj, method, logfile):
"""Perform method on object if it can be built."""
if (isinstance(obj, dict) and obj['spec'] not in build_stopped) or obj not in build_stopped:
# update templates before every step (except for initialization)
if isinstance(obj, EasyBlock):
obj.update_config_template_run_step()
try:
if step == 'initialization':
_log.info("Running %s step" % step)
return get_easyblock_instance(obj)
else:
apploginfo(obj, "Running %s step" % step)
method(obj)
except Exception, err: # catch all possible errors, also crashes in EasyBuild code itself
fullerr = str(err)
if not isinstance(err, EasyBuildError):
tb = traceback.format_exc()
fullerr = '\n'.join([tb, str(err)])
# we cannot continue building it
if step == 'initialization':
obj = obj['spec']
test_results.append((obj, step, fullerr, logfile))
            # keep a dict so we can check in O(1) if objects can still be built
build_stopped[obj] = step
# initialize all instances
apps = []
for ec in easyconfigs:
instance = perform_step('initialization', ec, None, _log)
apps.append(instance)
base_dir = os.getcwd()
# keep track of environment right before initiating builds
# note: may be different from ORIG_OS_ENVIRON, since EasyBuild may have defined additional env vars itself by now
# e.g. via easyconfig.handle_allowed_system_deps
base_env = copy.deepcopy(os.environ)
succes = []
for app in apps:
# if initialisation step failed, app will be None
if app:
applog = os.path.join(output_dir, "%s-%s.log" % (app.name, det_full_ec_version(app.cfg)))
start_time = time.time()
# start with a clean slate
os.chdir(base_dir)
restore_env(base_env)
steps = EasyBlock.get_steps(iteration_count=app.det_iter_cnt())
for (step_name, _, step_methods, skippable) in steps:
if skippable and step_name in app.cfg['skipsteps']:
_log.info("Skipping step %s" % step_name)
else:
for step_method in step_methods:
method_name = '_'.join(step_method.func_code.co_names)
perform_step('_'.join([step_name, method_name]), app, step_method, applog)
# close log and move it
app.close_log()
move_logs(app.logfile, applog)
if app not in build_stopped:
# gather build stats
buildstats = get_build_stats(app, start_time, build_option('command_line'))
succes.append((app, buildstats))
for result in test_results:
_log.info("%s crashed with an error during fase: %s, error: %s, log file: %s" % result)
failed = len(build_stopped)
total = len(apps)
_log.info("%s of %s packages failed to build!" % (failed, total))
output_file = os.path.join(output_dir, "easybuild-test.xml")
_log.debug("writing xml output to %s" % output_file)
write_to_xml(succes, test_results, output_file)
return failed == 0
class StopException(Exception):
"""
StopException class definition.
"""
pass<|fim▁end|> | if ec:
self.log.info("Not skipping %s" % name) |
<|file_name|>MultisigSignatureTransactionComparator.java<|end_file_name|><|fim▁begin|>package org.nem.core.model;
import org.nem.core.utils.ArrayUtils;<|fim▁hole|>
import java.util.Comparator;
/**
* A custom comparator for comparing MultisigSignatureTransaction objects.
* <br>
* This comparator only looks at the transaction signer and other hash.
*/
public class MultisigSignatureTransactionComparator implements Comparator<MultisigSignatureTransaction> {
@Override
public int compare(final MultisigSignatureTransaction lhs, final MultisigSignatureTransaction rhs) {
final Address lhsAddress = lhs.getSigner().getAddress();
final Address rhsAddress = rhs.getSigner().getAddress();
final int addressCompareResult = lhsAddress.compareTo(rhsAddress);
if (addressCompareResult != 0) {
return addressCompareResult;
}
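        // same signer: fall back to comparing the raw bytes of the other-transaction hashes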
return ArrayUtils.compare(lhs.getOtherTransactionHash().getRaw(), rhs.getOtherTransactionHash().getRaw());
}
}<|fim▁end|> | |
<|file_name|>0019_auto_20150420_1821.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
<|fim▁hole|> ('libreosteoweb', '0018_auto_20150420_1232'),
]
operations = [
migrations.AlterField(
model_name='regulardoctor',
name='phone',
field=models.CharField(max_length=100, null=True, verbose_name='Phone', blank=True),
),
]<|fim▁end|> | dependencies = [ |
<|file_name|>MongoRecordReader.java<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.mapreduce.mongo.in;
import com.groupon.mapreduce.mongo.WritableBSONObject;
import org.apache.hadoop.fs.FileSystem;<|fim▁hole|>import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import java.io.IOException;
import java.util.Iterator;
/**
* This reads Mongo Records from an Extent and returns Hadoop Records as WritableBSONObjects. The key
* returned to the Mapper is the _id field from the Mongo Record as Text.
*/
public class MongoRecordReader extends RecordReader<Text, WritableBSONObject> {
private Record current = null;
private Iterator<Record> iterator = null;
private FileSystem fs;
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
throws IOException, InterruptedException {
MongoInputSplit mongoInputSplit = (MongoInputSplit) inputSplit;
fs = ((MongoInputSplit) inputSplit).getExtent().getPath().getFileSystem(taskAttemptContext.getConfiguration());
iterator = mongoInputSplit.getExtent().iterator(fs);
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (!iterator.hasNext())
return false;
current = iterator.next();
return true;
}
@Override
public Text getCurrentKey() throws IOException, InterruptedException {
return new Text(current.getId(fs));
}
@Override
public WritableBSONObject getCurrentValue() throws IOException, InterruptedException {
return new WritableBSONObject(current.getContent(fs));
}
@Override
public float getProgress() throws IOException, InterruptedException {
if (!iterator.hasNext())
return 1.0f;
return 0.0f;
}
@Override
public void close() throws IOException {
}
}<|fim▁end|> | import org.apache.hadoop.io.Text; |
<|file_name|>markdown_pass.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Generate markdown from a document tree
use astsrv;
use doc::ItemUtils;
use doc;
use markdown_pass;
use markdown_writer::Writer;
use markdown_writer::WriterUtils;
use markdown_writer::WriterFactory;
use pass::Pass;
use sort_pass;
use std::cell::Cell;
use std::str;
use std::vec;
use syntax;
pub fn mk_pass(writer_factory: WriterFactory) -> Pass {
let writer_factory = Cell::new(writer_factory);
Pass {
name: ~"markdown",
f: |srv, doc| run(srv, doc, writer_factory.take())
}
}
fn run(
srv: astsrv::Srv,
doc: doc::Doc,
writer_factory: WriterFactory
) -> doc::Doc {
fn mods_last(item1: &doc::ItemTag, item2: &doc::ItemTag) -> bool {
fn is_mod(item: &doc::ItemTag) -> bool {
match *item {
doc::ModTag(_) => true,
_ => false
}
}
let lteq = !is_mod(item1) || is_mod(item2);
lteq
}
// Sort the items so mods come last. All mods will be
// output at the same header level so sorting mods last
// makes the headers come out nested correctly.
let sorted_doc = (sort_pass::mk_pass(
~"mods last", mods_last
).f)(srv, copy doc);
write_markdown(sorted_doc, writer_factory);
return doc;
}
struct Ctxt {
w: Writer
}
pub fn write_markdown(
doc: doc::Doc,
writer_factory: WriterFactory
) {
// There is easy parallelism to be had here, but
// we don't want to spawn too many pandoc processes.
// (See #2484, which is closed.)
do doc.pages.map |page| {
let ctxt = Ctxt {
w: writer_factory(copy *page)
};
write_page(&ctxt, page)
};
}
fn write_page(ctxt: &Ctxt, page: &doc::Page) {
write_title(ctxt, copy *page);
match copy *page {
doc::CratePage(doc) => {
write_crate(ctxt, doc);
}
doc::ItemPage(doc) => {
// We don't write a header for item's pages because their
// header in the html output is created by the page title
write_item_no_header(ctxt, doc);
}
}
ctxt.w.put_done();
}
fn write_title(ctxt: &Ctxt, page: doc::Page) {
ctxt.w.put_line(fmt!("%% %s", make_title(page)));
ctxt.w.put_line(~"");
}
fn make_title(page: doc::Page) -> ~str {
let item = match page {
doc::CratePage(CrateDoc) => {
doc::ModTag(copy CrateDoc.topmod)
}
doc::ItemPage(ItemTag) => {
ItemTag
}
};
let title = markdown_pass::header_text(item);
let title = title.replace("`", "");
return title;
}
enum Hlvl {
H1 = 1,
H2 = 2,
H3 = 3,
H4 = 4
}
fn write_header(ctxt: &Ctxt, lvl: Hlvl, doc: doc::ItemTag) {
let text = header_text(doc);
write_header_(ctxt, lvl, text);
}
fn write_header_(ctxt: &Ctxt, lvl: Hlvl, title: ~str) {
let hashes = str::from_chars(vec::from_elem(lvl as uint, '#'));
ctxt.w.put_line(fmt!("%s %s", hashes, title));
ctxt.w.put_line(~"");
}
pub fn header_kind(doc: doc::ItemTag) -> ~str {
match doc {
doc::ModTag(_) => {
if doc.id() == syntax::ast::crate_node_id {
~"Crate"
} else {
~"Module"
}
}
doc::NmodTag(_) => {
~"Foreign module"
}
doc::FnTag(_) => {
~"Function"
}
doc::ConstTag(_) => {
~"Freeze"
}
doc::EnumTag(_) => {
~"Enum"
}
doc::TraitTag(_) => {
~"Trait"
}
doc::ImplTag(_) => {
~"Implementation"
}
doc::TyTag(_) => {
~"Type"
}
doc::StructTag(_) => {
~"Struct"
}
}
}
pub fn header_name(doc: doc::ItemTag) -> ~str {
let fullpath = (doc.path() + &[doc.name()]).connect("::");
match &doc {
&doc::ModTag(_) if doc.id() != syntax::ast::crate_node_id => {
fullpath
}
&doc::NmodTag(_) => {
fullpath
}
&doc::ImplTag(ref doc) => {
assert!(doc.self_ty.is_some());
let bounds = if doc.bounds_str.is_some() {
fmt!(" where %s", *doc.bounds_str.get_ref())
} else {
~""
};
let self_ty = doc.self_ty.get_ref();
let mut trait_part = ~"";
for doc.trait_types.iter().enumerate().advance |(i, trait_type)| {
if i == 0 {
trait_part.push_str(" of ");
} else {
trait_part.push_str(", ");
}
trait_part.push_str(*trait_type);
}
fmt!("%s for %s%s", trait_part, *self_ty, bounds)
}
_ => {
doc.name()
}
}
}
pub fn header_text(doc: doc::ItemTag) -> ~str {
match &doc {
&doc::ImplTag(ref ImplDoc) => {
let header_kind = header_kind(copy doc);
let bounds = if ImplDoc.bounds_str.is_some() {
fmt!(" where `%s`", *ImplDoc.bounds_str.get_ref())
} else {
~""
};
let desc = if ImplDoc.trait_types.is_empty() {
fmt!("for `%s`%s", *ImplDoc.self_ty.get_ref(), bounds)
} else {
fmt!("of `%s` for `%s`%s",
ImplDoc.trait_types[0],
*ImplDoc.self_ty.get_ref(),
bounds)
};
return fmt!("%s %s", header_kind, desc);
}
_ => {}
}
header_text_(header_kind(copy doc),
header_name(doc))
}
fn header_text_(kind: &str, name: &str) -> ~str {
fmt!("%s `%s`", kind, name)
}
fn write_crate(
ctxt: &Ctxt,
doc: doc::CrateDoc
) {
write_top_module(ctxt, copy doc.topmod);
}
fn write_top_module(
ctxt: &Ctxt,
ModDoc: doc::ModDoc
) {
write_mod_contents(ctxt, ModDoc);
}
fn write_mod(
ctxt: &Ctxt,
ModDoc: doc::ModDoc
) {
write_mod_contents(ctxt, ModDoc);
}
fn write_common(
ctxt: &Ctxt,
desc: Option<~str>,
sections: &[doc::Section]
) {
write_desc(ctxt, desc);
write_sections(ctxt, sections);
}
fn write_desc(
ctxt: &Ctxt,
desc: Option<~str>
) {
match desc {
Some(desc) => {
ctxt.w.put_line(desc);
ctxt.w.put_line(~"");
}
None => ()
}
}
fn write_sections(ctxt: &Ctxt, sections: &[doc::Section]) {
for sections.iter().advance |section| {
write_section(ctxt, copy *section);
}
}
fn write_section(ctxt: &Ctxt, section: doc::Section) {
write_header_(ctxt, H4, copy section.header);
ctxt.w.put_line(copy section.body);
ctxt.w.put_line(~"");
}
fn write_mod_contents(
ctxt: &Ctxt,
doc: doc::ModDoc
) {
write_common(ctxt, doc.desc(), doc.sections());
if doc.index.is_some() {
write_index(ctxt, doc.index.get_ref());
}
for doc.items.iter().advance |itemTag| {
write_item(ctxt, copy *itemTag);
}
}
fn write_item(ctxt: &Ctxt, doc: doc::ItemTag) {
write_item_(ctxt, doc, true);
}
fn write_item_no_header(ctxt: &Ctxt, doc: doc::ItemTag) {
write_item_(ctxt, doc, false);
}
fn write_item_(ctxt: &Ctxt, doc: doc::ItemTag, write_header: bool) {
if write_header {
write_item_header(ctxt, copy doc);
}
match doc {
doc::ModTag(ModDoc) => write_mod(ctxt, ModDoc),
doc::NmodTag(nModDoc) => write_nmod(ctxt, nModDoc),
doc::FnTag(FnDoc) => write_fn(ctxt, FnDoc),
doc::ConstTag(ConstDoc) => write_const(ctxt, ConstDoc),
doc::EnumTag(EnumDoc) => write_enum(ctxt, EnumDoc),
doc::TraitTag(TraitDoc) => write_trait(ctxt, TraitDoc),
doc::ImplTag(ImplDoc) => write_impl(ctxt, ImplDoc),
doc::TyTag(TyDoc) => write_type(ctxt, TyDoc),
doc::StructTag(StructDoc) => put_struct(ctxt, StructDoc),
}
}
fn write_item_header(ctxt: &Ctxt, doc: doc::ItemTag) {
write_header(ctxt, item_header_lvl(&doc), doc);
}
fn item_header_lvl(doc: &doc::ItemTag) -> Hlvl {
match doc {
&doc::ModTag(_) | &doc::NmodTag(_) => H1,
_ => H2
}
}
fn write_index(ctxt: &Ctxt, index: &doc::Index) {
if index.entries.is_empty() {
return;
}
ctxt.w.put_line(~"<div class='index'>");
ctxt.w.put_line(~"");
for index.entries.iter().advance |entry| {
let header = header_text_(entry.kind, entry.name);
let id = copy entry.link;
if entry.brief.is_some() {
ctxt.w.put_line(fmt!("* [%s](%s) - %s",
header, id, *entry.brief.get_ref()));
} else {
ctxt.w.put_line(fmt!("* [%s](%s)", header, id));
}
}
ctxt.w.put_line(~"");
ctxt.w.put_line(~"</div>");
ctxt.w.put_line(~"");
}
fn write_nmod(ctxt: &Ctxt, doc: doc::NmodDoc) {
write_common(ctxt, doc.desc(), doc.sections());
if doc.index.is_some() {
write_index(ctxt, doc.index.get_ref());
}
for doc.fns.iter().advance |FnDoc| {
write_item_header(ctxt, doc::FnTag(copy *FnDoc));
write_fn(ctxt, copy *FnDoc);
}
}
fn write_fn(
ctxt: &Ctxt,
doc: doc::FnDoc
) {
write_fnlike(
ctxt,
copy doc.sig,
doc.desc(),
doc.sections()
);
}
fn write_fnlike(
ctxt: &Ctxt,
sig: Option<~str>,
desc: Option<~str>,
sections: &[doc::Section]
) {
write_sig(ctxt, sig);
write_common(ctxt, desc, sections);
}
fn write_sig(ctxt: &Ctxt, sig: Option<~str>) {
match sig {
Some(sig) => {
ctxt.w.put_line(code_block(sig));
ctxt.w.put_line(~"");
}
None => fail!("unimplemented")
}
}
fn code_block(s: ~str) -> ~str {
fmt!("~~~ {.rust}
%s
~~~", s)
}
fn write_const(
ctxt: &Ctxt,
doc: doc::ConstDoc
) {
write_sig(ctxt, copy doc.sig);
write_common(ctxt, doc.desc(), doc.sections());
}
fn write_enum(
ctxt: &Ctxt,
doc: doc::EnumDoc
) {
write_common(ctxt, doc.desc(), doc.sections());
write_variants(ctxt, doc.variants);
}
fn write_variants(
ctxt: &Ctxt,
docs: &[doc::VariantDoc]
) {
if docs.is_empty() {
return;
}
write_header_(ctxt, H4, ~"Variants");
for docs.iter().advance |variant| {
write_variant(ctxt, copy *variant);
}
ctxt.w.put_line(~"");
}
fn write_variant(ctxt: &Ctxt, doc: doc::VariantDoc) {
assert!(doc.sig.is_some());
let sig = doc.sig.get_ref();
// space out list items so they all end up within paragraph elements
ctxt.w.put_line(~"");
match copy doc.desc {
Some(desc) => {
ctxt.w.put_line(list_item_indent(fmt!("* `%s` - %s", *sig, desc)));
}
None => {
ctxt.w.put_line(fmt!("* `%s`", *sig));
}
}
}
fn list_item_indent(item: &str) -> ~str {
let indented = item.any_line_iter().collect::<~[&str]>();
// separate markdown elements within `*` lists must be indented by four
// spaces, or they will escape the list context. indenting everything
// seems fine though.
indented.connect("\n ")
}<|fim▁hole|> write_methods(ctxt, doc.methods);
}
fn write_methods(ctxt: &Ctxt, docs: &[doc::MethodDoc]) {
for docs.iter().advance |doc| {
write_method(ctxt, copy *doc);
}
}
fn write_method(ctxt: &Ctxt, doc: doc::MethodDoc) {
write_header_(ctxt, H3, header_text_("Method", doc.name));
write_fnlike(
ctxt,
copy doc.sig,
copy doc.desc,
doc.sections
);
}
fn write_impl(ctxt: &Ctxt, doc: doc::ImplDoc) {
write_common(ctxt, doc.desc(), doc.sections());
write_methods(ctxt, doc.methods);
}
fn write_type(
ctxt: &Ctxt,
doc: doc::TyDoc
) {
write_sig(ctxt, copy doc.sig);
write_common(ctxt, doc.desc(), doc.sections());
}
fn put_struct(
ctxt: &Ctxt,
doc: doc::StructDoc
) {
write_sig(ctxt, copy doc.sig);
write_common(ctxt, doc.desc(), doc.sections());
}
#[cfg(test)]
mod test {
use astsrv;
use attr_pass;
use config;
use desc_to_brief_pass;
use doc;
use extract;
use markdown_index_pass;
use markdown_pass::{mk_pass, write_markdown};
use markdown_writer;
use path_pass;
use page_pass;
use sectionalize_pass;
use trim_pass;
use tystr_pass;
use unindent_pass;
fn render(source: ~str) -> ~str {
let (srv, doc) = create_doc_srv(source);
let markdown = write_markdown_str_srv(srv, doc);
debug!("markdown: %s", markdown);
markdown
}
fn create_doc_srv(source: ~str) -> (astsrv::Srv, doc::Doc) {
do astsrv::from_str(source) |srv| {
let config = config::Config {
output_style: config::DocPerCrate,
.. config::default_config(&Path("whatever"))
};
let doc = extract::from_srv(srv.clone(), ~"");
debug!("doc (extract): %?", doc);
let doc = (tystr_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (tystr): %?", doc);
let doc = (path_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (path): %?", doc);
let doc = (attr_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (attr): %?", doc);
let doc = (desc_to_brief_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (desc_to_brief): %?", doc);
let doc = (unindent_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (unindent): %?", doc);
let doc = (sectionalize_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (trim): %?", doc);
let doc = (trim_pass::mk_pass().f)(srv.clone(), doc);
debug!("doc (sectionalize): %?", doc);
let doc = (markdown_index_pass::mk_pass(config).f)(
srv.clone(), doc);
debug!("doc (index): %?", doc);
(srv.clone(), doc)
}
}
fn create_doc(source: ~str) -> doc::Doc {
let (_, doc) = create_doc_srv(source);
doc
}
fn write_markdown_str(
doc: doc::Doc
) -> ~str {
let (writer_factory, po) = markdown_writer::future_writer_factory();
write_markdown(doc, writer_factory);
return po.recv().second();
}
fn write_markdown_str_srv(
srv: astsrv::Srv,
doc: doc::Doc
) -> ~str {
let (writer_factory, po) = markdown_writer::future_writer_factory();
let pass = mk_pass(writer_factory);
(pass.f)(srv, doc);
return po.recv().second();
}
#[test]
fn write_markdown_should_write_mod_headers() {
let markdown = render(~"mod moo { }");
assert!(markdown.contains("# Module `moo`"));
}
#[test]
fn should_leave_blank_line_after_header() {
let markdown = render(~"mod morp { }");
assert!(markdown.contains("Module `morp`\n\n"));
}
#[test]
fn should_write_modules_last() {
/*
Because the markdown pass writes all modules at the same level of
indentation (it doesn't 'nest' them), we need to make sure that we
write all of the modules contained in each module after all other
types of items, or else the header nesting will end up wrong, with
modules appearing to contain items that they do not.
*/
let markdown = render(
~"mod a { }\
fn b() { }\
mod c {
}\
fn d() { }"
);
let idx_a = markdown.find_str("# Module `a`").get();
let idx_b = markdown.find_str("## Function `b`").get();
let idx_c = markdown.find_str("# Module `c`").get();
let idx_d = markdown.find_str("## Function `d`").get();
assert!(idx_b < idx_d);
assert!(idx_d < idx_a);
assert!(idx_a < idx_c);
}
#[test]
fn should_request_new_writer_for_each_page() {
// This port will send us a (page, str) pair for every writer
// that was created
let (writer_factory, po) = markdown_writer::future_writer_factory();
let (srv, doc) = create_doc_srv(~"mod a { }");
// Split the document up into pages
let doc = (page_pass::mk_pass(config::DocPerMod).f)(srv, doc);
write_markdown(doc, writer_factory);
// We expect two pages to have been written
for 2.times {
po.recv();
}
}
#[test]
fn should_write_title_for_each_page() {
let (writer_factory, po) = markdown_writer::future_writer_factory();
let (srv, doc) = create_doc_srv(
~"#[link(name = \"core\")]; mod a { }");
let doc = (page_pass::mk_pass(config::DocPerMod).f)(srv, doc);
write_markdown(doc, writer_factory);
for 2.times {
let (page, markdown) = po.recv();
match page {
doc::CratePage(_) => {
assert!(markdown.contains("% Crate core"));
}
doc::ItemPage(_) => {
assert!(markdown.contains("% Module a"));
}
}
}
}
#[test]
fn should_write_full_path_to_mod() {
let markdown = render(~"mod a { mod b { mod c { } } }");
assert!(markdown.contains("# Module `a::b::c`"));
}
#[test]
fn should_write_sections() {
let markdown = render(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(markdown.contains("#### Header\n\nBody\n\n"));
}
#[test]
fn should_write_crate_description() {
let markdown = render(~"#[doc = \"this is the crate\"];");
assert!(markdown.contains("this is the crate"));
}
#[test]
fn should_write_index() {
let markdown = render(~"mod a { } mod b { }");
assert!(markdown.contains(
"\n\n* [Module `a`](#module-a)\n\
* [Module `b`](#module-b)\n\n"
));
}
#[test]
fn should_write_index_brief() {
let markdown = render(~"#[doc = \"test\"] mod a { }");
assert!(markdown.contains("(#module-a) - test\n"));
}
#[test]
fn should_not_write_index_if_no_entries() {
let markdown = render(~"");
assert!(!markdown.contains("\n\n\n"));
}
#[test]
fn should_write_index_for_foreign_mods() {
let markdown = render(~"extern { fn a(); }");
assert!(markdown.contains(
"\n\n* [Function `a`](#function-a)\n\n"
));
}
#[test]
fn should_write_foreign_fns() {
let markdown = render(
~"extern { #[doc = \"test\"] fn a(); }");
assert!(markdown.contains("test"));
}
#[test]
fn should_write_foreign_fn_headers() {
let markdown = render(
~"extern { #[doc = \"test\"] fn a(); }");
assert!(markdown.contains("## Function `a`"));
}
#[test]
fn write_markdown_should_write_function_header() {
let markdown = render(~"fn func() { }");
assert!(markdown.contains("## Function `func`"));
}
#[test]
fn should_write_the_function_signature() {
let markdown = render(~"#[doc = \"f\"] fn a() { }");
assert!(markdown.contains("\n~~~ {.rust}\nfn a()\n"));
}
#[test]
fn should_insert_blank_line_after_fn_signature() {
let markdown = render(~"#[doc = \"f\"] fn a() { }");
assert!(markdown.contains("fn a()\n~~~\n\n"));
}
#[test]
fn should_correctly_bracket_fn_signature() {
let doc = create_doc(~"fn a() { }");
let doc = doc::Doc{
pages: ~[
doc::CratePage(doc::CrateDoc{
topmod: doc::ModDoc{
items: ~[doc::FnTag(doc::SimpleItemDoc{
sig: Some(~"line 1\nline 2"),
.. copy doc.cratemod().fns()[0]
})],
.. doc.cratemod()
},
.. doc.CrateDoc()
})
]
};
let markdown = write_markdown_str(doc);
assert!(markdown.contains("~~~ {.rust}\nline 1\nline 2\n~~~"));
}
#[test]
fn should_leave_blank_line_between_fn_header_and_sig() {
let markdown = render(~"fn a() { }");
assert!(markdown.contains("Function `a`\n\n~~~ {.rust}\nfn a()"));
}
#[test]
fn should_write_const_header() {
let markdown = render(~"static a: bool = true;");
assert!(markdown.contains("## Freeze `a`\n\n"));
}
#[test]
fn should_write_const_description() {
let markdown = render(
~"#[doc = \"b\"]\
static a: bool = true;");
assert!(markdown.contains("\n\nb\n\n"));
}
#[test]
fn should_write_enum_header() {
let markdown = render(~"enum a { b }");
assert!(markdown.contains("## Enum `a`\n\n"));
}
#[test]
fn should_write_enum_description() {
let markdown = render(~"#[doc = \"b\"] enum a { b }");
assert!(markdown.contains("\n\nb\n\n"));
}
#[test]
fn should_write_variant_list() {
let markdown = render(
~"enum a { \
#[doc = \"test\"] b, \
#[doc = \"test\"] c }");
assert!(markdown.contains(
"\n\n#### Variants\n\
\n\
\n* `b` - test\
\n\
\n* `c` - test\n\n"));
}
#[test]
fn should_write_variant_list_without_descs() {
let markdown = render(~"enum a { b, c }");
assert!(markdown.contains(
"\n\n#### Variants\n\
\n\
\n* `b`\
\n\
\n* `c`\n\n"));
}
#[test]
fn should_write_variant_list_with_indent() {
let markdown = render(
~"enum a { #[doc = \"line 1\\n\\nline 2\"] b, c }");
assert!(markdown.contains(
"\n\n#### Variants\n\
\n\
\n* `b` - line 1\
\n \
\n line 2\
\n\
\n* `c`\n\n"));
}
#[test]
fn should_write_variant_list_with_signatures() {
let markdown = render(~"enum a { b(int), #[doc = \"a\"] c(int) }");
assert!(markdown.contains(
"\n\n#### Variants\n\
\n\
\n* `b(int)`\
\n\
\n* `c(int)` - a\n\n"));
}
#[test]
fn should_write_trait_header() {
let markdown = render(~"trait i { fn a(); }");
assert!(markdown.contains("## Trait `i`"));
}
#[test]
fn should_write_trait_desc() {
let markdown = render(~"#[doc = \"desc\"] trait i { fn a(); }");
assert!(markdown.contains("desc"));
}
#[test]
fn should_write_trait_method_header() {
let markdown = render(~"trait i { fn a(); }");
assert!(markdown.contains("### Method `a`"));
}
#[test]
fn should_write_trait_method_signature() {
let markdown = render(~"trait i { fn a(&self); }");
assert!(markdown.contains("\n~~~ {.rust}\nfn a(&self)"));
}
#[test]
fn should_write_impl_header() {
let markdown = render(~"impl int { fn a() { } }");
assert!(markdown.contains("## Implementation for `int`"));
}
#[test]
fn should_write_impl_header_with_bounds() {
let markdown = render(~"impl <T> int<T> { }");
assert!(markdown.contains("## Implementation for `int<T>` where `<T>`"));
}
#[test]
fn should_write_impl_header_with_trait() {
let markdown = render(~"impl j for int { fn a() { } }");
assert!(markdown.contains(
"## Implementation of `j` for `int`"));
}
#[test]
fn should_write_impl_desc() {
let markdown = render(
~"#[doc = \"desc\"] impl int { fn a() { } }");
assert!(markdown.contains("desc"));
}
#[test]
fn should_write_impl_method_header() {
let markdown = render(
~"impl int { fn a() { } }");
assert!(markdown.contains("### Method `a`"));
}
#[test]
fn should_write_impl_method_signature() {
let markdown = render(
~"impl int { fn a(&mut self) { } }");
assert!(markdown.contains("~~~ {.rust}\nfn a(&mut self)"));
}
#[test]
fn should_write_type_header() {
let markdown = render(~"type t = int;");
assert!(markdown.contains("## Type `t`"));
}
#[test]
fn should_write_type_desc() {
let markdown = render(
~"#[doc = \"desc\"] type t = int;");
assert!(markdown.contains("\n\ndesc\n\n"));
}
#[test]
fn should_write_type_signature() {
let markdown = render(~"type t = int;");
assert!(markdown.contains("\n\n~~~ {.rust}\ntype t = int\n~~~\n"));
}
#[test]
fn should_put_struct_header() {
let markdown = render(~"struct S { field: () }");
assert!(markdown.contains("## Struct `S`\n\n"));
}
}<|fim▁end|> |
fn write_trait(ctxt: &Ctxt, doc: doc::TraitDoc) {
write_common(ctxt, doc.desc(), doc.sections()); |
<|file_name|>voir.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS<|fim▁hole|>
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins:
personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle]
msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " (retour " + chemin.salles_retour[salle] + ")"
personnage << msg<|fim▁end|> | class PrmVoir(Parametre):
"""Commande 'chemin voir'. |
<|file_name|>builtin-superkinds-in-metadata.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:trait_superkinds_in_metadata.rs
// Tests (correct) usage of trait super-builtin-kinds cross-crate.
extern crate trait_superkinds_in_metadata;
use trait_superkinds_in_metadata::{RequiresRequiresShareAndSend, RequiresShare};
use trait_superkinds_in_metadata::{RequiresCopy};
#[derive(Copy)]
struct X<T>(T);
impl<T:Sync> RequiresShare for X<T> { }
impl<T:Sync+Send> RequiresRequiresShareAndSend for X<T> { }
impl<T:Copy> RequiresCopy for X<T> { }
pub fn main() { }<|fim▁end|> | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// |
<|file_name|>SdmlConfiguration.py<|end_file_name|><|fim▁begin|># SecuML
# Copyright (C) 2016 ANSSI
#
# SecuML is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# SecuML is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with SecuML. If not, see <http://www.gnu.org/licenses/>.
from SecuML.core.DimensionReduction.Algorithms.Projection.Sdml import Sdml
from SecuML.core.DimensionReduction.Configuration import DimensionReductionConfFactory
from .SemiSupervisedProjectionConfiguration import SemiSupervisedProjectionConfiguration
class SdmlConfiguration(SemiSupervisedProjectionConfiguration):
def __init__(self, families_supervision=None):
SemiSupervisedProjectionConfiguration.__init__(
self, Sdml, families_supervision=families_supervision)
<|fim▁hole|> conf.num_components = obj['num_components']
return conf
def toJson(self):
conf = SemiSupervisedProjectionConfiguration.toJson(self)
conf['__type__'] = 'SdmlConfiguration'
return conf
DimensionReductionConfFactory.getFactory().registerClass('SdmlConfiguration',
SdmlConfiguration)<|fim▁end|> | @staticmethod
def fromJson(obj):
conf = SdmlConfiguration(
families_supervision=obj['families_supervision']) |
<|file_name|>0003_auto__add_field_queryhistory_server_name__add_field_queryhistory_serve.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'QueryHistory.server_name'
db.add_column('beeswax_queryhistory', 'server_name', self.gf('django.db.models.fields.CharField')(default='', max_length=128), keep_default=False)
# Adding field 'QueryHistory.server_host'
db.add_column('beeswax_queryhistory', 'server_host', self.gf('django.db.models.fields.CharField')(default='', max_length=128), keep_default=False)
# Adding field 'QueryHistory.server_port'
db.add_column('beeswax_queryhistory', 'server_port', self.gf('django.db.models.fields.SmallIntegerField')(default=0), keep_default=False)
# Changing field 'QueryHistory.query'
db.alter_column('beeswax_queryhistory', 'query', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Deleting field 'QueryHistory.server_name'
db.delete_column('beeswax_queryhistory', 'server_name')
# Deleting field 'QueryHistory.server_host'
db.delete_column('beeswax_queryhistory', 'server_host')
# Deleting field 'QueryHistory.server_port'
db.delete_column('beeswax_queryhistory', 'server_port')
# Changing field 'QueryHistory.query'
db.alter_column('beeswax_queryhistory', 'query', self.gf('django.db.models.fields.CharField')(max_length=1024))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'beeswax.metainstall': {
'Meta': {'object_name': 'MetaInstall'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'installed_example': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'beeswax.queryhistory': {
'Meta': {'object_name': 'QueryHistory'},
'design': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['beeswax.SavedQuery']", 'null': 'True'}),
'has_results': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_state': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'log_context': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'notify': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'query': ('django.db.models.fields.TextField', [], {}),
'server_host': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
'server_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'server_port': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'beeswax.savedquery': {
'Meta': {'object_name': 'SavedQuery'},
'data': ('django.db.models.fields.TextField', [], {'max_length': '65536'}),
'desc': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auto': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'mtime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),<|fim▁hole|>
complete_apps = ['beeswax']<|fim▁end|> | 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
} |
<|file_name|>rpcmining.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "main.h"
#include "db.h"
#include "init.h"
#include "bitcoinrpc.h"
using namespace json_spirit;
using namespace std;
// Return average network hashes per second based on the last 'lookup' blocks,
// or from the last difficulty change if 'lookup' is nonpositive.
// If 'height' is nonnegative, compute the estimate at the time when a given block was found.
Value GetNetworkHashPS(int lookup, int height) {
CBlockIndex *pb = pindexBest;
if (height >= 0 && height < nBestHeight)
pb = FindBlockByHeight(height);
if (pb == NULL || !pb->nHeight)
return 0;
// If lookup is -1, then use blocks since last difficulty change.
if (lookup <= 0)
lookup = pb->nHeight % 2016 + 1;
// If lookup is larger than chain, then set it to chain length.
if (lookup > pb->nHeight)
lookup = pb->nHeight;
double sum = 0.0;
for (int i = 0; i < lookup; i++)
{
sum += (pb->GetBlockTime() - pb->pprev->GetBlockTime()) / GetDifficulty(pb);
pb = pb->pprev;
}
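    // sum/lookup is the average seconds per block per unit of difficulty;
    // one unit of difficulty corresponds to ~2^32 hashes, so hashes/s ~= 2^32 / (sum / lookup)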
return (boost::int64_t)(pow(2.0, 32) / (sum / lookup));
}
Value getnetworkhashps(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getnetworkhashps [blocks] [height]\n"
"Returns the estimated network hashes per second based on the last 120 blocks.\n"
"Pass in [blocks] to override # of blocks, -1 specifies since last difficulty change.\n"
"Pass in [height] to estimate the network speed at the time when a certain block was found.");
return GetNetworkHashPS(params.size() > 0 ? params[0].get_int() : 120, params.size() > 1 ? params[1].get_int() : -1);
}
Value getmininginfo(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getmininginfo\n"
"Returns an object containing mining-related information.");
Object obj;
obj.push_back(Pair("blocks", (int)nBestHeight));
obj.push_back(Pair("currentblocksize",(uint64_t)nLastBlockSize));
obj.push_back(Pair("currentblocktx",(uint64_t)nLastBlockTx));
obj.push_back(Pair("difficulty", (double)GetDifficulty()));
obj.push_back(Pair("errors", GetWarnings("statusbar")));
obj.push_back(Pair("networkhashps", getnetworkhashps(params, false)));
obj.push_back(Pair("pooledtx", (uint64_t)mempool.size()));
obj.push_back(Pair("testnet", fTestNet));
return obj;
}
Value getworkex(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getworkex [data, coinbase]\n"
"If [data, coinbase] is not specified, returns extended work data.\n"
);
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Colossus is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Colossus is downloading blocks...");
typedef map<uint256, pair<CBlock*, CScript> > mapNewBlock_t;
static mapNewBlock_t mapNewBlock; // FIXME: thread safety
static vector<CBlockTemplate*> vNewBlockTemplate;
static CReserveKey reservekey(pwalletMain);
if (params.size() == 0)
{
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60))
{
if (pindexPrev != pindexBest)
{
// Deallocate old blocks since they're obsolete now
mapNewBlock.clear();
BOOST_FOREACH(CBlockTemplate* pblocktemplate, vNewBlockTemplate)
delete pblocktemplate;
vNewBlockTemplate.clear();
}
// Clear pindexPrev so future getworks make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
pblocktemplate = CreateNewBlock(reservekey);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
vNewBlockTemplate.push_back(pblocktemplate);
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
// Update nExtraNonce
static unsigned int nExtraNonce = 0;
IncrementExtraNonce(pblock, pindexPrev, nExtraNonce);
// Save
mapNewBlock[pblock->hashMerkleRoot] = make_pair(pblock, pblock->vtx[0].vin[0].scriptSig);
// Pre-build hash buffers
char pmidstate[32];
char pdata[128];
char phash1[64];
FormatHashBuffers(pblock, pmidstate, pdata, phash1);
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
CTransaction coinbaseTx = pblock->vtx[0];
std::vector<uint256> merkle = pblock->GetMerkleBranch(0);
Object result;
result.push_back(Pair("data", HexStr(BEGIN(pdata), END(pdata))));
result.push_back(Pair("target", HexStr(BEGIN(hashTarget), END(hashTarget))));
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << coinbaseTx;
result.push_back(Pair("coinbase", HexStr(ssTx.begin(), ssTx.end())));
Array merkle_arr;
BOOST_FOREACH(uint256 merkleh, merkle) {
printf("%s\n", merkleh.ToString().c_str());
merkle_arr.push_back(HexStr(BEGIN(merkleh), END(merkleh)));
}
result.push_back(Pair("merkle", merkle_arr));
return result;
}
else
{
// Parse parameters
vector<unsigned char> vchData = ParseHex(params[0].get_str());
vector<unsigned char> coinbase;
if(params.size() == 2)
coinbase = ParseHex(params[1].get_str());
if (vchData.size() != 128)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
CBlock* pdata = (CBlock*)&vchData[0];
// Byte reverse<|fim▁hole|> for (int i = 0; i < 128/4; i++)
((unsigned int*)pdata)[i] = ByteReverse(((unsigned int*)pdata)[i]);
// Get saved block
if (!mapNewBlock.count(pdata->hashMerkleRoot))
return false;
CBlock* pblock = mapNewBlock[pdata->hashMerkleRoot].first;
pblock->nTime = pdata->nTime;
pblock->nNonce = pdata->nNonce;
if(coinbase.size() == 0)
pblock->vtx[0].vin[0].scriptSig = mapNewBlock[pdata->hashMerkleRoot].second;
else
CDataStream(coinbase, SER_NETWORK, PROTOCOL_VERSION) >> pblock->vtx[0];
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
return CheckWork(pblock, *pwalletMain, reservekey);
}
}
Value getwork(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getwork [data]\n"
"If [data] is not specified, returns formatted hash data to work on:\n"
" \"midstate\" : precomputed hash state after hashing the first half of the data (DEPRECATED)\n" // deprecated
" \"data\" : block data\n"
" \"hash1\" : formatted hash buffer for second hash (DEPRECATED)\n" // deprecated
" \"target\" : little endian hash target\n"
"If [data] is specified, tries to solve the block and returns true if it was successful.");
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Colossus is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Colossus is downloading blocks...");
typedef map<uint256, pair<CBlock*, CScript> > mapNewBlock_t;
static mapNewBlock_t mapNewBlock; // FIXME: thread safety
static vector<CBlockTemplate*> vNewBlockTemplate;
if (params.size() == 0)
{
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60))
{
if (pindexPrev != pindexBest)
{
// Deallocate old blocks since they're obsolete now
mapNewBlock.clear();
BOOST_FOREACH(CBlockTemplate* pblocktemplate, vNewBlockTemplate)
delete pblocktemplate;
vNewBlockTemplate.clear();
}
// Clear pindexPrev so future getworks make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
pblocktemplate = CreateNewBlock(*pMiningKey);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
vNewBlockTemplate.push_back(pblocktemplate);
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
// Update nExtraNonce
static unsigned int nExtraNonce = 0;
IncrementExtraNonce(pblock, pindexPrev, nExtraNonce);
// Save
mapNewBlock[pblock->hashMerkleRoot] = make_pair(pblock, pblock->vtx[0].vin[0].scriptSig);
// Pre-build hash buffers
char pmidstate[32];
char pdata[128];
char phash1[64];
FormatHashBuffers(pblock, pmidstate, pdata, phash1);
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
Object result;
result.push_back(Pair("midstate", HexStr(BEGIN(pmidstate), END(pmidstate)))); // deprecated
result.push_back(Pair("data", HexStr(BEGIN(pdata), END(pdata))));
result.push_back(Pair("hash1", HexStr(BEGIN(phash1), END(phash1)))); // deprecated
result.push_back(Pair("target", HexStr(BEGIN(hashTarget), END(hashTarget))));
return result;
}
else
{
// Parse parameters
vector<unsigned char> vchData = ParseHex(params[0].get_str());
if (vchData.size() != 128)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
CBlock* pdata = (CBlock*)&vchData[0];
// Byte reverse
for (int i = 0; i < 128/4; i++)
((unsigned int*)pdata)[i] = ByteReverse(((unsigned int*)pdata)[i]);
// Get saved block
if (!mapNewBlock.count(pdata->hashMerkleRoot))
return false;
CBlock* pblock = mapNewBlock[pdata->hashMerkleRoot].first;
pblock->nTime = pdata->nTime;
pblock->nNonce = pdata->nNonce;
pblock->vtx[0].vin[0].scriptSig = mapNewBlock[pdata->hashMerkleRoot].second;
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
return CheckWork(pblock, *pwalletMain, *pMiningKey);
}
}
Value getblocktemplate(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getblocktemplate [params]\n"
"Returns data needed to construct a block to work on:\n"
" \"version\" : block version\n"
" \"previousblockhash\" : hash of current highest block\n"
" \"transactions\" : contents of non-coinbase transactions that should be included in the next block\n"
" \"coinbaseaux\" : data that should be included in coinbase\n"
" \"coinbasevalue\" : maximum allowable input to coinbase transaction, including the generation award and transaction fees\n"
" \"target\" : hash target\n"
" \"mintime\" : minimum timestamp appropriate for next block\n"
" \"curtime\" : current timestamp\n"
" \"mutable\" : list of ways the block template may be changed\n"
" \"noncerange\" : range of valid nonces\n"
" \"sigoplimit\" : limit of sigops in blocks\n"
" \"sizelimit\" : limit of block size\n"
" \"bits\" : compressed target of next block\n"
" \"height\" : height of the next block\n"
"See https://en.bitcoin.it/wiki/BIP_0022 for full specification.");
std::string strMode = "template";
if (params.size() > 0)
{
const Object& oparam = params[0].get_obj();
const Value& modeval = find_value(oparam, "mode");
if (modeval.type() == str_type)
strMode = modeval.get_str();
else if (modeval.type() == null_type)
{
/* Do nothing */
}
else
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid mode");
}
if (strMode != "template")
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid mode");
if (vNodes.empty())
throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Colossus is not connected!");
if (IsInitialBlockDownload())
throw JSONRPCError(RPC_CLIENT_IN_INITIAL_DOWNLOAD, "Colossus is downloading blocks...");
// Update block
static unsigned int nTransactionsUpdatedLast;
static CBlockIndex* pindexPrev;
static int64 nStart;
static CBlockTemplate* pblocktemplate;
if (pindexPrev != pindexBest ||
(nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 5))
{
// Clear pindexPrev so future calls make a new block, despite any failures from here on
pindexPrev = NULL;
// Store the pindexBest used before CreateNewBlock, to avoid races
nTransactionsUpdatedLast = nTransactionsUpdated;
CBlockIndex* pindexPrevNew = pindexBest;
nStart = GetTime();
// Create new block
if(pblocktemplate)
{
delete pblocktemplate;
pblocktemplate = NULL;
}
pblocktemplate = CreateNewBlock(*pMiningKey);
if (!pblocktemplate)
throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory");
// Need to update only after we know CreateNewBlock succeeded
pindexPrev = pindexPrevNew;
}
CBlock* pblock = &pblocktemplate->block; // pointer for convenience
// Update nTime
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
Array transactions;
map<uint256, int64_t> setTxIndex;
int i = 0;
BOOST_FOREACH (CTransaction& tx, pblock->vtx)
{
uint256 txHash = tx.GetHash();
setTxIndex[txHash] = i++;
if (tx.IsCoinBase())
continue;
Object entry;
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << tx;
entry.push_back(Pair("data", HexStr(ssTx.begin(), ssTx.end())));
entry.push_back(Pair("hash", txHash.GetHex()));
Array deps;
BOOST_FOREACH (const CTxIn &in, tx.vin)
{
if (setTxIndex.count(in.prevout.hash))
deps.push_back(setTxIndex[in.prevout.hash]);
}
entry.push_back(Pair("depends", deps));
int index_in_template = i - 1;
entry.push_back(Pair("fee", pblocktemplate->vTxFees[index_in_template]));
entry.push_back(Pair("sigops", pblocktemplate->vTxSigOps[index_in_template]));
transactions.push_back(entry);
}
Object aux;
aux.push_back(Pair("flags", HexStr(COINBASE_FLAGS.begin(), COINBASE_FLAGS.end())));
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
static Array aMutable;
if (aMutable.empty())
{
aMutable.push_back("time");
aMutable.push_back("transactions");
aMutable.push_back("prevblock");
}
Object result;
result.push_back(Pair("version", pblock->nVersion));
result.push_back(Pair("previousblockhash", pblock->hashPrevBlock.GetHex()));
result.push_back(Pair("transactions", transactions));
result.push_back(Pair("coinbaseaux", aux));
result.push_back(Pair("coinbasevalue", (int64_t)pblock->vtx[0].vout[0].nValue));
result.push_back(Pair("target", hashTarget.GetHex()));
result.push_back(Pair("mintime", (int64_t)pindexPrev->GetMedianTimePast()+1));
result.push_back(Pair("mutable", aMutable));
result.push_back(Pair("noncerange", "00000000ffffffff"));
result.push_back(Pair("sigoplimit", (int64_t)MAX_BLOCK_SIGOPS));
result.push_back(Pair("sizelimit", (int64_t)MAX_BLOCK_SIZE));
result.push_back(Pair("curtime", (int64_t)pblock->nTime));
result.push_back(Pair("bits", HexBits(pblock->nBits)));
result.push_back(Pair("height", (int64_t)(pindexPrev->nHeight+1)));
return result;
}
Value submitblock(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"submitblock <hex data> [optional-params-obj]\n"
"[optional-params-obj] parameter is currently ignored.\n"
"Attempts to submit new block to network.\n"
"See https://en.bitcoin.it/wiki/BIP_0022 for full specification.");
vector<unsigned char> blockData(ParseHex(params[0].get_str()));
CDataStream ssBlock(blockData, SER_NETWORK, PROTOCOL_VERSION);
CBlock pblock;
try {
ssBlock >> pblock;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block decode failed");
}
CValidationState state;
bool fAccepted = ProcessBlock(state, NULL, &pblock);
if (!fAccepted)
return "rejected"; // TODO: report validation state
return Value::null;
}<|fim▁end|> | |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import sys
import os
class Nwchem(Package):
"""High-performance computational chemistry software"""
homepage = "http://www.nwchem-sw.org"
url = "http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz"
tags = ['ecp', 'ecp-apps']
version('6.8', '50b18116319f4c15d1cb7eaa1b433006',
url='https://github.com/nwchemgit/nwchem/archive/v6.8-release.tar.gz')
version('6.6', 'c581001c004ea5e5dfacb783385825e3',
url='http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz')
depends_on('blas')
depends_on('lapack')
depends_on('mpi')
depends_on('scalapack')
depends_on('[email protected]:2.8', type=('build', 'link', 'run'))
# first hash is sha256 of the patch (required for URL patches),
# second is sha256 for the archive.
# patches for 6.6-27746:
urls_for_patches = {
'@6.6': [
('http://www.nwchem-sw.org/images/Tddft_mxvec20.patch.gz', 'ae04d4754c25fc324329dab085d4cc64148c94118ee702a7e14fce6152b4a0c5', 'cdfa8a5ae7d6ee09999407573b171beb91e37e1558a3bfb2d651982a85f0bc8f'),
('http://www.nwchem-sw.org/images/Tools_lib64.patch.gz', 'ef2eadef89c055c4651ea807079577bd90e1bc99ef6c89f112f1f0e7560ec9b4', '76b8d3e1b77829b683234c8307fde55bc9249b87410914b605a76586c8f32dae'),
('http://www.nwchem-sw.org/images/Config_libs66.patch.gz', '56f9c4bab362d82fb30d97564469e77819985a38e15ccaf04f647402c1ee248e', 'aa17f03cbb22ad7d883e799e0fddad1b5957f5f30b09f14a1a2caeeb9663cc07'),
('http://www.nwchem-sw.org/images/Cosmo_meminit.patch.gz', 'f05f09ca235ad222fe47d880bfd05a1b88d0148b990ca8c7437fa231924be04b', '569c5ee528f3922ee60ca831eb20ec6591633a36f80efa76cbbe41cabeb9b624'),
('http://www.nwchem-sw.org/images/Sym_abelian.patch.gz', 'e3470fb5786ab30bf2eda3bb4acc1e4c48fb5e640a09554abecf7d22b315c8fd', 'aa693e645a98dbafbb990e26145d65b100d6075254933f36326cf00bac3c29e0'),
('http://www.nwchem-sw.org/images/Xccvs98.patch.gz', '75540e0436c12e193ed0b644cff41f5036d78c101f14141846083f03ad157afa', '1c0b0f1293e3b9b05e9e51e7d5b99977ccf1edb4b072872c8316452f6cea6f13'),
('http://www.nwchem-sw.org/images/Dplot_tolrho.patch.gz', '8c30f92730d15f923ec8a623e3b311291eb2ba8b9d5a9884716db69a18d14f24', '2ebb1a5575c44eef4139da91f0e1e60057b2eccdba7f57a8fb577e840c326cbb'),
('http://www.nwchem-sw.org/images/Driver_smalleig.patch.gz', 'a040df6f1d807402ce552ba6d35c9610d5efea7a9d6342bbfbf03c8d380a4058', 'dd65bfbae6b472b94c8ee81d74f6c3ece37c8fc8766ff7a3551d8005d44815b8'),
('http://www.nwchem-sw.org/images/Ga_argv.patch.gz', '6fcd3920978ab95083483d5ed538cd9a6f2a80c2cafa0c5c7450fa5621f0a314', '8a78cb2af14314b92be9d241b801e9b9fed5527b9cb47a083134c7becdfa7cf1'),
('http://www.nwchem-sw.org/images/Raman_displ.patch.gz', 'ca4312cd3ed1ceacdc3a7d258bb05b7824c393bf44f44c28a789ebeb29a8dba4', '6a16f0f589a5cbb8d316f68bd2e6a0d46cd47f1c699a4b256a3973130061f6c3'),
('http://www.nwchem-sw.org/images/Ga_defs.patch.gz', 'f8ac827fbc11f7d2a9d8ec840c6f79d4759ef782bd4d291f2e88ec81b1b230aa', 'c6f1a48338d196e1db22bcfc6087e2b2e6eea50a34d3a2b2d3e90cccf43742a9'),
('http://www.nwchem-sw.org/images/Zgesvd.patch.gz', 'c333a94ceb2c35a490f24b007485ac6e334e153b03cfc1d093b6037221a03517', '4af592c047dc3e0bc4962376ae2c6ca868eb7a0b40a347ed9b88e887016ad9ed'),
('http://www.nwchem-sw.org/images/Cosmo_dftprint.patch.gz', '449d59983dc68c23b34e6581370b2fb3d5ea425b05c3182f0973e5b0e1a62651', 'd3b73431a68d6733eb7b669d471e18a83e03fa8e40c48e536fe8edecd99250ff'),
('http://www.nwchem-sw.org/images/Txs_gcc6.patch.gz', '1dab87f23b210e941c765f7dd7cc2bed06d292a2621419dede73f10ba1ca1bcd', '139692215718cd7414896470c0cc8b7817a73ece1e4ca93bf752cf1081a195af'),
('http://www.nwchem-sw.org/images/Gcc6_optfix.patch.gz', '8f8a5f8246bc1e42ef0137049acab4448a2e560339f44308703589adf753c148', '15cff43ab0509e0b0e83c49890032a848d6b7116bd6c8e5678e6c933f2d051ab'),
('http://www.nwchem-sw.org/images/Util_gnumakefile.patch.gz', '173e17206a9099c3512b87e3f42441f5b089db82be1d2b306fe2a0070e5c8fad', '5dd82b9bd55583152295c999a0e4d72dd9d5c6ab7aa91117c2aae57a95a14ba1'),
('http://www.nwchem-sw.org/images/Util_getppn.patch.gz', 'c4a23592fdcfb1fb6b65bc6c1906ac36f9966eec4899c4329bc8ce12015d2495', '8be418e1f8750778a31056f1fdf2a693fa4a12ea86a531f1ddf6f3620421027e'),
('http://www.nwchem-sw.org/images/Gcc6_macs_optfix.patch.gz', 'ff33d5f1ccd33385ffbe6ce7a18ec1506d55652be6e7434dc8065af64c879aaa', 'fade16098a1f54983040cdeb807e4e310425d7f66358807554e08392685a7164'),
('http://www.nwchem-sw.org/images/Notdir_fc.patch.gz', '54c722fa807671d6bf1a056586f0923593319d09c654338e7dd461dcd29ff118', 'a6a233951eb254d8aff5b243ca648def21fa491807a66c442f59c437f040ee69')
]
}
# Iterate over patches
for __condition, __urls in urls_for_patches.items():
for __url, __sha256, __archive_sha256 in __urls:
patch(__url, when=__condition, level=0, sha256=__sha256, archive_sha256=__archive_sha256)
def install(self, spec, prefix):
scalapack = spec['scalapack'].libs
lapack = spec['lapack'].libs
blas = spec['blas'].libs
# see http://www.nwchem-sw.org/index.php/Compiling_NWChem
args = []
args.extend([
'NWCHEM_TOP=%s' % self.stage.source_path,
# NWCHEM is picky about FC and CC. They should NOT be full path.
# see http://www.nwchem-sw.org/index.php/Special:AWCforum/sp/id7524
'CC=%s' % os.path.basename(spack_cc),
'FC=%s' % os.path.basename(spack_fc),
'USE_MPI=y',
'MPI_LOC=%s' % spec['mpi'].prefix,
'USE_PYTHONCONFIG=y',
'PYTHONVERSION=%s' % spec['python'].version.up_to(2),
'PYTHONHOME=%s' % spec['python'].home,
'BLASOPT=%s' % ((lapack + blas).ld_flags),
'BLAS_LIB=%s' % blas.ld_flags,
'LAPACK_LIB=%s' % lapack.ld_flags,
'USE_SCALAPACK=y',
'SCALAPACK=%s' % scalapack.ld_flags,
'NWCHEM_MODULES=all python',
'NWCHEM_LONG_PATHS=Y' # by default NWCHEM_TOP is 64 char max
])<|fim▁hole|> # A flag to distinguish between 32bit and 64bit integers in linear
# algebra (Blas, Lapack, Scalapack)
use_32_bit_lin_alg = True
if use_32_bit_lin_alg:
args.extend([
'USE_64TO32=y',
'BLAS_SIZE=4',
'LAPACK_SIZE=4',
'SCALAPACK_SIZE=4'
])
else:
args.extend([
'BLAS_SIZE=8',
                'LAPACK_SIZE=8',
                'SCALAPACK_SIZE=8'
])
if sys.platform == 'darwin':
target = 'MACX64'
args.extend([
'CFLAGS_FORGA=-DMPICH_NO_ATTR_TYPE_TAGS'
])
else:
target = 'LINUX64'
args.extend(['NWCHEM_TARGET=%s' % target])
with working_dir('src'):
make('nwchem_config', *args)
if use_32_bit_lin_alg:
make('64_to_32', *args)
make(*args)
# need to install by hand. Follow Ubuntu:
# http://packages.ubuntu.com/trusty/all/nwchem-data/filelist
# http://packages.ubuntu.com/trusty/amd64/nwchem/filelist
share_path = join_path(prefix, 'share', 'nwchem')
mkdirp(prefix.bin)
install_tree('data', share_path)
install_tree(join_path('basis', 'libraries'),
join_path(share_path, 'libraries'))
install_tree(join_path('nwpw', 'libraryps'),
join_path(share_path, 'libraryps'))
b_path = join_path(self.stage.source_path, 'bin',
target, 'nwchem')
chmod = which('chmod')
chmod('+x', b_path)
install(b_path, prefix.bin)
# Finally, make user's life easier by creating a .nwchemrc file
# to point to the required data files.
nwchemrc = """\
nwchem_basis_library {data}/libraries/
nwchem_nwpw_library {data}/libraryps/
ffield amber
amber_1 {data}/amber_s/
amber_2 {data}/amber_q/
amber_3 {data}/amber_x/
amber_4 {data}/amber_u/
spce {data}/solvents/spce.rst
charmm_s {data}/charmm_s/
charmm_x {data}/charmm_x/
""".format(data=share_path)
with open(".nwchemrc", 'w') as f:
f.write(nwchemrc)
install(".nwchemrc", share_path)<|fim▁end|> |
# TODO: query if blas/lapack/scalapack uses 64bit Ints |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>KISSY.add("brix/gallery/switchable/index", function(S,Brick,KSSwitchable) {
/**
     * Wrapper around the KISSY Switchable component.
     * @extends Brix.Brick
     * @class Brix.Gallery.Switchable
     * <a target="_blank" href="http://docs.kissyui.com/docs/html/api/component/switchable/">For other configuration options, methods and events, see the KISSY API.</a>
     * @param {Object} config configuration options
*/
var Switchable = Brick.extend({
constructor:function(config){
this.config = config;
Switchable.superclass.constructor.apply(this, arguments);
},
bindUI:function(){
var self = this,
config = self.config;
if(config.switchType){
var switchType = config.switchType;
delete config.switchType;
self.switchable = new KSSwitchable[switchType](self.get('el'),config);
}
else{
self.switchable = new KSSwitchable(self.get('el'),config);
}
config = null;
delete self.config;
},
destructor:function(){
var self = this;
if(self.switchable&&self.switchable.destroy){
self.switchable.destroy();
}
}
});
/**
     * The type of Switchable instance to create: Tabs|Slide|Carousel|Accordion.
* @cfg switchType
*/
/**
     * @property {Object} switchable The KISSY Switchable instance created during bindUI
*/
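    /*
     * Minimal usage sketch (illustrative; the selector and config keys other than
     * switchType are hypothetical — see the KISSY Switchable API for real options):
     *
     *   var tabs = new Switchable({
     *       tmpl: '#demo-tabs',
     *       switchType: 'Tabs'   // Tabs | Slide | Carousel | Accordion
     *   });
     */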
return Switchable;
}, {<|fim▁hole|><|fim▁end|> | requires: ["brix/core/brick",'switchable']
}); |
<|file_name|>app.py<|end_file_name|><|fim▁begin|># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2015
# Author: Cleber Rosa <[email protected]>
"""
This is the main entry point for the rest client cli application
"""
import sys
import types
import importlib
import functools
from . import parser
from .. import connection
from ... import settings
from ... import output
from ... import exit_codes
__all__ = ['App']
class App(object):
"""
Base class for CLI application
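    Illustrative usage:
        app = App()
        app.run()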
"""
def __init__(self):<|fim▁hole|>
This class is intended both to be used by the stock client application
and also to be reused by custom applications. If you want, say, to
limit the amount of command line actions and its arguments, you can
simply supply another argument parser class to this constructor. Of
course another way to customize it is to inherit from this and modify
its members at will.
"""
self.connection = None
self.parser = parser.Parser()
self.parser.add_arguments_on_all_modules()
self.view = output.View()
def initialize_connection(self):
"""
Initialize the connection instance
"""
try:
self.connection = connection.Connection(
hostname=self.args.hostname,
port=self.args.port,
username=self.args.username,
password=self.args.password)
except connection.InvalidConnectionError:
self.view.notify(event="error",
msg="Error: could not connect to the server")
sys.exit(exit_codes.AVOCADO_JOB_FAIL)
except connection.InvalidServerVersionError:
self.view.notify(event="error",
msg=("REST server version is higher than "
"than this client can support."))
self.view.notify(event="error",
msg=("Please use a more recent version "
"of the REST client application."))
sys.exit(exit_codes.AVOCADO_JOB_FAIL)
def dispatch_action(self):
"""
        Calls the action that was specified via command line arguments.
This involves loading the relevant module file.
"""
module_name = "%s.%s" % ('avocado.core.restclient.cli.actions',
self.args.top_level_action)
try:
module = importlib.import_module(module_name)
except ImportError:
return
# Filter out the attributes out of the loaded module that look
# like command line actions, based on type and 'is_action' attribute
module_actions = {}
for attribute_name in module.__dict__:
attribute = module.__dict__[attribute_name]
if (isinstance(attribute, types.FunctionType) and
hasattr(attribute, 'is_action')):
if attribute.is_action:
module_actions[attribute_name] = attribute
chosen_action = None
for action in module_actions.keys():
if getattr(self.args, action, False):
chosen_action = action
break
kallable = module_actions.get(chosen_action, None)
if kallable is not None:
self.initialize_connection()
return kallable(self)
else:
self.view.notify(event="error",
msg="Action specified is not implemented")
def run(self):
"""
Main entry point for application
"""
action_result = None
try:
self.args = self.parser.parse_args()
action_result = self.dispatch_action()
except KeyboardInterrupt:
print 'Interrupted'
if isinstance(action_result, int):
sys.exit(action_result)
elif isinstance(action_result, bool):
if action_result is True:
sys.exit(0)
else:
sys.exit(1)<|fim▁end|> | """
Initializes a new app instance. |
<|file_name|>ec2creds.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v2 EC2 Credentials action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
from openstackclient.i18n import _ # noqa
class CreateEC2Creds(show.ShowOne):
"""Create EC2 credentials"""
log = logging.getLogger(__name__ + ".CreateEC2Creds")
def get_parser(self, prog_name):
parser = super(CreateEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'--project',
metavar='<project>',
help=_('Specify a project [admin only]'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.project:
project = utils.find_resource(
identity_client.tenants,
parsed_args.project,
).id
else:
# Get the project from the current auth
project = identity_client.auth_tenant_id
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
creds = identity_client.ec2.create(user, project)
info = {}
info.update(creds._info)
return zip(*sorted(six.iteritems(info)))
class DeleteEC2Creds(command.Command):
"""Delete EC2 credentials"""
log = logging.getLogger(__name__ + '.DeleteEC2Creds')
def get_parser(self, prog_name):
parser = super(DeleteEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'access_key',
metavar='<access-key>',
help=_('Credentials access key'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
identity_client.ec2.delete(user, parsed_args.access_key)
class ListEC2Creds(lister.Lister):
"""List EC2 credentials"""
log = logging.getLogger(__name__ + '.ListEC2Creds')
def get_parser(self, prog_name):
parser = super(ListEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
columns = ('access', 'secret', 'tenant_id', 'user_id')
column_headers = ('Access', 'Secret', 'Project ID', 'User ID')
data = identity_client.ec2.list(user)
return (column_headers,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class ShowEC2Creds(show.ShowOne):
"""Show EC2 credentials"""
log = logging.getLogger(__name__ + '.ShowEC2Creds')
def get_parser(self, prog_name):
parser = super(ShowEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'access_key',
metavar='<access-key>',
help=_('Credentials access key'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity<|fim▁hole|>
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
creds = identity_client.ec2.get(user, parsed_args.access_key)
info = {}
info.update(creds._info)
return zip(*sorted(six.iteritems(info)))<|fim▁end|> | |
<|file_name|>mac.py<|end_file_name|><|fim▁begin|># Copyright 2014 Huawei Technologies Co. Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.<|fim▁hole|>from compass.hdsdiscovery import base
from compass.utils import setting_wrapper as setting
from compass.utils import util
import logging
CLASS_NAME = "Mac"
class Mac(base.BaseSnmpMacPlugin):
"""Processes MAC address."""
def __init__(self, host, credential):
self.host = host
# self.credential = credential
# return
def scan(self):
"""Implemnets the scan method in BasePlugin class.
.. note::
Dummy scan function for compass appliance.
Returns fixed mac addresses.
"""
mac_list = None
machine_lists = util.load_configs(setting.MACHINE_LIST_DIR)
for items in machine_lists:
for item in items['MACHINE_LIST']:
for k, v in item.items():
if k == self.host:
mac_list = v
return mac_list<|fim▁end|> |
"""Compass Appliance Mac module.""" |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
fn main() {
println!("cargo:rustc-flags=-l dbgeng");
}<|fim▁end|> | |
<|file_name|>data_models.py<|end_file_name|><|fim▁begin|># Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gbpservice.nfp.configurator.drivers.loadbalancer.v2.haproxy.octavia_lib.\
common import data_models
class Interface(data_models.BaseDataModel):
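    """Minimal data model describing a network interface attached to a compute instance."""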
def __init__(self, id=None, compute_id=None, network_id=None,
fixed_ips=None, port_id=None):
self.id = id
self.compute_id = compute_id
self.network_id = network_id
self.port_id = port_id
self.fixed_ips = fixed_ips
class Delta(data_models.BaseDataModel):
def __init__(self, amphora_id=None, compute_id=None,
add_nics=None, delete_nics=None):
self.compute_id = compute_id
self.amphora_id = amphora_id
self.add_nics = add_nics
self.delete_nics = delete_nics
class Network(data_models.BaseDataModel):
def __init__(self, id=None, name=None, subnets=None,
project_id=None, admin_state_up=None, mtu=None,
provider_network_type=None,
provider_physical_network=None,
provider_segmentation_id=None,
router_external=None):
self.id = id
self.name = name
self.subnets = subnets
self.project_id = project_id
self.admin_state_up = admin_state_up
self.provider_network_type = provider_network_type<|fim▁hole|> self.mtu = mtu
class Subnet(data_models.BaseDataModel):
def __init__(self, id=None, name=None, network_id=None, project_id=None,
gateway_ip=None, cidr=None, ip_version=None):
self.id = id
self.name = name
self.network_id = network_id
self.project_id = project_id
self.gateway_ip = gateway_ip
self.cidr = cidr
self.ip_version = ip_version
class Port(data_models.BaseDataModel):
def __init__(self, id=None, name=None, device_id=None, device_owner=None,
mac_address=None, network_id=None, status=None,
project_id=None, admin_state_up=None, fixed_ips=None,
network=None):
self.id = id
self.name = name
self.device_id = device_id
self.device_owner = device_owner
self.mac_address = mac_address
self.network_id = network_id
self.status = status
self.project_id = project_id
self.admin_state_up = admin_state_up
self.fixed_ips = fixed_ips or []
self.network = network
def get_subnet_id(self, fixed_ip_address):
for fixed_ip in self.fixed_ips:
if fixed_ip.ip_address == fixed_ip_address:
return fixed_ip.subnet_id
class FixedIP(data_models.BaseDataModel):
def __init__(self, subnet_id=None, ip_address=None, subnet=None):
self.subnet_id = subnet_id
self.ip_address = ip_address
self.subnet = subnet
class AmphoraNetworkConfig(data_models.BaseDataModel):
def __init__(self, amphora=None, vip_subnet=None, vip_port=None,
vrrp_subnet=None, vrrp_port=None, ha_subnet=None,
ha_port=None):
self.amphora = amphora
self.vip_subnet = vip_subnet
self.vip_port = vip_port
self.vrrp_subnet = vrrp_subnet
self.vrrp_port = vrrp_port
self.ha_subnet = ha_subnet
self.ha_port = ha_port<|fim▁end|> | self.provider_physical_network = provider_physical_network
self.provider_segmentation_id = provider_segmentation_id
self.router_external = router_external |
<|file_name|>app.component.spec.ts<|end_file_name|><|fim▁begin|>import { TestBed, async } from '@angular/core/testing';
import { AppComponent } from './app.component';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { OnChangesDemoComponent } from './on-changes-demo/on-changes-demo.component';
import { DoCheckDemoComponent } from './do-check-demo/do-check-demo.component';<|fim▁hole|>import { OnInitDemoComponent } from './on-init-demo/on-init-demo.component';
import { AfterContentInitChildComponent } from './after-content-init-child/after-content-init-child.component';
import { AfterContentInitDemoComponent } from './after-content-init-demo/after-content-init-demo.component';
import { OnDestroyDemoComponent } from './on-destroy-demo/on-destroy-demo.component';
import { AfterViewInitDemoComponent } from './after-view-init-demo/after-view-init-demo.component';
describe('AppComponent', () => {
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [CommonModule, FormsModule],
declarations: [
AppComponent,
OnInitDemoComponent,
OnChangesDemoComponent,
DoCheckDemoComponent,
AfterContentInitChildComponent,
AfterContentInitDemoComponent,
OnDestroyDemoComponent,
AfterViewInitDemoComponent
]
}).compileComponents();
}));
it('should create the app', async(() => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.debugElement.componentInstance;
expect(app).toBeTruthy();
}));
});<|fim▁end|> | |
<|file_name|>shiny_types.py<|end_file_name|><|fim▁begin|>from shinymud.lib.world import World
import json
world = World.get_world()
def to_bool(val):
"""Take a string representation of true or false and convert it to a boolean
value. Returns a boolean value or None, if no corresponding boolean value
exists.
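    Illustrative examples:
        to_bool('True')  -> True
        to_bool('0')     -> False
        to_bool('maybe') -> None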
"""
bool_states = {'true': True, 'false': False, '0': False, '1': True}
if not val:
return None
if isinstance(val, bool):
return val
val = str(val)
val = val.strip().lower()
return bool_states.get(val)
def read_dict(val):
# val is a string like "foo=bar,name=fred"
# return {'foo':'bar', 'name':'fred'}
return dict([thing.split('=') for thing in val.split(',')])
def write_dict(val):
return ",".join('='.join([str(k),str(v)]) for k,v in val.items())
def copy_dict(val):
return dict(val.items())
def read_list(val):
if isinstance(val, list):
return val
if not val:
return []
return val.split(',')
def write_list(val):
if not val:
return None
return ','.join(map(str,val))
def copy_list(val):
return val[:]
def read_area(val):
if isinstance(val, basestring):
return world.get_area(val)
return val
def write_area(val):
if isinstance(val, basestring):
return val
return val.name
def read_merchandise(val):
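    # val is a string like "name=sword,price=10<>name=shield,price=5"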
return [read_dict(each) for each in val.split('<>')]
def write_merchandise(val):
lst = []
for dicts in val:
if dicts.get('keywords'):
del dicts['keywords']
lst.append(write_dict(dicts))
return '<>'.join(lst)
def read_json(val):
return json.loads(val)
def write_json(val):
return json.dumps(val)
def write_model(val):
if isinstance(val, int):
return val
return val.dbid
def read_int_dict(val):
d = {}
if val:
for a in val.split(','):
key, val = a.split('=')
d[key] = int(val)
return d
def write_int_dict(val):
s = []
if val:
for key, val in val.items():
s.append("%s=%s" % (str(key), str(val)))
return ",".join(s)
def read_damage(val):
dmg = []
if val:
for d in val.split('|'):
dmg.append(Damage(d))
return dmg
def write_damage(val):
return '|'.join([str(d) for d in val])
def read_channels(val):
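    # val is a string like "chat=true,newbie=false"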
d = {}
for pair in val.split(','):<|fim▁hole|> d[k] = to_bool(v)
return d
def read_location(val):
#loc == 'area,id'
loc = val.split(',')
return world.get_location(loc[0], loc[1])
def write_location(val):
if val:
return '%s,%s' % (val.area.name, val.id)
return None
def read_int(val):
try:
r = int(val)
except ValueError:
r = 0
return r
def read_float(val):
try:
r = float(val)
except ValueError:
r = 0.0
return r<|fim▁end|> | k,v = pair.split('=') |
<|file_name|>DlgMSMeer.cpp<|end_file_name|><|fim▁begin|>// DlgMSMeer.cpp : implementation file
//
#include "stdafx.h"
#include "Risa.h"
#include "DlgMSMeer.h"
#include "afxdialogex.h"
#include "msImageProcessor.h"
// CDlgMSMeer dialog
IMPLEMENT_DYNAMIC(CDlgMSMeer, CDialogEx)
CDlgMSMeer::CDlgMSMeer(cv::Mat* pmImg, bool bColor, CWnd* pParent /*=NULL*/)
: CDialogEx(CDlgMSMeer::IDD, pParent)
, m_nOP(0)
, m_sigmaS(6)
, m_sigmaR(2)
, m_nMinRegion(100)
{
m_pmSource = pmImg;
m_bColor = bColor;
hasFilter_ = 0;
hasSegment_ = 0;
cbgImage_ = NULL;
filtImage_ = NULL;
segmImage_ = NULL;
}
CDlgMSMeer::~CDlgMSMeer()
{
if(cbgImage_ != NULL) delete cbgImage_;
if(filtImage_ != NULL) delete filtImage_;
if(segmImage_ != NULL) delete segmImage_;
}
void CDlgMSMeer::DoDataExchange(CDataExchange* pDX)
{
CDialogEx::DoDataExchange(pDX);
DDX_Radio(pDX, IDC_RADIO_OP, m_nOP);
DDX_Text(pDX, IDC_EDIT_BW_SPATIAL, m_sigmaS);
DDX_Text(pDX, IDC_EDIT_BW_COLOR, m_sigmaR);
DDX_Text(pDX, IDC_EDIT_MIN_REGION, m_nMinRegion);
}
BEGIN_MESSAGE_MAP(CDlgMSMeer, CDialogEx)
ON_BN_CLICKED(IDC_RUN, &CDlgMSMeer::OnBnClickedRun)
ON_BN_CLICKED(IDOK, &CDlgMSMeer::OnBnClickedOk)
END_MESSAGE_MAP()
// CDlgMSMeer message handlers
void CDlgMSMeer::OnBnClickedRun()
{
// TODO: Add your control notification handler code here
UpdateData(TRUE); // ==> variable
BeginWaitCursor();
// parameters
// sigma_s
// sigma_r
// a
// epsilon
// minRegion
// kernel radius
// filter
// speedup level
sigmaS = m_sigmaS;
sigmaR = m_sigmaR;
minRegion = m_nMinRegion;
//, kernelSize
//obtain image dimensions
float speedUpThreshold_ = (float) 0.1;
	SpeedUpLevel speedUpLevel_ = MED_SPEEDUP;
int width, height;
width = cbgImage_->x_;
height = cbgImage_->y_;
//obtain image type (color or grayscale)
imageType gtype;
if(cbgImage_->colorIm_)
gtype = COLOR;
else
gtype = GRAYSCALE;
gpMsgbar->ShowMessage("Input parameters:\n");
gpMsgbar->ShowMessage("\tSpatial Bandwidth\t= %4d\n\tColor Bandwidth\t= %4.1f\n", sigmaS, sigmaR);
gpMsgbar->ShowMessage("\tMinimum Region\t= %4d\n", minRegion);
//determine operation (filtering or segmentation)
int operation = m_nOP;
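	// operation: 1 = filter only, 2 = fuse regions using an existing filter result, otherwise full segmentation (filter + fuse).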
//create instance of image processor class
msImageProcessor *iProc = new msImageProcessor();
//define an input image using the image under consideration
//(if filtering or segmentation has taken place, then use this
// result upon performing fusing...)
if((operation == 2)&&(hasFilter_))
iProc->DefineImage(filtImage_->im_, gtype, height, width);
else
iProc->DefineImage(cbgImage_->im_, gtype, height, width);
int dim;
if(cbgImage_->colorIm_) {
dim = 3;
m_mResult.create(height, width, CV_8UC3);
}else{
dim = 1;
m_mResult.create(height, width, CV_8UC1);
}
iProc->SetSpeedThreshold(speedUpThreshold_);
switch(operation)
{<|fim▁hole|> //filter
case 1:
iProc->Filter(sigmaS, sigmaR, speedUpLevel_);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
} else if (iProc->ErrorStatus == EL_HALT)
{
break;
}
//obtain the filtered image....
filtImage_->Resize(width, height, cbgImage_->colorIm_);
iProc->GetResults(filtImage_->im_);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
}
//indicate that only the filtered image has been computed...
hasFilter_ = 1;
hasSegment_ = 0;
memcpy(m_mResult.data, filtImage_->im_, dim*height*width*sizeof(unsigned char));
if(m_bColor) {
cv::Mat bgr(height, width, CV_8UC3 );
int from_to[] = { 0,2, 1,1, 2,0};
cv::mixChannels( &m_mResult, 1, &bgr, 1, from_to, 3 );
bgr.copyTo(m_mResult);
}
cv::namedWindow("filter", CV_WINDOW_AUTOSIZE );
cv::imshow( "filter", m_mResult);
break;
//fuse
case 2:
iProc->FuseRegions(sigmaR, minRegion);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
} else if (iProc->ErrorStatus == EL_HALT)
{
break;
}
//obtain the segmented image...
segmImage_->Resize(width, height, cbgImage_->colorIm_);
iProc->GetResults(segmImage_->im_);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
}
//indicate that the segmented image has been computed...
hasSegment_ = 1;
memcpy(m_mResult.data, segmImage_->im_, dim*height*width*sizeof(unsigned char));
if(m_bColor) {
cv::Mat bgr(height, width, CV_8UC3 );
int from_to[] = { 0,2, 1,1, 2,0};
cv::mixChannels( &m_mResult, 1, &bgr, 1, from_to, 3 );
bgr.copyTo(m_mResult);
}
cv::namedWindow("fuse", CV_WINDOW_AUTOSIZE );
cv::imshow( "fuse", m_mResult);
break;
//segment
default:
//filter the image...
iProc->Filter(sigmaS, sigmaR, speedUpLevel_);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
} else if (iProc->ErrorStatus == EL_HALT)
{
break;
}
//filter the image....
unsigned char *tempImage = new unsigned char [dim*height*width];
iProc->GetResults(tempImage);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
delete [] tempImage;
return;
}
//fuse regions...
iProc->FuseRegions(sigmaR, minRegion);
if (iProc->ErrorStatus == EL_ERROR)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
delete [] tempImage;
return;
} else if (iProc->ErrorStatus == EL_HALT)
{
delete [] tempImage;
break;
}
//obtain the segmented and filtered image...
filtImage_->Resize(width, height, cbgImage_->colorIm_);
memcpy(filtImage_->im_, tempImage, dim*height*width*sizeof(unsigned char));
delete [] tempImage;
segmImage_->Resize(width, height, cbgImage_->colorIm_);
iProc->GetResults(segmImage_->im_);
if (iProc->ErrorStatus)
{
gpMsgbar->ShowMessage("%s\n", iProc->ErrorMessage);
return;
}
//indicate that both the filtered and segmented image have been computed...
hasFilter_ = 1;
hasSegment_ = 1;
memcpy(m_mResult.data, segmImage_->im_, dim*height*width*sizeof(unsigned char));
if(m_bColor) {
cv::Mat bgr(height, width, CV_8UC3 );
int from_to[] = { 0,2, 1,1, 2,0};
cv::mixChannels( &m_mResult, 1, &bgr, 1, from_to, 3 );
bgr.copyTo(m_mResult);
}
cv::namedWindow("segment", CV_WINDOW_AUTOSIZE );
cv::imshow( "segment", m_mResult);
}
//delete the image processing object
delete iProc;
EndWaitCursor();
}
BOOL CDlgMSMeer::OnInitDialog()
{
CDialogEx::OnInitDialog();
// TODO: Add extra initialization here
cbgImage_ = new BgImage();
filtImage_ = new BgImage();
segmImage_ = new BgImage();
if(m_bColor) {
cv::Mat rgb(m_pmSource->rows, m_pmSource->cols, CV_8UC3 );
int from_to[] = { 0,2, 1,1, 2,0};
cv::mixChannels( m_pmSource, 1, &rgb, 1, from_to, 3 );
cbgImage_->SetImageFromRGB(rgb.data, rgb.cols, rgb.rows, true);
//rgb.copyTo(pNew->m_cvMat);
}else
cbgImage_->SetImageFromRGB(m_pmSource->data, m_pmSource->cols, m_pmSource->rows, true);
hasFilter_ = 0;
hasSegment_ = 0;
return TRUE; // return TRUE unless you set the focus to a control
// EXCEPTION: OCX Property Pages should return FALSE
}
void CDlgMSMeer::OnBnClickedOk()
{
// TODO: Add your control notification handler code here
CDialogEx::OnOK();
if(m_mResult.data != NULL) {
m_mResult.copyTo(*m_pmSource);
}
}<|fim▁end|> | |
<|file_name|>env.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements a standard mechanism for Chrome Infra Python environment setup.
This library provides a central location to define Chrome Infra environment
setup. It also provides several facilities to install this environment.
Within a cooperating script, the environment can be setup by importing this
module and running its 'Install' method:
# Install Chrome-Infra environment (replaces 'sys.path').
sys.path.insert(0,
os.path.join(os.path.dirname(__file__), os.pardir, ...))
# (/path/to/build/scripts)
import common.env
common.env.Install()
When attempting to export the Chrome Infra path to external scripts, this
script can be invoked as an executable with various subcommands to emit a valid
PYTHONPATH clause.
In addition, this module has several functions to construct the path.
The goal is to deploy this module universally among Chrome-Infra scripts,
BuildBot configurations, tool invocations, and tests to ensure that they all
execute with the same centrally-defined environment.
"""
import argparse
import collections
import contextlib
import imp
import itertools
import os
import sys
import traceback
# Export for bootstrapping.
__all__ = [
'Install',
'PythonPath',
]
# Name of enviornment extension file to seek.
ENV_EXTENSION_NAME = 'environment.cfg.py'
# Standard directories (based on this file's location in the <build> tree).
def path_if(*args):
if not all(args):
return None
path = os.path.abspath(os.path.join(*args))
return (path) if os.path.exists(path) else (None)
# The path to the <build> directory in which this script resides.
Build = path_if(os.path.dirname(__file__), os.pardir, os.pardir)
# The path to the <build_internal> directory.
BuildInternal = path_if(Build, os.pardir, 'build_internal')
def SetPythonPathEnv(value):
"""Sets the system's PYTHONPATH environemnt variable.
Args:
value (str): The value to use. If this is empty/None, the system's
PYTHONPATH will be cleared.
"""
# Since we can't assign None to the environment "dictionary", we have to
# either set or delete the key depending on the original value.
if value is not None:
os.environ['PYTHONPATH'] = str(value)
else:
os.environ.pop('PYTHONPATH', None)
def Install(**kwargs):
"""Replaces the current 'sys.path' with a hermetic Chrome-Infra path.
Args:
kwargs (dict): See GetInfraPythonPath arguments.
Returns (PythonPath): The PythonPath object that was installed.
"""
infra_python_path = GetInfraPythonPath(**kwargs)
infra_python_path.Install()
return infra_python_path
def SplitPath(path):
"""Returns (list): A list of path elements.
Splits a path into path elements. For example (assuming '/' is the local
system path separator):
>>> print SplitPath('/a/b/c/d')
['/', 'a', 'b', 'c', 'd']
>>> print SplitPath('a/b/c')
  ['a', 'b', 'c']
path, component = os.path.split(path)
if not component:
if path:
parts.append(path)
break
parts.append(component)
parts.reverse()
return parts
def ExtendPath(base, root_dir):
"""Returns (PythonPath): The extended python path.
This method looks for the ENV_EXTENSION_NAME file within "root_dir". If
present, it will be loaded as a Python module and have its "Extend" method
called.
If no extension is found, the base PythonPath will be returned.
Args:
base (PythonPath): The base python path.
root_dir (str): The path to check for an extension.
"""
extension_path = os.path.join(root_dir, ENV_EXTENSION_NAME)
if not os.path.isfile(extension_path):
return base
with open(extension_path, 'r') as fd:
extension = fd.read()
extension_module = imp.new_module('env-extension')
  # Execute the environment extension.
try:
exec extension in extension_module.__dict__
extend_func = getattr(extension_module, 'Extend', None)
assert extend_func, (
"The environment extension module is missing the 'Extend()' method.")
base = extend_func(base, root_dir)
if not isinstance(base, PythonPath):
raise TypeError("Extension module returned non-PythonPath object (%s)" % (
type(base).__name__,))
except Exception:
# Re-raise the exception, but include the configuration file name.
tb = traceback.format_exc()
raise RuntimeError("Environment extension [%s] raised exception: %s" % (
extension_path, tb))
return base
def IsSystemPythonPath(path):
"""Returns (bool): If a python path is user-installed.
Paths that are known to be user-installed paths can be ignored when setting
up a hermetic Python path environment to avoid user libraries that would not
be present in other environments falsely affecting code.
This function can be updated as-needed to exclude other non-system paths
encountered on bots and in the wild.
"""
components = SplitPath(path)
for component in components:
if component in ('dist-packages', 'site-packages'):
return False
return True
class PythonPath(collections.Sequence):
"""An immutable set of Python path elements.
All paths represented in this structure are absolute. If a relative path
is passed into this structure, it will be converted to absolute based on
the current working directory (via os.path.abspath).
"""
def __init__(self, components=None):
"""Initializes a new PythonPath instance.
Args:
components (list): A list of path component strings.
"""
seen = set()
self._components = []
for component in (components or ()):
component = os.path.abspath(component)
assert isinstance(component, basestring), (
"Path component '%s' is not a string (%s)" % (
component, type(component).__name__))
if component in seen:
continue
seen.add(component)
self._components.append(component)
def __getitem__(self, value):
return self._components[value]
def __len__(self):
return len(self._components)
def __iadd__(self, other):
return self.Append(other)
def __repr__(self):
return self.pathstr
def __eq__(self, other):
assert isinstance(other, type(self))
return self._components == other._components
@classmethod
def Flatten(cls, *paths):
"""Returns (list): A single-level list containing flattened path elements.
>>> print PythonPath.Flatten('a', ['b', ['c', 'd']])
['a', 'b', 'c', 'd']
"""
result = []
for path in paths:
if not isinstance(path, basestring):
# Assume it's an iterable of paths.
result += cls.Flatten(*path)
else:
result.append(path)
return result
@classmethod
def FromPaths(cls, *paths):
"""Returns (PythonPath): A PythonPath instantiated from path elements.
Args:
paths (tuple): A tuple of path elements or iterables containing path
elements (e.g., PythonPath instances).
"""
return cls(cls.Flatten(*paths))
@classmethod
def FromPathStr(cls, pathstr):
"""Returns (PythonPath): A PythonPath instantiated from the path string.
Args:
pathstr (str): An os.pathsep()-delimited path string.
"""
return cls(pathstr.split(os.pathsep))
@property
def pathstr(self):
"""Returns (str): A path string for the instance's path elements."""
return os.pathsep.join(self)
def IsHermetic(self):
"""Returns (bool): True if this instance contains only system paths."""
return all(IsSystemPythonPath(p) for p in self)
def GetHermetic(self):
"""Returns (PythonPath): derivative PythonPath containing only system paths.
"""
return type(self).FromPaths(*(p for p in self if IsSystemPythonPath(p)))
def Append(self, *paths):
"""Returns (PythonPath): derivative PythonPath with paths added to the end.
Args:
paths (tuple): A tuple of path elements to append to the current instance.
"""
return type(self)(itertools.chain(self, self.FromPaths(*paths)))
def Override(self, *paths):
"""Returns (PythonPath): derivative PythonPath with paths prepended.
Args:
paths (tuple): A tuple of path elements to prepend to the current
instance.
"""
return self.FromPaths(*paths).Append(self)
def Install(self):
"""Overwrites Python runtime variables based on the current instance.
Performs the following operations:
- Replaces sys.path with the current instance's path.
- Replaces os.environ['PYTHONPATH'] with the current instance's path
string.
"""
sys.path = list(self)
SetPythonPathEnv(self.pathstr)
@contextlib.contextmanager
def Enter(self):
"""Context manager wrapper for Install.
On exit, the context manager will restore the original environment.
"""
orig_sys_path = sys.path[:]
orig_pythonpath = os.environ.get('PYTHONPATH')
try:
self.Install()
yield
finally:
sys.path = orig_sys_path
SetPythonPathEnv(orig_pythonpath)
def GetSysPythonPath(hermetic=True):
"""Returns (PythonPath): A path based on 'sys.path'.
Args:
hermetic (bool): If True, prune any non-system path.
"""
path = PythonPath.FromPaths(*sys.path)
if hermetic:
path = path.GetHermetic()
return path
def GetEnvPythonPath():
"""Returns (PythonPath): A path based on the PYTHONPATH environment variable.
"""
pythonpath = os.environ.get('PYTHONPATH')
if not pythonpath:
return PythonPath.FromPaths()
return PythonPath.FromPathStr(pythonpath)
def GetMasterPythonPath(master_dir):
"""Returns (PythonPath): A path including a BuildBot master's directory.
Args:
master_dir (str): The BuildBot master root directory.
"""
return PythonPath.FromPaths(master_dir)
def GetBuildPythonPath():
"""Returns (PythonPath): The Chrome Infra build path."""
build_path = PythonPath.FromPaths()
for extension_dir in (
Build,
BuildInternal,
):
if extension_dir:
build_path = ExtendPath(build_path, extension_dir)
return build_path
def GetInfraPythonPath(hermetic=True, master_dir=None):
"""Returns (PythonPath): The full working Chrome Infra utility path.
This path is consistent for master, slave, and tool usage. It includes (in
this order):
- Any environment PYTHONPATH overrides.
- If 'master_dir' is supplied, the master's python path component.
- The Chrome Infra build path.
- The system python path.
Args:
hermetic (bool): If True, prune any non-system path from the system path.
master_dir (str): If not None, include a master path component.
"""
path = GetEnvPythonPath()
if master_dir:
path += GetMasterPythonPath(master_dir)
path += GetBuildPythonPath()
path += GetSysPythonPath(hermetic=hermetic)
return path
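# Illustrative sketch (not part of the original module): the composed path is
# typically installed with Install(), or applied temporarily via the Enter()
# context manager so the original sys.path/PYTHONPATH are restored on exit.
#
#   path = GetInfraPythonPath(hermetic=True)
#   with path.Enter():
#       pass  # imports here resolve against the assembled path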
def _InfraPathFromArgs(args):
"""Returns (PythonPath): A PythonPath populated from command-line arguments.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
"""
return GetInfraPythonPath(
master_dir=args.master_dir,
)
def _Command_Echo(args, path):
"""Returns (int): Return code.
Command function for the 'echo' subcommand. Outputs the path string for
'path'.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
path (PythonPath): The python path to use.
"""
args.output.write(path.pathstr)
return 0
def _Command_Print(args, path):
"""Returns (int): Return code.
Command function for the 'print' subcommand. Outputs each path component in
path on a separate line.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
path (PythonPath): The python path to use.
"""
for component in path:
print >>args.output, component
return 0
def main():
"""Main execution function."""
parser = argparse.ArgumentParser()
parser.add_argument('-M', '--master_dir',
help="Augment the path with the master's directory.")
parser.add_argument('-o', '--output', metavar='PATH',
type=argparse.FileType('w'), default='-',
help="File to output to (use '-' for STDOUT).")
subparsers = parser.add_subparsers()
# 'echo'
subparser = subparsers.add_parser('echo')
subparser.set_defaults(func=_Command_Echo)
# 'print'
subparser = subparsers.add_parser('print')
subparser.set_defaults(func=_Command_Print)
# Parse
args = parser.parse_args()
# Execute our subcommand function, which will return the exit code.
path = _InfraPathFromArgs(args)
return args.func(args, path)
if __name__ == '__main__':
sys.exit(main())<|fim▁end|> | """
parts = [] |
<|file_name|>Asset.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package nl.hyranasoftware.githubupdater.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.Objects;
import org.joda.time.DateTime;
/**
*
* @author danny_000
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class Asset {
String url;
String browser_download_url;
int id;
String name;
String label;
String state;
String content_type;
long size;
long download_count;
DateTime created_at;
DateTime updated_at;
GithubUser uploader;
public Asset() {<|fim▁hole|> this.browser_download_url = browser_download_url;
this.id = id;
this.name = name;
this.label = label;
this.state = state;
this.content_type = content_type;
this.size = size;
this.download_count = download_count;
this.created_at = created_at;
this.updated_at = updated_at;
this.uploader = uploader;
}
public String getState() {
return state;
}
public String getUrl() {
return url;
}
public String getBrowser_download_url() {
return browser_download_url;
}
public int getId() {
return id;
}
public String getName() {
return name;
}
public String getLabel() {
return label;
}
public String getContent_type() {
return content_type;
}
public long getSize() {
return size;
}
public long getDownload_count() {
return download_count;
}
public DateTime getCreated_at() {
return created_at;
}
public DateTime getUpdated_at() {
return updated_at;
}
public GithubUser getUploader() {
return uploader;
}
@Override
public int hashCode() {
int hash = 7;
hash = 79 * hash + Objects.hashCode(this.content_type);
hash = 79 * hash + (int) (this.download_count ^ (this.download_count >>> 32));
hash = 79 * hash + Objects.hashCode(this.created_at);
hash = 79 * hash + Objects.hashCode(this.updated_at);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Asset other = (Asset) obj;
if (this.id != other.id) {
return false;
}
if (!Objects.equals(this.name, other.name)) {
return false;
}
if (!Objects.equals(this.content_type, other.content_type)) {
return false;
}
return true;
}
@Override
public String toString(){
return this.name;
}
}<|fim▁end|> | }
public Asset(String url, String browser_download_url, int id, String name, String label, String state, String content_type, long size, long download_count, DateTime created_at, DateTime updated_at, GithubUser uploader) {
this.url = url; |
<|file_name|>infinite-scroll.d.ts<|end_file_name|><|fim▁begin|>import { ElementRef, EventEmitter, NgZone } from '@angular/core';
import { Content, ScrollEvent } from '../content/content';
import { DomController } from '../../platform/dom-controller';
/**
* @name InfiniteScroll
* @description
* The Infinite Scroll allows you to perform an action when the user
* scrolls a specified distance from the bottom of the page.
*
* The expression assigned to the `infinite` event is called when
* the user scrolls to the specified distance. When this expression
* has finished its tasks, it should call the `complete()` method
* on the infinite scroll instance.
*
* @usage
* ```html
* <ion-content>
*
* <ion-list>
* <ion-item *ngFor="let i of items">{% raw %}{{i}}{% endraw %}</ion-item>
* </ion-list>
*
* <ion-infinite-scroll (ionInfinite)="doInfinite($event)">
* <ion-infinite-scroll-content></ion-infinite-scroll-content>
* </ion-infinite-scroll>
*
* </ion-content>
* ```
*
* ```ts
* @Component({...})
* export class NewsFeedPage {
* items = [];
*
* constructor() {
* for (let i = 0; i < 30; i++) {
* this.items.push( this.items.length );
* }
* }
*
* doInfinite(infiniteScroll) {
* console.log('Begin async operation');
*
* setTimeout(() => {
* for (let i = 0; i < 30; i++) {
* this.items.push( this.items.length );
* }
*
* console.log('Async operation has ended');
* infiniteScroll.complete();
* }, 500);
* }
*
* }
* ```
*
*
* ## Infinite Scroll Content
*
* By default, Ionic uses the infinite scroll spinner that looks
* best for the platform the user is on. However, you can change the<|fim▁hole|> * ```html
* <ion-content>
*
* <ion-infinite-scroll (ionInfinite)="doInfinite($event)">
* <ion-infinite-scroll-content
* loadingSpinner="bubbles"
* loadingText="Loading more data...">
* </ion-infinite-scroll-content>
* </ion-infinite-scroll>
*
* </ion-content>
* ```
*
*
* ## Further Customizing Infinite Scroll Content
*
* The `ion-infinite-scroll` component holds the infinite scroll logic.
* It requires a child component in order to display the content.
* Ionic uses `ion-infinite-scroll-content` by default. This component
* displays the infinite scroll and changes the look depending
* on the infinite scroll's state. Separating these components allows
* developers to create their own infinite scroll content components.
* You could replace our default content with custom SVG or CSS animations.
*
* @demo /docs/v2/demos/src/infinite-scroll/
*
*/
export declare class InfiniteScroll {
private _content;
private _zone;
private _elementRef;
private _dom;
_lastCheck: number;
_highestY: number;
_scLsn: any;
_thr: string;
_thrPx: number;
_thrPc: number;
_init: boolean;
/**
* @internal
*/
state: string;
/**
* @input {string} The threshold distance from the bottom
* of the content to call the `infinite` output event when scrolled.
* The threshold value can be either a percentage or a pixel value.
* For example, use the value of `10%` for the `infinite`
* output event to get called when the user has scrolled 10%
* from the bottom of the page. Use the value `100px` when the
* scroll is within 100 pixels from the bottom of the page.
* Default is `15%`.
*/
threshold: string;
/**
* @input {boolean} Whether or not the infinite scroll should be
* enabled. Setting to `false` will remove scroll event listeners
* and hide the display.
*/
enabled: boolean;
/**
* @output {event} Emitted when the scroll reaches
* the threshold distance. From within your infinite handler,
* you must call the infinite scroll's `complete()` method when
* your async operation has completed.
*/
ionInfinite: EventEmitter<InfiniteScroll>;
constructor(_content: Content, _zone: NgZone, _elementRef: ElementRef, _dom: DomController);
_onScroll(ev: ScrollEvent): number;
/**
* Call `complete()` within the `infinite` output event handler when
* your async operation has completed. For example, the `loading`
* state is active while the app is performing an asynchronous operation,
* such as receiving more data from an AJAX request to add more items
* to a data list. Once the data has been received and UI updated, you
* then call this method to signify that the loading has completed.
* This method will change the infinite scroll's state from `loading`
* to `enabled`.
*/
complete(): void;
/**
* Call `enable(false)` to disable the infinite scroll from actively
* trying to receive new data while scrolling. This method is useful
* when it is known that there is no more data that can be added, and
* the infinite scroll is no longer needed.
* @param {boolean} shouldEnable If the infinite scroll should be
* enabled or not. Setting to `false` will remove scroll event listeners
* and hide the display.
*/
enable(shouldEnable: boolean): void;
/**
* @private
*/
_setListeners(shouldListen: boolean): void;
/**
* @private
*/
ngAfterContentInit(): void;
/**
* @private
*/
ngOnDestroy(): void;
}<|fim▁end|> | * default spinner or add text by adding properties to the
* `ion-infinite-scroll-content` component.
* |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import {
Badge,
Box,
Flex,
Heading,
Stack,
Tab,
TabList,
TabPanel,
TabPanels,
Tabs
} from '@chakra-ui/react'
import useSWR from 'swr'
import {API} from 'lib/constants'
import JobDashboard from 'lib/components/admin-job-dashboard'
import TextLink from 'lib/components/text-link'
import WorkerList from 'lib/components/admin-worker-list'
import withAuth from 'lib/with-auth'
// Refresh every five seconds
const refreshInterval = 5000
// Fetcher
const fetcher = (url, user) =>
fetch(url, {
headers: {
Authorization: `bearer ${user.idToken}`,
'X-Conveyal-Access-Group': user.adminTempAccessGroup
}
}).then((res) => res.json())
export default withAuth(function AdminDashboard({user}) {
const jobRequest = useSWR([API.Jobs, user], fetcher, {refreshInterval})
const workerRequest = useSWR([API.Workers, user], fetcher, {refreshInterval})
const jobs = (jobRequest.data || []).filter((j) => j.graphId !== 'SUM')<|fim▁hole|> <Flex p={16}>
<Stack spacing={4} mr={10}>
<Heading>ADMIN</Heading>
<TextLink href='/'>Regions</TextLink>
<TextLink href='/admin/set-access-group'>Set access group</TextLink>
</Stack>
<Box flex='1'>
<Tabs isFitted>
<TabList>
<Tab>
Jobs{' '}
<Badge ml='2' rounded='full'>
{jobs.length}
</Badge>
</Tab>
<Tab>
Workers{' '}
<Badge ml='2' rounded='full'>
{workers.length}
</Badge>
</Tab>
</TabList>
<TabPanels>
<TabPanel p={0}>
<JobDashboard jobs={jobs} workers={workers} />
</TabPanel>
<TabPanel p={0}>
<WorkerList workers={workers} />
</TabPanel>
</TabPanels>
</Tabs>
</Box>
</Flex>
)
})<|fim▁end|> | const workers = workerRequest.data || []
return ( |
<|file_name|>verlet_nvt_andersen.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright © 2008-2012 Peter Colberg and Felix Höfling
*
* This file is part of HALMD.
*
* HALMD is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <algorithm>
#include <boost/bind.hpp>
#include <cmath>
#include <memory>
#include <halmd/mdsim/host/integrators/verlet_nvt_andersen.hpp>
#include <halmd/utility/lua/lua.hpp>
namespace halmd {
namespace mdsim {
namespace host {
namespace integrators {
template <int dimension, typename float_type>
verlet_nvt_andersen<dimension, float_type>::verlet_nvt_andersen(
std::shared_ptr<particle_type> particle
, std::shared_ptr<box_type const> box
, std::shared_ptr<random_type> random
, float_type timestep
, float_type temperature
, float_type coll_rate
, std::shared_ptr<logger> logger
)
: particle_(particle)
, box_(box)
, random_(random)
, coll_rate_(coll_rate)
, logger_(logger)
{
set_timestep(timestep);
set_temperature(temperature);
LOG("collision rate with heat bath: " << coll_rate_);
}
template <int dimension, typename float_type>
void verlet_nvt_andersen<dimension, float_type>::set_timestep(double timestep)
{
timestep_ = timestep;
timestep_half_ = 0.5 * timestep;
coll_prob_ = coll_rate_ * timestep;
}
template <int dimension, typename float_type>
void verlet_nvt_andersen<dimension, float_type>::set_temperature(double temperature)
{
temperature_ = temperature;
sqrt_temperature_ = std::sqrt(temperature_);
LOG("temperature of heat bath: " << temperature_);
}
template <int dimension, typename float_type>
void verlet_nvt_andersen<dimension, float_type>::integrate()
{
LOG_TRACE("update positions and velocities")
force_array_type const& force = read_cache(particle_->force());
mass_array_type const& mass = read_cache(particle_->mass());
size_type nparticle = particle_->nparticle();
// invalidate the particle caches after accessing the force!
auto position = make_cache_mutable(particle_->position());
auto image = make_cache_mutable(particle_->image());
auto velocity = make_cache_mutable(particle_->velocity());
scoped_timer_type timer(runtime_.integrate);
for (size_type i = 0; i < nparticle; ++i) {
vector_type& v = (*velocity)[i];
vector_type& r = (*position)[i];
v += force[i] * timestep_half_ / mass[i];
r += v * timestep_;
(*image)[i] += box_->reduce_periodic(r);
}
}
template <int dimension, typename float_type>
void verlet_nvt_andersen<dimension, float_type>::finalize()
{
LOG_TRACE("update velocities")
force_array_type const& force = read_cache(particle_->force());
mass_array_type const& mass = read_cache(particle_->mass());
size_type nparticle = particle_->nparticle();
// invalidate the particle caches after accessing the force!
auto velocity = make_cache_mutable(particle_->velocity());
scoped_timer_type timer(runtime_.finalize);
// cache random numbers
float_type rng_cache = 0;
bool rng_cache_valid = false;
// loop over all particles
for (size_type i = 0; i < nparticle; ++i) {
vector_type& v = (*velocity)[i];
// is deterministic step?
if (random_->uniform<float_type>() > coll_prob_) {
v += force[i] * timestep_half_ / mass[i];
}
// stochastic coupling with heat bath
else {
// assign two velocity components at a time
for (unsigned int i = 0; i < dimension - 1; i += 2) {
std::tie(v[i], v[i + 1]) = random_->normal(sqrt_temperature_);
}
// handle last component separately for odd dimensions
if (dimension % 2 == 1) {
if (rng_cache_valid) {
v[dimension - 1] = rng_cache;
}
else {
std::tie(v[dimension - 1], rng_cache) = random_->normal(sqrt_temperature_);
}
rng_cache_valid = !rng_cache_valid;
}
}
}
}
template <int dimension, typename float_type>
void verlet_nvt_andersen<dimension, float_type>::luaopen(lua_State* L)
{
using namespace luaponte;
module(L, "libhalmd")
[
namespace_("mdsim")
[
namespace_("integrators")
[
class_<verlet_nvt_andersen>()
.def("integrate", &verlet_nvt_andersen::integrate)
.def("finalize", &verlet_nvt_andersen::finalize)
.def("set_timestep", &verlet_nvt_andersen::set_timestep)
.def("set_temperature", &verlet_nvt_andersen::set_temperature)
.property("timestep", &verlet_nvt_andersen::timestep)
.property("temperature", &verlet_nvt_andersen::temperature)
.property("collision_rate", &verlet_nvt_andersen::collision_rate)
.scope
[
class_<runtime>()
.def_readonly("integrate", &runtime::integrate)
.def_readonly("finalize", &runtime::finalize)
]
.def_readonly("runtime", &verlet_nvt_andersen::runtime_)
, def("verlet_nvt_andersen", &std::make_shared<verlet_nvt_andersen
, std::shared_ptr<particle_type>
, std::shared_ptr<box_type const>
, std::shared_ptr<random_type>
, float_type
, float_type
, float_type
, std::shared_ptr<logger>
>)
]
]
];
}
HALMD_LUA_API int luaopen_libhalmd_mdsim_host_integrators_verlet_nvt_andersen(lua_State* L)
{
#ifndef USE_HOST_SINGLE_PRECISION
verlet_nvt_andersen<3, double>::luaopen(L);
verlet_nvt_andersen<2, double>::luaopen(L);
#else
verlet_nvt_andersen<3, float>::luaopen(L);
verlet_nvt_andersen<2, float>::luaopen(L);
#endif<|fim▁hole|> return 0;
}
// explicit instantiation
#ifndef USE_HOST_SINGLE_PRECISION
template class verlet_nvt_andersen<3, double>;
template class verlet_nvt_andersen<2, double>;
#else
template class verlet_nvt_andersen<3, float>;
template class verlet_nvt_andersen<2, float>;
#endif
} // namespace integrators
} // namespace host
} // namespace mdsim
} // namespace halmd<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
const Async = require('async');
const Boom = require('boom');
const DataRetrievalRouter = require('./DataRetrievalRouter');
const DENY = 0;
const PERMIT = 1;
const UNDETERMINED = 3;
const internals = {};
/**
* Evaluate a single Policy of PolicySet
*
**/
internals.evaluatePolicy = (item, dataRetriever, callback) => {
if (!item) {
return callback(Boom.badImplementation('RBAC configuration error: null item'));
}
if (!dataRetriever) {
return callback(Boom.badImplementation('RBAC configuration error: null data retriever'));
}
if (!(dataRetriever instanceof DataRetrievalRouter)) {
return callback(Boom.badImplementation('RBAC configuration error: invalid data retriever'));
}
if (!item.apply) {
// Default combinatory algorithm
item.apply = 'permit-overrides';
}
if (!(item.apply instanceof Function)) {
if (!internals.combineAlg[item.apply]) {
return callback(Boom.badImplementation('RBAC error: combinatory algorithm does not exist: ' + item.apply));
}
item.apply = internals.combineAlg[item.apply];
}
internals.evaluateTarget(item.target, dataRetriever, (err, applies) => {
if (err) {
return callback(err);
}
if (!applies) {
return callback(null, UNDETERMINED);
}
// Policy set
if (item.policies) {
return item.apply(item.policies, dataRetriever, internals.evaluatePolicy, callback);
}
// Policy
if (item.rules) {
return item.apply(item.rules, dataRetriever, internals.evaluateRule, callback);
}
// Rule
internals.evaluateRule(item, dataRetriever, callback);
});
};
const VALID_EFFECTS = ['permit', 'deny'];
/**
* Evaluate a single rule.
*
* {
* 'target': [...],
* 'effect': PERMIT, DENY
* }
**/
internals.evaluateRule = (rule, dataRetriever, callback) => {
if (!rule) {
return callback(Boom.badImplementation('RBAC rule is missing'));
}
if (!rule.effect) {
return callback(Boom.badImplementation('RBAC rule effect is missing'));
}
if (VALID_EFFECTS.indexOf(rule.effect) === -1) {
return callback(Boom.badImplementation('RBAC rule effect is invalid. Use one of', VALID_EFFECTS));
}
internals.evaluateTarget(rule.target, dataRetriever, (err, applies) => {
if (err) {
return callback(err);
}
if (!applies) {
return callback(null, UNDETERMINED);
}
switch (rule.effect) {
case 'permit':
case PERMIT:
return callback(null, PERMIT);
case 'deny':
case DENY:
return callback(null, DENY);
default:
return callback(Boom.badImplementation('RBAC rule error: invalid effect ' + rule.effect));
}
});
};
/**
* Evaluate a target
* The objects in the target array are matched with OR condition. The keys in an object are matched with AND condition.
*
* [
* {
* 'credentials:username': 'francisco', // AND
* 'credentials:group': 'admin'
* }, // OR
* {
* 'credentials:username': 'francisco', // AND
* 'credentials:group': 'writer'
* }<|fim▁hole|> * This target applies to francisco, if he is in the group admin or writer.
*
**/
internals.evaluateTarget = (target, dataRetriever, callback) => {
if (!target) {
// Applies by default, when no target is defined
return callback(null, true);
}
if (target instanceof Array) {
if (!target.length) {
return callback(Boom.badImplementation('RBAC target error: invalid format. The array in target should have at least one element.'));
}
}
else {
// Allow defining a single element in target without using an array
target = [target];
}
const tasks = [];
for (const index in target) {
const element = target[index];
tasks.push(internals.evaluateTargetElement(dataRetriever, element));
}
Async.parallel(tasks, (err, result) => {
if (err) {
return callback(err);
}
// At least one should apply (OR)
const applicables = result.filter((value) => value);
callback(null, applicables.length > 0);
});
};
internals.evaluateTargetElement = (dataRetriever, element) => {
return (callback) => {
const promises = Object.keys(element).map((key) => internals.evaluateTargetElementKey(dataRetriever, element, key));
Promise.all(promises)
.then((results) => {
// Should all apply (AND)
const nonApplicable = results.filter((value) => !value);
callback(null, nonApplicable.length === 0);
})
.catch((err) => callback(err))
};
};
/**
* If target is defined as:
* { field: "credentials:user" }
* then this definition should be replaced by
* a value from dataRetriever for matching.
*
* @param dataRetriever
* @param definedValue
* @returns Promise
**/
internals.getTargetValue = (dataRetriever, definedValue) => {
if(typeof definedValue === "object") {
if (definedValue.field) {
return dataRetriever.get(definedValue.field);
}
}
return Promise.resolve(definedValue);
};
internals.evaluateTargetElementKey = (dataRetriever, element, key) => {
return Promise.all([
internals.getTargetValue(dataRetriever, element[key]),
dataRetriever.get(key)
])
.then((results) => {
const targetValue = results[0];
const value = results[1];
return internals._targetApplies(targetValue, value);
});
};
/**
* If target has more than one value, all of them should match
**/
internals._targetApplies = (targets, values) => {
if (!Array.isArray(targets)) {
targets = [targets];
}
if (!Array.isArray(values)) {
values = [values];
}
// Should match all
// So: continue looping unless one doesn't
for (const index in targets) {
const target = targets[index];
const matches = values.filter((value) => {
if (target instanceof RegExp) {
return target.test(value);
}
return value === target;
});
if (matches.length === 0) {
return false;
}
}
// All targets are matched
return true;
};
/**
* Combinator algorithms:
*
* - permit-overrides - If at least one permit is evaluated, then permit
* - deny-overrides - If at least one deny is evaluated, then deny
* - only-one-applicable -
* - first-applicable - Only evaluate the first applicable rule
**/
internals.combineAlg = {};
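// Illustrative sketch (assumed policy shape, added for clarity; the credential
// field names are hypothetical): a policy whose rules are combined with the
// 'deny-overrides' algorithm and evaluated through internals.evaluatePolicy.
//
//   const policy = {
//       apply: 'deny-overrides',
//       target: { 'credentials:group': 'writer' },
//       rules: [
//           { target: { 'credentials:username': 'francisco' }, effect: 'permit' },
//           { target: { 'credentials:blacklisted': true }, effect: 'deny' }
//       ]
//   };
//   // internals.evaluatePolicy(policy, dataRetriever, callback);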
internals.combineAlg['permit-overrides'] = (items, information, fn, callback) => {
if (!items || items.length === 0) {
return callback(null, UNDETERMINED);
}
const tasks = [];
for (let i = 0; i < items.length; ++i) {
tasks.push(fn.bind(null, items[i], information));
}
Async.parallel(tasks, (err, results) => {
if (err) {
return callback(err);
}
for (let i = 0; i < results.length; ++i) {
if (results[i] === PERMIT) {
return callback(null, PERMIT);
}
}
callback(null, DENY);
});
};
internals.combineAlg['deny-overrides'] = (items, information, fn, callback) => {
if (!items || items.length === 0) {
return callback(null, UNDETERMINED);
}
const tasks = [];
for (let i = 0; i < items.length; ++i) {
tasks.push(fn.bind(null, items[i], information));
}
Async.parallel(tasks, (err, results) => {
if (err) {
return callback(err);
}
for (let i = 0; i < results.length; ++i) {
if (results[i] === DENY) {
return callback(null, DENY);
}
}
callback(null, PERMIT);
});
};
exports = module.exports = {
evaluatePolicy: internals.evaluatePolicy,
evaluateRule: internals.evaluateRule,
evaluateTarget: internals.evaluateTarget,
DENY: DENY,
PERMIT: PERMIT,
UNDETERMINED: UNDETERMINED,
DataRetrievalRouter: DataRetrievalRouter
};<|fim▁end|> | * ]
* |
<|file_name|>ContractsAndGrantsBillingServiceImpl.java<|end_file_name|><|fim▁begin|>/*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.cg.service.impl;
import java.util.ArrayList;
import java.util.List;
import org.kuali.kfs.module.cg.CGPropertyConstants;
import org.kuali.kfs.module.cg.service.ContractsAndGrantsBillingService;
/**
* Service with methods related to the Contracts & Grants Billing (CGB) enhancement.
*/
public class ContractsAndGrantsBillingServiceImpl implements ContractsAndGrantsBillingService {
@Override
public List<String> getAgencyContractsGrantsBillingSectionIds() {
List<String> contractsGrantsSectionIds = new ArrayList<String>();
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AGENCY_ADDRESS_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AGENCY_ADDRESSES_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AGENCY_COLLECTIONS_MAINTENANCE_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AGENCY_CONTRACTS_AND_GRANTS_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AGENCY_CUSTOMER_SECTION_ID);
return contractsGrantsSectionIds;
}
@Override
public List<String> getAwardContractsGrantsBillingSectionIds() {
List<String> contractsGrantsSectionIds = new ArrayList<String>();
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AWARD_FUND_MANAGERS_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AWARD_INVOICING_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AWARD_MILESTONE_SCHEDULE_SECTION_ID);
contractsGrantsSectionIds.add(CGPropertyConstants.SectionId.AWARD_PREDETERMINED_BILLING_SCHEDULE_SECTION_ID);
return contractsGrantsSectionIds;
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>eew.rs<|end_file_name|><|fim▁begin|>use chrono::{DateTime, Utc};
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum IssuePattern { Cancel, IntensityOnly, LowAccuracy, HighAccuracy }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Source { Tokyo, Osaka }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Kind { Normal, Drill, Cancel, DrillCancel, Reference, Trial }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Status { Normal, Correction, CancelCorrection, LastWithCorrection, Last, Unknown }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum EpicenterAccuracy {
Single, Territory, GridSearchLow, GridSearchHigh,<|fim▁hole|>
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum DepthAccuracy {
Single, Territory, GridSearchLow, GridSearchHigh,
NIEDLow, NIEDHigh, EPOSLow, EPOSHigh, Unknown
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum MagnitudeAccuracy {
NIED, PWave, PSMixed, SWave, EPOS, Level, Unknown
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum EpicenterCategory { Land, Sea, Unknown }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum WarningStatus { Forecast, Alert, Unknown }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum IntensityChange { Same, Up, Down, Unknown }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum ChangeReason { Nothing, Magnitude, Epicenter, Mixed, Depth, Plum, Unknown }
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum WaveStatus { Unreached, Reached, Plum, Unknown }
#[derive(Ord, PartialOrd, Eq, PartialEq, Debug, Clone, Copy)]
pub enum IntensityClass {
Zero, One, Two, Three, Four, FiveLower, FiveUpper, SixLower, SixUpper, Seven
}
impl IntensityClass {
pub fn new(intensity: f32) -> IntensityClass
{
match intensity {
x if x < 0.5 => IntensityClass::Zero,
x if x < 1.5 => IntensityClass::One,
x if x < 2.5 => IntensityClass::Two,
x if x < 3.5 => IntensityClass::Three,
x if x < 4.5 => IntensityClass::Four,
x if x < 5.0 => IntensityClass::FiveLower,
x if x < 5.5 => IntensityClass::FiveUpper,
x if x < 6.0 => IntensityClass::SixLower,
x if x < 6.5 => IntensityClass::SixUpper,
_ => IntensityClass::Seven,
}
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct AreaEEW {
pub area_name: String,
pub minimum_intensity: IntensityClass,
pub maximum_intensity: Option<IntensityClass>,
pub reach_at: Option<DateTime<Utc>>,
pub warning_status: WarningStatus,
pub wave_status: WaveStatus,
}
#[derive(PartialEq, Debug, Clone)]
pub struct EEW {
pub issue_pattern: IssuePattern,
pub source: Source,
pub kind: Kind,
pub issued_at: DateTime<Utc>,
pub occurred_at: DateTime<Utc>,
pub id: String,
pub status: Status,
pub number: u32, // we don't accept an EEW which has no telegram number
pub detail: Option<EEWDetail>,
}
#[derive(PartialEq, Debug, Clone)]
pub struct EEWDetail {
pub epicenter_name: String,
pub epicenter: (f32, f32),
pub depth: Option<f32>,
pub magnitude: Option<f32>,
pub maximum_intensity: Option<IntensityClass>,
pub epicenter_accuracy: EpicenterAccuracy,
pub depth_accuracy: DepthAccuracy,
pub magnitude_accuracy: MagnitudeAccuracy,
pub epicenter_category: EpicenterCategory,
pub warning_status: WarningStatus,
pub intensity_change: IntensityChange,
pub change_reason: ChangeReason,
pub plum: bool,
pub area_info: Vec<AreaEEW>,
}<|fim▁end|> | NIEDLow, NIEDHigh, EPOSLow, EPOSHigh, Unknown
} |
<|file_name|>_2533BeritrasCurse.java<|end_file_name|><|fim▁begin|>/**
* This file is part of Aion-Lightning <aion-lightning.org>.
*
* Aion-Lightning is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Aion-Lightning is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details. *
*
* You should have received a copy of the GNU General Public License
* along with Aion-Lightning.
* If not, see <http://www.gnu.org/licenses/>.
*
*
* Credits goes to all Open Source Core Developer Groups listed below
* Please do not change here something, ragarding the developer credits, except the "developed by XXXX".
* Even if you edit a lot of files in this source, you still have no rights to call it as "your Core".
* Everybody knows that this Emulator Core was developed by Aion Lightning
* @-Aion-Unique-
* @-Aion-Lightning
* @Aion-Engine
* @Aion-Extreme
* @Aion-NextGen
* @Aion-Core Dev.
*/
package quest.beluslan;
import com.aionemu.gameserver.model.gameobjects.Item;
import com.aionemu.gameserver.model.gameobjects.Npc;
import com.aionemu.gameserver.model.gameobjects.player.Player;
import com.aionemu.gameserver.questEngine.handlers.HandlerResult;
import com.aionemu.gameserver.questEngine.handlers.QuestHandler;
import com.aionemu.gameserver.model.DialogAction;
import com.aionemu.gameserver.questEngine.model.QuestEnv;
import com.aionemu.gameserver.questEngine.model.QuestState;
import com.aionemu.gameserver.questEngine.model.QuestStatus;
import com.aionemu.gameserver.services.QuestService;
import com.aionemu.gameserver.world.zone.ZoneName;
/**
* @author Ritsu
*
*/
public class _2533BeritrasCurse extends QuestHandler {
private final static int questId = 2533;
public _2533BeritrasCurse() {
super(questId);
}
@Override
public void register() {
qe.registerQuestNpc(204801).addOnQuestStart(questId); //Gigrite
qe.registerQuestNpc(204801).addOnTalkEvent(questId);
qe.registerQuestItem(182204425, questId);//Empty Durable Potion Bottle
qe.registerOnQuestTimerEnd(questId);
}
@Override
public HandlerResult onItemUseEvent(final QuestEnv env, Item item) {
Player player = env.getPlayer();
QuestState qs = player.getQuestStateList().getQuestState(questId);
if (qs != null && qs.getStatus() == QuestStatus.START) {
if (player.isInsideZone(ZoneName.get("BERITRAS_WEAPON_220040000"))) {
QuestService.questTimerStart(env, 300);
return HandlerResult.fromBoolean(useQuestItem(env, item, 0, 1, false, 182204426, 1, 0));
}
}
return HandlerResult.SUCCESS; // ??
}
@Override
public boolean onDialogEvent(QuestEnv env) {
final Player player = env.getPlayer();
int targetId = 0;
if (env.getVisibleObject() instanceof Npc) {
targetId = ((Npc) env.getVisibleObject()).getNpcId();
}
final QuestState qs = player.getQuestStateList().getQuestState(questId);
DialogAction dialog = env.getDialog();
if (qs == null || qs.getStatus() == QuestStatus.NONE) {
if (targetId == 204801) {
if (dialog == DialogAction.QUEST_SELECT) {
return sendQuestDialog(env, 4762);
} else if (dialog == DialogAction.QUEST_ACCEPT_1) {
if (!giveQuestItem(env, 182204425, 1)) {
return true;
}
return sendQuestStartDialog(env);
} else {
return sendQuestStartDialog(env);
}
}
} else if (qs.getStatus() == QuestStatus.START) {
int var = qs.getQuestVarById(0);
if (targetId == 204801) {
switch (dialog) {
case QUEST_SELECT:
if (var == 1) {
qs.setStatus(QuestStatus.REWARD);
updateQuestStatus(env);
return sendQuestDialog(env, 1352);
}
case SELECT_QUEST_REWARD: {
QuestService.questTimerEnd(env);
return sendQuestDialog(env, 5);
}
}
}
} else if (qs.getStatus() == QuestStatus.REWARD) {
if (targetId == 204801) {
return sendQuestEndDialog(env);
}
}
return false;
}
@Override
public boolean onQuestTimerEndEvent(QuestEnv env) {
Player player = env.getPlayer();
QuestState qs = player.getQuestStateList().getQuestState(questId);
if (qs != null && qs.getStatus() == QuestStatus.START) {
removeQuestItem(env, 182204426, 1);
QuestService.abandonQuest(player, questId);
<|fim▁hole|> return true;
}
return false;
}
}<|fim▁end|> | player.getController().updateNearbyQuests();
|
<|file_name|>input.js<|end_file_name|><|fim▁begin|>(function(global) {
var LiteGraph = global.LiteGraph;
function GamepadInput() {
this.addOutput("left_x_axis", "number");
this.addOutput("left_y_axis", "number");
this.addOutput("button_pressed", LiteGraph.EVENT);
this.properties = { gamepad_index: 0, threshold: 0.1 };
this._left_axis = new Float32Array(2);
this._right_axis = new Float32Array(2);
this._triggers = new Float32Array(2);
this._previous_buttons = new Uint8Array(17);
this._current_buttons = new Uint8Array(17);
}
GamepadInput.title = "Gamepad";
GamepadInput.desc = "gets the input of the gamepad";
GamepadInput.CENTER = 0;
GamepadInput.LEFT = 1;
GamepadInput.RIGHT = 2;
GamepadInput.UP = 4;
GamepadInput.DOWN = 8;
GamepadInput.zero = new Float32Array(2);
GamepadInput.buttons = [
"a",
"b",
"x",
"y",
"lb",
"rb",
"lt",
"rt",
"back",
"start",
"ls",
"rs",
"home"
];
GamepadInput.prototype.onExecute = function() {
//get gamepad
var gamepad = this.getGamepad();
var threshold = this.properties.threshold || 0.0;
if (gamepad) {
this._left_axis[0] =
Math.abs(gamepad.xbox.axes["lx"]) > threshold
? gamepad.xbox.axes["lx"]
: 0;
this._left_axis[1] =
Math.abs(gamepad.xbox.axes["ly"]) > threshold
? gamepad.xbox.axes["ly"]
: 0;
this._right_axis[0] =
Math.abs(gamepad.xbox.axes["rx"]) > threshold
? gamepad.xbox.axes["rx"]
: 0;
this._right_axis[1] =
Math.abs(gamepad.xbox.axes["ry"]) > threshold
? gamepad.xbox.axes["ry"]
: 0;
this._triggers[0] =
Math.abs(gamepad.xbox.axes["ltrigger"]) > threshold
? gamepad.xbox.axes["ltrigger"]
: 0;
this._triggers[1] =
<|fim▁hole|> Math.abs(gamepad.xbox.axes["rtrigger"]) > threshold
? gamepad.xbox.axes["rtrigger"]
: 0;
}
if (this.outputs) {
for (var i = 0; i < this.outputs.length; i++) {
var output = this.outputs[i];
if (!output.links || !output.links.length) {
continue;
}
var v = null;
if (gamepad) {
switch (output.name) {
case "left_axis":
v = this._left_axis;
break;
case "right_axis":
v = this._right_axis;
break;
case "left_x_axis":
v = this._left_axis[0];
break;
case "left_y_axis":
v = this._left_axis[1];
break;
case "right_x_axis":
v = this._right_axis[0];
break;
case "right_y_axis":
v = this._right_axis[1];
break;
case "trigger_left":
v = this._triggers[0];
break;
case "trigger_right":
v = this._triggers[1];
break;
case "a_button":
v = gamepad.xbox.buttons["a"] ? 1 : 0;
break;
case "b_button":
v = gamepad.xbox.buttons["b"] ? 1 : 0;
break;
case "x_button":
v = gamepad.xbox.buttons["x"] ? 1 : 0;
break;
case "y_button":
v = gamepad.xbox.buttons["y"] ? 1 : 0;
break;
case "lb_button":
v = gamepad.xbox.buttons["lb"] ? 1 : 0;
break;
case "rb_button":
v = gamepad.xbox.buttons["rb"] ? 1 : 0;
break;
case "ls_button":
v = gamepad.xbox.buttons["ls"] ? 1 : 0;
break;
case "rs_button":
v = gamepad.xbox.buttons["rs"] ? 1 : 0;
break;
case "hat_left":
v = gamepad.xbox.hatmap & GamepadInput.LEFT;
break;
case "hat_right":
v = gamepad.xbox.hatmap & GamepadInput.RIGHT;
break;
case "hat_up":
v = gamepad.xbox.hatmap & GamepadInput.UP;
break;
case "hat_down":
v = gamepad.xbox.hatmap & GamepadInput.DOWN;
break;
case "hat":
v = gamepad.xbox.hatmap;
break;
case "start_button":
v = gamepad.xbox.buttons["start"] ? 1 : 0;
break;
case "back_button":
v = gamepad.xbox.buttons["back"] ? 1 : 0;
break;
case "button_pressed":
for (
var j = 0;
j < this._current_buttons.length;
++j
) {
if (
this._current_buttons[j] &&
!this._previous_buttons[j]
) {
this.triggerSlot(
i,
GamepadInput.buttons[j]
);
}
}
break;
default:
break;
}
} else {
//if no gamepad is connected, output 0
switch (output.name) {
case "button_pressed":
break;
case "left_axis":
case "right_axis":
v = GamepadInput.zero;
break;
default:
v = 0;
}
}
this.setOutputData(i, v);
}
}
};
GamepadInput.mapping = {a:0,b:1,x:2,y:3,lb:4,rb:5,lt:6,rt:7,back:8,start:9,ls:10,rs:11 };
GamepadInput.mapping_array = ["a","b","x","y","lb","rb","lt","rt","back","start","ls","rs"];
GamepadInput.prototype.getGamepad = function() {
var getGamepads =
navigator.getGamepads ||
navigator.webkitGetGamepads ||
navigator.mozGetGamepads;
if (!getGamepads) {
return null;
}
var gamepads = getGamepads.call(navigator);
var gamepad = null;
this._previous_buttons.set(this._current_buttons);
//pick the first connected
for (var i = this.properties.gamepad_index; i < 4; i++) {
if (!gamepads[i]) {
continue;
}
gamepad = gamepads[i];
//xbox controller mapping
var xbox = this.xbox_mapping;
if (!xbox) {
xbox = this.xbox_mapping = {
axes: [],
buttons: {},
hat: "",
hatmap: GamepadInput.CENTER
};
}
xbox.axes["lx"] = gamepad.axes[0];
xbox.axes["ly"] = gamepad.axes[1];
xbox.axes["rx"] = gamepad.axes[2];
xbox.axes["ry"] = gamepad.axes[3];
xbox.axes["ltrigger"] = gamepad.buttons[6].value;
xbox.axes["rtrigger"] = gamepad.buttons[7].value;
xbox.hat = "";
xbox.hatmap = GamepadInput.CENTER;
for (var j = 0; j < gamepad.buttons.length; j++) {
this._current_buttons[j] = gamepad.buttons[j].pressed;
if(j < 12)
{
xbox.buttons[ GamepadInput.mapping_array[j] ] = gamepad.buttons[j].pressed;
if(gamepad.buttons[j].was_pressed)
this.trigger( GamepadInput.mapping_array[j] + "_button_event" );
}
else //mapping of XBOX
switch ( j ) //I use a switch to ensure that a player with another gamepad could play
{
case 12:
if (gamepad.buttons[j].pressed) {
xbox.hat += "up";
xbox.hatmap |= GamepadInput.UP;
}
break;
case 13:
if (gamepad.buttons[j].pressed) {
xbox.hat += "down";
xbox.hatmap |= GamepadInput.DOWN;
}
break;
case 14:
if (gamepad.buttons[j].pressed) {
xbox.hat += "left";
xbox.hatmap |= GamepadInput.LEFT;
}
break;
case 15:
if (gamepad.buttons[j].pressed) {
xbox.hat += "right";
xbox.hatmap |= GamepadInput.RIGHT;
}
break;
case 16:
xbox.buttons["home"] = gamepad.buttons[j].pressed;
break;
default:
}
}
gamepad.xbox = xbox;
return gamepad;
}
};
GamepadInput.prototype.onDrawBackground = function(ctx) {
if (this.flags.collapsed) {
return;
}
//render gamepad state?
var la = this._left_axis;
var ra = this._right_axis;
ctx.strokeStyle = "#88A";
ctx.strokeRect(
(la[0] + 1) * 0.5 * this.size[0] - 4,
(la[1] + 1) * 0.5 * this.size[1] - 4,
8,
8
);
ctx.strokeStyle = "#8A8";
ctx.strokeRect(
(ra[0] + 1) * 0.5 * this.size[0] - 4,
(ra[1] + 1) * 0.5 * this.size[1] - 4,
8,
8
);
var h = this.size[1] / this._current_buttons.length;
ctx.fillStyle = "#AEB";
for (var i = 0; i < this._current_buttons.length; ++i) {
if (this._current_buttons[i]) {
ctx.fillRect(0, h * i, 6, h);
}
}
};
GamepadInput.prototype.onGetOutputs = function() {
return [
["left_axis", "vec2"],
["right_axis", "vec2"],
["left_x_axis", "number"],
["left_y_axis", "number"],
["right_x_axis", "number"],
["right_y_axis", "number"],
["trigger_left", "number"],
["trigger_right", "number"],
["a_button", "number"],
["b_button", "number"],
["x_button", "number"],
["y_button", "number"],
["lb_button", "number"],
["rb_button", "number"],
["ls_button", "number"],
["rs_button", "number"],
["start_button", "number"],
["back_button", "number"],
["a_button_event", LiteGraph.EVENT ],
["b_button_event", LiteGraph.EVENT ],
["x_button_event", LiteGraph.EVENT ],
["y_button_event", LiteGraph.EVENT ],
["lb_button_event", LiteGraph.EVENT ],
["rb_button_event", LiteGraph.EVENT ],
["ls_button_event", LiteGraph.EVENT ],
["rs_button_event", LiteGraph.EVENT ],
["start_button_event", LiteGraph.EVENT ],
["back_button_event", LiteGraph.EVENT ],
["hat_left", "number"],
["hat_right", "number"],
["hat_up", "number"],
["hat_down", "number"],
["hat", "number"],
["button_pressed", LiteGraph.EVENT]
];
};
LiteGraph.registerNodeType("input/gamepad", GamepadInput);
})(this);<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be <|fim▁hole|>#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
import os
import gobject
import json
import shutil
import tarfile
from glue.paths import INSTALLER
from data.skarphed.Skarphed import AbstractInstaller, AbstractDestroyer
from glue.lng import _
from glue.paths import COREFILES
import logging
TARGETNAME = "Debian 7 / nginx"
EXTRA_PARAMS = {
'nginx.domain':(_('Domain'),_('example.org or leave empty')),
'nginx.subdomain':(_('Subdomain'),_('sub.example.org or leave empty')),
'nginx.port':(_('Port'),_('80'))
}
class Installer(AbstractInstaller):
def execute_installation(self):
os.mkdir(self.BUILDPATH)
p = os.path.dirname(os.path.realpath(__file__))
nginx_template = open(os.path.join(p,"nginx.conf"),"r").read()
nginx_domain = ""
domainlineterm = ""
if self.data['nginx.port'] == "":
self.data['nginx.port'] = "80"
if self.data['nginx.domain'] != "":
nginx_domain = "server_name "+self.data['nginx.domain']
self.domain = self.data['nginx.domain']
domainlineterm = ";"
nginx_subdomain = ""
if self.data['nginx.subdomain'] != "":
nginx_subdomain = "alias "+self.data['nginx.subdomain']
domainlineterm = ";"
nginxconf = nginx_template%{'port':self.data['nginx.port'],
'domain':nginx_domain,
'subdomain':nginx_subdomain,
'domainlineterm':domainlineterm}
nginxconfresult = open(os.path.join(self.BUILDPATH,"nginx.conf"),"w")
nginxconfresult.write(nginxconf)
nginxconfresult.close()
self.status = 10
gobject.idle_add(self.updated)
scv_config = {}
for key,val in self.data.items():
if key.startswith("core.") or key.startswith("db."):
if key == "db.name":
scv_config[key] = val+".fdb"
continue
scv_config[key] = val
scv_config_defaults = {
"core.session_duration":2,
"core.session_extend":1,
"core.cookielaw":1,
"core.debug":True
}
scv_config.update(scv_config_defaults)
jenc = json.JSONEncoder()
config_json = open(os.path.join(self.BUILDPATH,"config.json"),"w")
config_json.write(jenc.encode(scv_config))
config_json.close()
shutil.copyfile(os.path.join(p,"skarphed.conf"), os.path.join(self.BUILDPATH,"skarphed.conf"))
shutil.copyfile(os.path.join(p,"install.sh"), os.path.join(self.BUILDPATH,"install.sh"))
shutil.copyfile(os.path.join(p,"uwsgi.conf"), os.path.join(self.BUILDPATH,"uwsgi.conf"))
self.status = 30
gobject.idle_add(self.updated)
shutil.copytree(os.path.join(COREFILES,"web"), os.path.join(self.BUILDPATH, "web"))
shutil.copytree(os.path.join(COREFILES,"lib"), os.path.join(self.BUILDPATH,"lib"))
tar = tarfile.open(os.path.join(self.BUILDPATH,"scv_install.tar.gz"),"w:gz")
tar.add(os.path.join(self.BUILDPATH,"nginx.conf"))
tar.add(os.path.join(self.BUILDPATH,"uwsgi.conf"))
tar.add(os.path.join(self.BUILDPATH,"config.json"))
tar.add(os.path.join(self.BUILDPATH,"skarphed.conf"))
tar.add(os.path.join(self.BUILDPATH,"install.sh"))
tar.add(os.path.join(self.BUILDPATH,"web"))
tar.add(os.path.join(self.BUILDPATH,"lib"))
tar.close()
self.status = 45
gobject.idle_add(self.updated)
con = self.server.getSSH()
con_stdin, con_stdout, con_stderr = con.exec_command("mkdir /tmp/scvinst"+str(self.installationId))
self.status = 50
gobject.idle_add(self.updated)
con = self.server.getSSH()
ftp = con.open_sftp()
ftp.put(os.path.join(self.BUILDPATH,"scv_install.tar.gz"),"/tmp/scvinst"+str(self.installationId)+"/scv_install.tar.gz")
ftp.close()
self.status = 65
gobject.idle_add(self.updated)
con = self.server.getSSH()
con_stdin, con_stdout, con_stderr = con.exec_command("cd /tmp/scvinst"+str(self.installationId)+"; tar xvfz scv_install.tar.gz -C / ; chmod 755 install.sh ; ./install.sh ")
output = con_stdout.read()
logging.debug("SSH-outputlength: %d"%len(output))
logging.debug(output)
shutil.rmtree(self.BUILDPATH)
self.status = 100
gobject.idle_add(self.updated)
gobject.idle_add(self.addInstanceToServer)
class Destroyer(AbstractDestroyer):
def execute_destruction(self):
p = os.path.dirname(os.path.realpath(__file__))
server = self.instance.getServer()
self.status = 10
gobject.idle_add(self.updated)
con = server.getSSH()
ftp = con.open_sftp()
ftp.put(os.path.join(p,"teardown.sh"),"/tmp/teardown.sh")
ftp.close()
self.status = 30
gobject.idle_add(self.updated)
con = server.getSSH()
con_stdin, con_stdout, con_stderr = con.exec_command("cd /tmp/ ; chmod 755 teardown.sh ; ./teardown.sh %d "%self.instanceid)
logging.debug(con_stdout.read())
self.status = 100
gobject.idle_add(self.updated)
gobject.idle_add(self.updated)
gobject.idle_add(self.removeInstanceFromServer)<|fim▁end|> | # useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details. |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Pdfkit v0.10.0
// Project: http://pdfkit.org
// Definitions by: Eric Hillah <https://github.com/erichillah>
// Erik Berreßem <https://github.com/she11sh0cked>
// Jeroen Vervaeke <https://github.com/jeroenvervaeke/>
// Thales Agapito <https://github.com/thalesagapito/>
// Evgeny Baram <https://github.com/r4tz52/>
// BamButz <https://github.com/BamButz/>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference types="node" />
declare namespace PDFKit {
interface PDFGradient {
new (document: any): PDFGradient;
stop(pos: number, color?: string | PDFKit.PDFGradient, opacity?: number): PDFGradient;
embed(): void;
apply(): void;
}
interface PDFLinearGradient extends PDFGradient {
new (document: any, x1: number, y1: number, x2: number, y2: number): PDFLinearGradient;
shader(fn: () => any): any;
opacityGradient(): PDFLinearGradient;
}
interface PDFRadialGradient extends PDFGradient {
new (document: any, x1: number, y1: number, x2: number, y2: number): PDFRadialGradient;
shader(fn: () => any): any;
opacityGradient(): PDFRadialGradient;
}
}
declare namespace PDFKit.Mixins {
interface AnnotationOption {
Type?: string;
Rect?: any;
Border?: Array<number>;
SubType?: string;
Contents?: string;
Name?: string;
color?: string;
QuadPoints?: Array<number>;
A?: any;
B?: any;
C?: any;
L?: any;
DA?: string;
}
interface PDFAnnotation {
annotate(x: number, y: number, w: number, h: number, option: AnnotationOption): this;
note(x: number, y: number, w: number, h: number, content: string, option?: AnnotationOption): this;
goTo(x: number, y: number, w: number, h: number, name: string, options?: AnnotationOption): this;
link(x: number, y: number, w: number, h: number, url: string, option?: AnnotationOption): this;
highlight(x: number, y: number, w: number, h: number, option?: AnnotationOption): this;
underline(x: number, y: number, w: number, h: number, option?: AnnotationOption): this;
strike(x: number, y: number, w: number, h: number, option?: AnnotationOption): this;
lineAnnotation(x1: number, y1: number, x2: number, y2: number, option?: AnnotationOption): this;
rectAnnotation(x: number, y: number, w: number, h: number, option?: AnnotationOption): this;
ellipseAnnotation(x: number, y: number, w: number, h: number, option?: AnnotationOption): this;
textAnnotation(x: number, y: number, w: number, h: number, text: string, option?: AnnotationOption): this;
}
// The color forms accepted by PDFKit:
// example: "red" [R, G, B] [C, M, Y, K]
type ColorValue = string | PDFGradient | [number, number, number] | [number, number, number, number];
// The winding / filling rule accepted by PDFKit:
type RuleValue = 'even-odd' | 'evenodd' | 'non-zero' | 'nonzero';
// Text option opentype features as listed at https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist
type OpenTypeFeatures =
| 'aalt' | 'abvf' | 'abvm' | 'abvs' | 'afrc' | 'akhn' | 'blwf' | 'blwm' | 'blws' | 'calt' | 'case'
| 'cfar' | 'cjct' | 'clig' | 'cpct' | 'cpsp' | 'cswh' | 'curs' | 'cv01' | 'cv02' | 'cv03' | 'cv04'
| 'cv05' | 'cv06' | 'cv07' | 'cv08' | 'cv09' | 'cv10' | 'cv11' | 'cv12' | 'cv13' | 'cv14' | 'cv15'
| 'cv16' | 'cv17' | 'cv18' | 'cv19' | 'cv20' | 'cv21' | 'cv22' | 'cv23' | 'cv24' | 'cv25' | 'cv26'
| 'cv27' | 'cv28' | 'cv29' | 'cv30' | 'cv31' | 'cv32' | 'cv33' | 'cv34' | 'cv35' | 'cv36' | 'cv37'
| 'cv38' | 'cv39' | 'cv40' | 'cv41' | 'cv42' | 'cv43' | 'cv44' | 'cv45' | 'cv46' | 'cv47' | 'cv48'
| 'cv49' | 'cv50' | 'cv51' | 'cv52' | 'cv53' | 'cv54' | 'cv55' | 'cv56' | 'cv57' | 'cv58' | 'cv59'
| 'cv60' | 'cv61' | 'cv62' | 'cv63' | 'cv64' | 'cv65' | 'cv66' | 'cv67' | 'cv68' | 'cv69' | 'cv70'
| 'cv71' | 'cv72' | 'cv73' | 'cv74' | 'cv75' | 'cv76' | 'cv77' | 'cv78' | 'cv79' | 'cv80' | 'cv81'
| 'cv82' | 'cv83' | 'cv84' | 'cv85' | 'cv86' | 'cv87' | 'cv88' | 'cv89' | 'cv90' | 'cv91' | 'cv92'
| 'cv93' | 'cv94' | 'cv95' | 'cv96' | 'cv97' | 'cv98' | 'cv99' | 'c2pc' | 'c2sc' | 'dist' | 'ccmp'
| 'dlig' | 'dnom' | 'dtls' | 'expt' | 'falt' | 'fin2' | 'fin3' | 'fina' | 'flac' | 'frac' | 'fwid'
| 'half' | 'haln' | 'halt' | 'hist' | 'hkna' | 'hlig' | 'hngl' | 'hojo' | 'hwid' | 'init' | 'isol'
| 'ital' | 'jalt' | 'jp78' | 'jp83' | 'jp90' | 'jp04' | 'kern' | 'lfbd' | 'liga' | 'ljmo' | 'lnum'
| 'locl' | 'ltra' | 'ltrm' | 'mark' | 'med2' | 'medi' | 'mgrk' | 'mkmk' | 'mset' | 'nalt' | 'nlck'
| 'nukt' | 'numr' | 'onum' | 'opbd' | 'ordn' | 'ornm' | 'palt' | 'pcap' | 'pkna' | 'pnum' | 'pref'
| 'pres' | 'pstf' | 'psts' | 'pwid' | 'qwid' | 'rand' | 'rclt' | 'rkrf' | 'rlig' | 'rphf' | 'rtbd'
| 'rtla' | 'rtlm' | 'ruby' | 'rvrn' | 'salt' | 'sinf' | 'size' | 'smcp' | 'smpl' | 'ss01' | 'ss02'
| 'ss03' | 'ss04' | 'ss05' | 'ss06' | 'ss07' | 'ss08' | 'ss09' | 'ss10' | 'ss11' | 'ss12' | 'ss13'
| 'ss14' | 'ss15' | 'ss16' | 'ss17' | 'ss18' | 'ss19' | 'ss20' | 'ssty' | 'stch' | 'subs' | 'sups'
| 'swsh' | 'titl' | 'tjmo' | 'tnam' | 'tnum' | 'trad' | 'twid' | 'unic' | 'valt' | 'vatu' | 'vert'
| 'vhal' | 'vjmo' | 'vkna' | 'vkrn' | 'vpal' | 'vrt2' | 'vrtr' | 'zero';
interface PDFColor {
fillColor(color: ColorValue, opacity?: number): this;
strokeColor(color: ColorValue, opacity?: number): this;
opacity(opacity: number): this;
fillOpacity(opacity: number): this;
strokeOpacity(opacity: number): this;
linearGradient(x1: number, y1: number, x2: number, y2: number): PDFLinearGradient;
radialGradient(x1: number, y1: number, r1: number, x2: number, y2: number, r2: number): PDFRadialGradient;
}
type PDFFontSource = string | Buffer | Uint8Array | ArrayBuffer;
interface PDFFont {
font(buffer: Buffer): this;
font(src: string, family?: string, size?: number): this;
fontSize(size: number): this;
currentLineHeight(includeGap?: boolean): number;
registerFont(name: string, src?: PDFFontSource, family?: string): this;
}
interface ImageOption {
width?: number;
height?: number;
/** Scale percentage */
scale?: number;
/** Two-element array specifying dimensions (w, h) */
fit?: [number, number];
cover?: [number, number];
align?: 'center' | 'right';
valign?: 'center' | 'bottom';
link?: AnnotationOption;
goTo?: AnnotationOption;
destination?: string;
}
interface PDFImage {
/**
* Draw an image in PDFKit document.
*/
image(src: any, x?: number, y?: number, options?: ImageOption): this;
image(src: any, options?: ImageOption): this;
}
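// Illustrative sketch (not part of the original definitions): drawing an image
// with ImageOption on an assumed PDFDocument instance `doc`; the file path is
// hypothetical.
//
//   doc.image('images/logo.png', 72, 72, { fit: [150, 150], align: 'center' });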
interface TextOptions {
/** Set to false to disable line wrapping all together */
lineBreak?: boolean;
/** The width that text should be wrapped to (by default, the page width minus the left and right margin) */
width?: number;
/** The maximum height that text should be clipped to */
height?: number;
/** The character to display at the end of the text when it is too long. Set to true to use the default character. */
ellipsis?: boolean | string;
/** the number of columns to flow the text into */
columns?: number;
/** the amount of space between each column (1/4 inch by default) */
columnGap?: number;
/** The amount in PDF points (72 per inch) to indent each paragraph of text */
indent?: number;
/** the amount of space between each paragraph of text */
paragraphGap?: number;
/** the amount of space between each line of text */
lineGap?: number;
/** the amount of space between each word in the text */
wordSpacing?: number;
/** the amount of space between each character in the text */
characterSpacing?: number;
/** whether to fill the text (true by default) */
fill?: boolean;
/** whether to stroke the text */
stroke?: boolean;
/** A URL to link this text to (shortcut to create an annotation) */
link?: string;
/** whether to underline the text */
underline?: boolean;
/** whether to strike out the text */
strike?: boolean;
/** whether the text segment will be followed immediately by another segment. Useful for changing styling in the middle of a paragraph. */
continued?: boolean;
/** whether to slant the text (angle in degrees or true) */
oblique?: boolean | number;
/** the alignment of the text (center, justify, left, right) */
//TODO check this
align?: 'center' | 'justify' | 'left' | 'right' | string;
/** the vertical alignment of the text with respect to its insertion point */
baseline?: number | 'svg-middle' | 'middle' | 'svg-central' | 'bottom' | 'ideographic' | 'alphabetic' | 'mathematical' | 'hanging' | 'top';
/** an array of OpenType feature tags to apply. If not provided, a set of defaults is used. */
features?: OpenTypeFeatures[];
}
interface PDFText {
lineGap(lineGap: number): this;
moveDown(line?: number): this;
moveUp(line?: number): this;
text(text: string, x?: number, y?: number, options?: TextOptions): this;
text(text: string, options?: TextOptions): this;
widthOfString(text: string, options?: TextOptions): number;
heightOfString(text: string, options?: TextOptions): number;
list(list: Array<string | any>, x?: number, y?: number, options?: TextOptions): this;
list(list: Array<string | any>, options?: TextOptions): this;
}
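// Illustrative sketch (not part of the original definitions): typical TextOptions
// usage on an assumed PDFDocument instance `doc`.
//
//   doc.fontSize(12).text('Hello world', 72, 72, {
//       width: 300,
//       align: 'justify',
//       underline: true
//   });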
interface PDFVector {
save(): this;
restore(): this;
closePath(): this;
lineWidth(w: number): this;
lineCap(c: string): this;
lineJoin(j: string): this;
miterLimit(m: any): this;
dash(length: number, option: any): this;
undash(): this;
moveTo(x: number, y: number): this;
lineTo(x: number, y: number): this;
bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number): this;
quadraticCurveTo(cpx: number, cpy: number, x: number, y: number): this;
rect(x: number, y: number, w: number, h: number): this;
roundedRect(x: number, y: number, w: number, h: number, r?: number): this;
ellipse(x: number, y: number, r1: number, r2?: number): this;
        circle(x: number, y: number, radius: number): this;
polygon(...points: number[][]): this;
path(path: string): this;
fill(color?: ColorValue, rule?: RuleValue): this;
fill(rule: RuleValue): this;
stroke(color?: ColorValue): this;
fillAndStroke(fillColor?: ColorValue, strokeColor?: ColorValue, rule?: RuleValue): this;
fillAndStroke(fillColor: ColorValue, rule?: RuleValue): this;
fillAndStroke(rule: RuleValue): this;
clip(rule?: RuleValue): this;
transform(m11: number, m12: number, m21: number, m22: number, dx: number, dy: number): this;
translate(x: number, y: number): this;
rotate(angle: number, options?: { origin?: number[] }): this;
scale(xFactor: number, yFactor?: number, options?: { origin?: number[] }): this;
}
}
declare namespace PDFKit {
/**
* PDFKit data
*/
interface PDFData {
new (data: any[]): PDFData;
readByte(): any;
writeByte(byte: any): void;
byteAt(index: number): any;
readBool(): boolean;
writeBool(val: boolean): boolean;
readUInt32(): number;
writeUInt32(val: number): void;
readInt32(): number;
writeInt32(val: number): void;
readUInt16(): number;<|fim▁hole|> readString(length: number): string;
writeString(val: string): void;
stringAt(pos: number, length: number): string;
readShort(): number;
writeShort(val: number): void;
readLongLong(): number;
writeLongLong(val: number): void;
readInt(): number;
writeInt(val: number): void;
slice(start: number, end: number): any[];
read(length: number): any[];
write(bytes: any[]): void;
}
}
declare module 'pdfkit/js/data' {
var PDFKitData: PDFKit.PDFData;
export = PDFKitData;
}
declare namespace PDFKit {
interface DocumentInfo {
Producer?: string;
Creator?: string;
CreationDate?: Date;
Title?: string;
Author?: string;
Keywords?: string;
ModDate?: Date;
}
interface DocumentPermissions {
modifying?: boolean;
copying?: boolean;
annotating?: boolean;
fillingForms?: boolean;
contentAccessibility?: boolean;
documentAssembly?: boolean;
printing?: 'lowResolution' | 'highResolution';
}
interface PDFDocumentOptions {
compress?: boolean;
info?: DocumentInfo;
userPassword?: string;
ownerPassword?: string;
permissions?: DocumentPermissions;
pdfVersion?: '1.3' | '1.4' | '1.5' | '1.6' | '1.7' | '1.7ext3';
autoFirstPage?: boolean;
size?: number[] | string;
margin?: number;
margins?: { top: number; left: number; bottom: number; right: number };
layout?: 'portrait' | 'landscape';
bufferPages?: boolean;
}
interface PDFDocument
extends NodeJS.ReadableStream,
Mixins.PDFAnnotation,
Mixins.PDFColor,
Mixins.PDFImage,
Mixins.PDFText,
Mixins.PDFVector,
Mixins.PDFFont {
/**
* PDF Version
*/
version: number;
/**
         * Whether streams should be compressed
*/
compress: boolean;
/**
* PDF document Metadata
*/
info: DocumentInfo;
/**
* Options for the document
*/
options: PDFDocumentOptions;
/**
         * Represents the current page.
*/
page: PDFPage;
x: number;
y: number;
new (options?: PDFDocumentOptions): PDFDocument;
addPage(options?: PDFDocumentOptions): PDFDocument;
bufferedPageRange(): { start: number; count: number };
switchToPage(n?: number): PDFPage;
flushPages(): void;
ref(data: {}): PDFKitReference;
addContent(data: any): PDFDocument;
/**
* Deprecated
*/
write(fileName: string, fn: any): void;
/**
* Deprecated. Throws exception
*/
output(fn: any): void;
end(): void;
toString(): string;
}
}
declare module 'pdfkit' {
var doc: PDFKit.PDFDocument;
export = doc;
}
declare module 'pdfkit/js/gradient' {
var gradient: {
PDFGradient: PDFKit.PDFGradient;
PDFLinearGradient: PDFKit.PDFLinearGradient;
PDFRadialGradiant: PDFKit.PDFRadialGradient;
};
export = gradient;
}
declare namespace PDFKit {
/**
     * Represents a single page in the PDF document
*/
interface PDFPage {
size: string;
layout: string;
margins: { top: number; left: number; bottom: number; right: number };
width: number;
height: number;
document: PDFDocument;
content: PDFKitReference;
/**
         * The page dictionary
*/
dictionary: PDFKitReference;
fonts: any;
xobjects: any;
ext_gstates: any;
patterns: any;
annotations: any;
maxY(): number;
write(chunk: any): void;
end(): void;
}
}
declare module 'pdfkit/js/page' {
var PDFKitPage: PDFKit.PDFPage;
export = PDFKitPage;
}
declare namespace PDFKit {
    /** PDFReference - represents a reference to another object in the PDF object hierarchy */
class PDFKitReference {
id: number;
gen: number;
deflate: any;
compress: boolean;
uncompressedLength: number;
chunks: any[];
data: { Font?: any; XObject?: any; ExtGState?: any; Pattern: any; Annots: any };
document: PDFDocument;
constructor(document: PDFDocument, id: number, data: {});
initDeflate(): void;
write(chunk: any): void;
end(chunk: any): void;
finalize(): void;
toString(): string;
}
}
declare module 'pdfkit/js/reference' {
var PDFKitReference: PDFKit.PDFKitReference;
export = PDFKitReference;
}
declare module 'pdfkit/js/mixins/annotations' {
var PDFKitAnnotation: PDFKit.Mixins.PDFAnnotation;
export = PDFKitAnnotation;
}
declare module 'pdfkit/js/mixins/color' {
var PDFKitColor: PDFKit.Mixins.PDFColor;
export = PDFKitColor;
}
declare module 'pdfkit/js/mixins/fonts' {
var PDFKitFont: PDFKit.Mixins.PDFFont;
export = PDFKitFont;
}
declare module 'pdfkit/js/mixins/images' {
var PDFKitImage: PDFKit.Mixins.PDFImage;
export = PDFKitImage;
}
declare module 'pdfkit/js/mixins/text' {
var PDFKitText: PDFKit.Mixins.PDFText;
export = PDFKitText;
}
declare module 'pdfkit/js/mixins/vector' {
var PDFKitVector: PDFKit.Mixins.PDFVector;
export = PDFKitVector;
}<|fim▁end|> | writeUInt16(val: number): void;
readInt16(): number;
writeInt16(val: number): void; |
<|file_name|>storage_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use net_traits::storage_thread::{StorageThreadMsg, StorageType};
use resource_thread;
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::PathBuf;
use util::thread::spawn_named;
const QUOTA_SIZE_LIMIT: usize = 5 * 1024 * 1024;
pub trait StorageThreadFactory {
fn new(config_dir: Option<PathBuf>) -> Self;
}
impl StorageThreadFactory for IpcSender<StorageThreadMsg> {
/// Create a storage thread
fn new(config_dir: Option<PathBuf>) -> IpcSender<StorageThreadMsg> {
let (chan, port) = ipc::channel().unwrap();
spawn_named("StorageManager".to_owned(), move || {
StorageManager::new(port, config_dir).start();
});
chan
}
}
struct StorageManager {
port: IpcReceiver<StorageThreadMsg>,
session_data: HashMap<String, (usize, BTreeMap<String, String>)>,
local_data: HashMap<String, (usize, BTreeMap<String, String>)>,
config_dir: Option<PathBuf>,
}
impl StorageManager {
fn new(port: IpcReceiver<StorageThreadMsg>,
config_dir: Option<PathBuf>)
-> StorageManager {
let mut local_data = HashMap::new();
if let Some(ref config_dir) = config_dir {
resource_thread::read_json_from_file(&mut local_data, config_dir, "local_data.json");
}
StorageManager {
port: port,
session_data: HashMap::new(),
local_data: local_data,
config_dir: config_dir,
}
}
}
impl StorageManager {
fn start(&mut self) {
loop {
match self.port.recv().unwrap() {
StorageThreadMsg::Length(sender, url, storage_type) => {
self.length(sender, url, storage_type)
}
StorageThreadMsg::Key(sender, url, storage_type, index) => {
self.key(sender, url, storage_type, index)
}
StorageThreadMsg::Keys(sender, url, storage_type) => {
self.keys(sender, url, storage_type)
}
StorageThreadMsg::SetItem(sender, url, storage_type, name, value) => {
self.set_item(sender, url, storage_type, name, value)
}
StorageThreadMsg::GetItem(sender, url, storage_type, name) => {
self.request_item(sender, url, storage_type, name)
}
StorageThreadMsg::RemoveItem(sender, url, storage_type, name) => {
self.remove_item(sender, url, storage_type, name)
}
StorageThreadMsg::Clear(sender, url, storage_type) => {
self.clear(sender, url, storage_type)
}
StorageThreadMsg::Exit(sender) => {
if let Some(ref config_dir) = self.config_dir {
resource_thread::write_json_to_file(&self.local_data, config_dir, "local_data.json");
}
let _ = sender.send(());
break
}
}
}
}
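    /// Returns an immutable reference to the data map for the given storage type.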
fn select_data(&self, storage_type: StorageType)
-> &HashMap<String, (usize, BTreeMap<String, String>)> {
match storage_type {
StorageType::Session => &self.session_data,
StorageType::Local => &self.local_data
}
}
fn select_data_mut(&mut self, storage_type: StorageType)
-> &mut HashMap<String, (usize, BTreeMap<String, String>)> {
match storage_type {
StorageType::Session => &mut self.session_data,
StorageType::Local => &mut self.local_data
}
}
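    /// Replies with the number of key/value pairs stored for the origin of `url`.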
fn length(&self, sender: IpcSender<usize>, url: ServoUrl, storage_type: StorageType) {
let origin = self.origin_as_string(url);
let data = self.select_data(storage_type);
sender.send(data.get(&origin).map_or(0, |&(_, ref entry)| entry.len())).unwrap();
}
fn key(&self,
sender: IpcSender<Option<String>>,
url: ServoUrl,
storage_type: StorageType,
index: u32) {
let origin = self.origin_as_string(url);
let data = self.select_data(storage_type);
let key = data.get(&origin)
.and_then(|&(_, ref entry)| entry.keys().nth(index as usize))
.cloned();
sender.send(key).unwrap();
}
fn keys(&self,
sender: IpcSender<Vec<String>>,
url: ServoUrl,
storage_type: StorageType) {
let origin = self.origin_as_string(url);
let data = self.select_data(storage_type);
let keys = data.get(&origin)
.map_or(vec![], |&(_, ref entry)| entry.keys().cloned().collect());
sender.send(keys).unwrap();
}
    /// Sends Ok((changed, old_value)), where `changed` indicates whether the
    /// stored value was modified and `old_value` is the previous value when it
    /// differed from the new one. Sends Err(()) to indicate that the operation
    /// would result in exceeding the quota limit.
fn set_item(&mut self,
sender: IpcSender<Result<(bool, Option<String>), ()>>,
url: ServoUrl,
storage_type: StorageType,
name: String,
value: String) {
let origin = self.origin_as_string(url);
let (this_storage_size, other_storage_size) = {
let local_data = self.select_data(StorageType::Local);
let session_data = self.select_data(StorageType::Session);
let local_data_size = local_data.get(&origin).map_or(0, |&(total, _)| total);
let session_data_size = session_data.get(&origin).map_or(0, |&(total, _)| total);
match storage_type {
StorageType::Local => (local_data_size, session_data_size),
StorageType::Session => (session_data_size, local_data_size),
}
};
let data = self.select_data_mut(storage_type);
if !data.contains_key(&origin) {
data.insert(origin.clone(), (0, BTreeMap::new()));
}
let message = data.get_mut(&origin).map(|&mut (ref mut total, ref mut entry)| {
let mut new_total_size = this_storage_size + value.as_bytes().len();
if let Some(old_value) = entry.get(&name) {
new_total_size -= old_value.as_bytes().len();
} else {
new_total_size += name.as_bytes().len();
}
if (new_total_size + other_storage_size) > QUOTA_SIZE_LIMIT {
return Err(());
}
let message = entry.insert(name.clone(), value.clone()).map_or(
Ok((true, None)),
|old| if old == value {
Ok((false, None))
} else {
Ok((true, Some(old)))
});
*total = new_total_size;
message
}).unwrap();<|fim▁hole|> sender.send(message).unwrap();
}
fn request_item(&self,
sender: IpcSender<Option<String>>,
url: ServoUrl,
storage_type: StorageType,
name: String) {
let origin = self.origin_as_string(url);
let data = self.select_data(storage_type);
sender.send(data.get(&origin)
.and_then(|&(_, ref entry)| entry.get(&name))
.map(String::clone)).unwrap();
}
/// Sends Some(old_value) in case there was a previous value with the key name, otherwise sends None
fn remove_item(&mut self,
sender: IpcSender<Option<String>>,
url: ServoUrl,
storage_type: StorageType,
name: String) {
let origin = self.origin_as_string(url);
let data = self.select_data_mut(storage_type);
let old_value = data.get_mut(&origin).and_then(|&mut (ref mut total, ref mut entry)| {
entry.remove(&name).and_then(|old| {
*total -= name.as_bytes().len() + old.as_bytes().len();
Some(old)
})
});
sender.send(old_value).unwrap();
}
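    /// Removes every entry for the origin of `url` and replies with whether anything was cleared.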
fn clear(&mut self, sender: IpcSender<bool>, url: ServoUrl, storage_type: StorageType) {
let origin = self.origin_as_string(url);
let data = self.select_data_mut(storage_type);
sender.send(data.get_mut(&origin)
.map_or(false, |&mut (ref mut total, ref mut entry)| {
if !entry.is_empty() {
entry.clear();
*total = 0;
true
} else {
false
}})).unwrap();
}
fn origin_as_string(&self, url: ServoUrl) -> String {
url.origin().ascii_serialization()
}
}<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""URLs to run the tests."""<|fim▁hole|>try:
from django.urls import include
except ImportError:
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = (
url(r'^admin/', admin.site.urls),
url(r'^status', include('server_status.urls')),
)<|fim▁end|> | |
<|file_name|>get_binary.py<|end_file_name|><|fim▁begin|>"""
CMSIS-DAP Interface Firmware
Copyright (c) 2009-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Extract and patch the interface without bootloader
"""
from options import get_options
from paths import get_interface_path, TMP_DIR
from utils import gen_binary, is_lpc, split_path
from os.path import join
<|fim▁hole|> in_path = get_interface_path(options.interface, options.target, bootloader=False)
_, name, _ = split_path(in_path)
out_path = join(TMP_DIR, name + '.bin')
print '\nELF: %s' % in_path
gen_binary(in_path, out_path, is_lpc(options.interface))
print "\nBINARY: %s" % out_path<|fim▁end|> | if __name__ == '__main__':
options = get_options()
|
<|file_name|>async-fn.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>// FIXME: once `--edition` is stable in rustdoc, remove that `compile-flags` directive
#![feature(async_await, futures_api)]
// @has async_fn/struct.S.html
// @has - '//code' 'pub async fn f()'
pub struct S;
impl S {
pub async fn f() {}
}<|fim▁end|> | // edition:2018
// compile-flags:-Z unstable-options
|
<|file_name|>util.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import json
import logging
import os
import random
import re
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:" + e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was returned or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find:
assert_equal(expected, {})
num_matched = 0
for item in object_array:
all_match = True
for key, value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find:
num_matched = num_matched + 1
for key, value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
num_matched = num_matched + 1
if num_matched == 0 and not should_not_find:
raise AssertionError("No objects matched %s" % (str(to_match)))
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
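    """Return the p2p port for node n, offset by PortSeed.n so concurrent test processes don't collide."""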
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_url(datadir, i, rpchost=None):
rpc_u, rpc_p = get_auth_cookie(datadir, i)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n):
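    """Create the data directory for node n and write a minimal regtest bitcoin.conf."""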
datadir = os.path.join(dirname, "node" + str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
f.write("regtest=1\n")
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
f.write("listenonion=0\n")
return datadir
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
def get_auth_cookie(datadir, n):
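    """Read the RPC credentials for a node from its bitcoin.conf or regtest .cookie file."""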
user = None
password = None
if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f:
for line in f:
if line.startswith("rpcuser="):
assert user is None # Ensure that there is only one rpcuser line
user = line.split("=")[1].strip("\n")
if line.startswith("rpcpassword="):
assert password is None # Ensure that there is only one rpcpassword line
password = line.split("=")[1].strip("\n")
if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
userpass = f.read()
split_userpass = userpass.split(':')
user = split_userpass[0]
password = split_userpass[1]<|fim▁hole|> return user, password
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node" + str(n_node), "regtest", logname)
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
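    """Disconnect all peers of from_connection that identify as node_num and wait for the disconnect to finish."""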
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
from_connection.disconnectnode(nodeid=peer_id)
for _ in range(50):
if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
break
time.sleep(0.1)
else:
raise AssertionError("timed out waiting for disconnect")
def connect_nodes(from_connection, node_num):
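    """Connect from_connection to the node listening on node_num's p2p port."""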
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
    sync_blocks needs to be called with an rpc_connections set that has at least
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
# Use getblockcount() instead of waitforblockheight() to determine the
# initial max height because the two RPCs look at different internal global
# variables (chainActive vs latestBlock) and the former gets updated
# earlier.
maxheight = max(x.getblockcount() for x in rpc_connections)
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
if all(t["height"] == maxheight for t in tips):
if all(t["hash"] == tips[0]["hash"] for t in tips):
return
raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
"".join("\n {!r}".format(tip) for tip in tips)))
cur_time = time.time()
raise AssertionError("Block sync to height {} timed out:{}".format(
maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same best block
"""
while timeout > 0:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash == [best_hash[0]] * len(best_hash):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match + 1
if num_match == len(rpc_connections):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
# Transaction/Block functions
#############################
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >= 0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5 * count) + 101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
def create_tx(node, coinbase, to_address, amount):
inputs = [{"txid": coinbase, "vout": 0}]
outputs = {to_address: amount}
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
    # and 14 of them come close to the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)<|fim▁end|> | if user is None or password is None:
raise ValueError("No RPC credentials") |
<|file_name|>git.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Git tools."""
from shlex import split
from plumbum import ProcessExecutionError
from plumbum.cmd import git
DEVELOPMENT_BRANCH = "develop"
<|fim▁hole|> if not quiet:
print("{}{}".format("[DRY-RUN] " if dry_run else "", command))
if dry_run:
return ""
rv = command()
if not quiet and rv:
print(rv)
return rv
def branch_exists(branch):
"""Return True if the branch exists."""
try:
run_git("rev-parse --verify {}".format(branch), quiet=True)
return True
except ProcessExecutionError:
return False
def get_current_branch():
"""Get the current branch name."""
return run_git("rev-parse --abbrev-ref HEAD", quiet=True).strip()<|fim▁end|> | def run_git(*args, dry_run=False, quiet=False):
"""Run a git command, print it before executing and capture the output."""
command = git[split(" ".join(args))] |
<|file_name|>perf_profiler.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import re
import signal
import subprocess
import sys
import tempfile
<|fim▁hole|>from telemetry.core import platform
from telemetry.core import util
from telemetry.core.platform import profiler
from telemetry.core.platform.profiler import android_profiling_helper
from telemetry.util import support_binaries
util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android')
from pylib.perf import perf_control # pylint: disable=F0401
_PERF_OPTIONS = [
# In perf 3.13 --call-graph requires an argument, so use the -g short-hand
# which does not.
'-g',
# Increase sampling frequency for better coverage.
'--freq', '2000',
]
_PERF_OPTIONS_ANDROID = [
# Increase priority to avoid dropping samples. Requires root.
'--realtime', '80',
]
def _NicePath(path):
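  """Return the relative form of path if it is shorter than the original."""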
rel_path = os.path.relpath(path, os.curdir)
return rel_path if len(rel_path) < len(path) else path
def _PrepareHostForPerf():
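  """Unrestrict kernel symbol addresses (kptr_restrict) so perf can resolve kernel symbols."""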
kptr_file = '/proc/sys/kernel/kptr_restrict'
with open(kptr_file) as f:
if f.read().strip() != '0':
logging.warning('Making kernel symbols unrestricted. You might have to '
'enter your password for "sudo".')
with tempfile.NamedTemporaryFile() as zero:
zero.write('0')
zero.flush()
subprocess.call(['sudo', 'cp', zero.name, kptr_file])
def _InstallPerfHost():
host = platform.GetHostPlatform()
if not host.CanLaunchApplication('perfhost'):
host.InstallApplication('perfhost')
return support_binaries.FindPath('perfhost', host.GetOSName())
class _SingleProcessPerfProfiler(object):
"""An internal class for using perf for a given process.
On android, this profiler uses pre-built binaries from AOSP.
See more details in prebuilt/android/README.txt.
"""
def __init__(self, pid, output_file, browser_backend, platform_backend,
perf_binary, perfhost_binary):
self._pid = pid
self._browser_backend = browser_backend
self._platform_backend = platform_backend
self._output_file = output_file
self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
self._is_android = platform_backend.GetOSName() == 'android'
self._perfhost_binary = perfhost_binary
cmd_prefix = []
perf_args = ['record', '--pid', str(pid)]
if self._is_android:
cmd_prefix = ['adb', '-s', browser_backend.adb.device_serial(), 'shell',
perf_binary]
perf_args += _PERF_OPTIONS_ANDROID
output_file = os.path.join('/sdcard', 'perf_profiles',
os.path.basename(output_file))
self._device_output_file = output_file
browser_backend.adb.RunShellCommand(
'mkdir -p ' + os.path.dirname(self._device_output_file))
browser_backend.adb.RunShellCommand('rm -f ' + self._device_output_file)
else:
cmd_prefix = [perf_binary]
perf_args += ['--output', output_file] + _PERF_OPTIONS
self._proc = subprocess.Popen(cmd_prefix + perf_args,
stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
def CollectProfile(self):
if ('renderer' in self._output_file and
not self._is_android and
not self._platform_backend.GetCommandLine(self._pid)):
logging.warning('Renderer was swapped out during profiling. '
'To collect a full profile rerun with '
'"--extra-browser-args=--single-process"')
if self._is_android:
device = self._browser_backend.adb.device()
perf_pids = device.old_interface.ExtractPid('perf')
device.RunShellCommand('kill -SIGINT ' + ' '.join(perf_pids))
util.WaitFor(lambda: not device.old_interface.ExtractPid('perf'),
timeout=2)
self._proc.send_signal(signal.SIGINT)
exit_code = self._proc.wait()
try:
if exit_code == 128:
raise Exception(
"""perf failed with exit code 128.
Try rerunning this script under sudo or setting
/proc/sys/kernel/perf_event_paranoid to "-1".\nOutput:\n%s""" %
self._GetStdOut())
elif exit_code not in (0, -2):
raise Exception(
'perf failed with exit code %d. Output:\n%s' % (exit_code,
self._GetStdOut()))
finally:
self._tmp_output_file.close()
cmd = '%s report -n -i %s' % (_NicePath(self._perfhost_binary),
self._output_file)
if self._is_android:
device = self._browser_backend.adb.device()
device.old_interface.Adb().Pull(self._device_output_file,
self._output_file)
required_libs = \
android_profiling_helper.GetRequiredLibrariesForPerfProfile(
self._output_file)
symfs_root = os.path.dirname(self._output_file)
kallsyms = android_profiling_helper.CreateSymFs(device,
symfs_root,
required_libs,
use_symlinks=True)
cmd += ' --symfs %s --kallsyms %s' % (symfs_root, kallsyms)
for lib in required_libs:
lib = os.path.join(symfs_root, lib[1:])
if not os.path.exists(lib):
continue
objdump_path = android_profiling_helper.GetToolchainBinaryPath(
lib, 'objdump')
if objdump_path:
cmd += ' --objdump %s' % _NicePath(objdump_path)
break
print 'To view the profile, run:'
print ' ', cmd
return self._output_file
def _GetStdOut(self):
self._tmp_output_file.flush()
try:
with open(self._tmp_output_file.name) as f:
return f.read()
except IOError:
return ''
class PerfProfiler(profiler.Profiler):
def __init__(self, browser_backend, platform_backend, output_path, state):
super(PerfProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
process_output_file_map = self._GetProcessOutputFileMap()
self._process_profilers = []
self._is_android = platform_backend.GetOSName() == 'android'
perf_binary = perfhost_binary = _InstallPerfHost()
try:
if self._is_android:
device = browser_backend.adb.device()
perf_binary = android_profiling_helper.PrepareDeviceForPerf(device)
self._perf_control = perf_control.PerfControl(device)
self._perf_control.SetPerfProfilingMode()
else:
_PrepareHostForPerf()
for pid, output_file in process_output_file_map.iteritems():
if 'zygote' in output_file:
continue
self._process_profilers.append(
_SingleProcessPerfProfiler(
pid, output_file, browser_backend, platform_backend,
perf_binary, perfhost_binary))
except:
if self._is_android:
self._perf_control.SetDefaultPerfMode()
raise
@classmethod
def name(cls):
return 'perf'
@classmethod
def is_supported(cls, browser_type):
if sys.platform != 'linux2':
return False
if browser_type.startswith('cros'):
return False
return True
@classmethod
def CustomizeBrowserOptions(cls, browser_type, options):
options.AppendExtraBrowserArgs([
'--no-sandbox',
'--allow-sandbox-debugging',
])
def CollectProfile(self):
if self._is_android:
self._perf_control.SetDefaultPerfMode()
output_files = []
for single_process in self._process_profilers:
output_files.append(single_process.CollectProfile())
return output_files
@classmethod
def GetTopSamples(cls, file_name, number):
"""Parses the perf generated profile in |file_name| and returns a
    {function: period} dict of the |number| hottest functions.
"""
assert os.path.exists(file_name)
with open(os.devnull, 'w') as devnull:
_InstallPerfHost()
report = subprocess.Popen(
['perfhost', 'report', '--show-total-period', '-U', '-t', '^', '-i',
file_name],
stdout=subprocess.PIPE, stderr=devnull).communicate()[0]
period_by_function = {}
for line in report.split('\n'):
if not line or line.startswith('#'):
continue
fields = line.split('^')
if len(fields) != 5:
continue
period = int(fields[1])
function = fields[4].partition(' ')[2]
function = re.sub('<.*>', '', function) # Strip template params.
function = re.sub('[(].*[)]', '', function) # Strip function params.
period_by_function[function] = period
if len(period_by_function) == number:
break
return period_by_function<|fim▁end|> | |
<|file_name|>gae_models.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for an Oppia exploration."""
import datetime
from constants import constants
import core.storage.base_model.gae_models as base_models
import core.storage.user.gae_models as user_models
import feconf
from google.appengine.ext import ndb
class ExplorationSnapshotMetadataModel(base_models.BaseSnapshotMetadataModel):
"""Storage model for the metadata for an exploration snapshot."""
pass
class ExplorationSnapshotContentModel(base_models.BaseSnapshotContentModel):
"""Storage model for the content of an exploration snapshot."""
pass
class ExplorationModel(base_models.VersionedModel):
"""Versioned storage model for an Oppia exploration.
This class should only be imported by the exploration services file
and the exploration model test file.
"""
SNAPSHOT_METADATA_CLASS = ExplorationSnapshotMetadataModel
SNAPSHOT_CONTENT_CLASS = ExplorationSnapshotContentModel
ALLOW_REVERT = True
# What this exploration is called.
title = ndb.StringProperty(required=True)
# The category this exploration belongs to.
category = ndb.StringProperty(required=True, indexed=True)
# The objective of this exploration.
objective = ndb.TextProperty(default='', indexed=False)
# The ISO 639-1 code for the language this exploration is written in.
language_code = ndb.StringProperty(
default=constants.DEFAULT_LANGUAGE_CODE, indexed=True)
# Tags (topics, skills, concepts, etc.) associated with this
# exploration.
tags = ndb.StringProperty(repeated=True, indexed=True)
# A blurb for this exploration.
blurb = ndb.TextProperty(default='', indexed=False)
# 'Author notes' for this exploration.
author_notes = ndb.TextProperty(default='', indexed=False)
<|fim▁hole|> required=True, default=0, indexed=True)
# The name of the initial state of this exploration.
init_state_name = ndb.StringProperty(required=True, indexed=False)
# A dict representing the states of this exploration. This dict should
# not be empty.
states = ndb.JsonProperty(default={}, indexed=False)
# The dict of parameter specifications associated with this exploration.
# Each specification is a dict whose keys are param names and whose values
# are each dicts with a single key, 'obj_type', whose value is a string.
param_specs = ndb.JsonProperty(default={}, indexed=False)
# The list of parameter changes to be performed once at the start of a
# reader's encounter with an exploration.
param_changes = ndb.JsonProperty(repeated=True, indexed=False)
# A boolean indicating whether automatic text-to-speech is enabled in
# this exploration.
auto_tts_enabled = ndb.BooleanProperty(default=True, indexed=True)
# A boolean indicating whether correctness feedback is enabled in this
# exploration.
correctness_feedback_enabled = ndb.BooleanProperty(
default=False, indexed=True)
# DEPRECATED in v2.0.0.rc.2. Do not use. Retaining it here because deletion
# caused GAE to raise an error on fetching a specific version of the
# exploration model.
# TODO(sll): Fix this error and remove this property.
skill_tags = ndb.StringProperty(repeated=True, indexed=True)
# DEPRECATED in v2.0.1. Do not use.
# TODO(sll): Remove this property from the model.
default_skin = ndb.StringProperty(default='conversation_v1')
# DEPRECATED in v2.5.4. Do not use.
skin_customizations = ndb.JsonProperty(indexed=False)
@classmethod
def get_exploration_count(cls):
"""Returns the total number of explorations."""
return cls.get_all().count()
def _trusted_commit(
self, committer_id, commit_type, commit_message, commit_cmds):
"""Record the event to the commit log after the model commit.
Note that this extends the superclass method.
Args:
committer_id: str. The user_id of the user who committed the
change.
commit_type: str. The type of commit. Possible values are in
core.storage.base_models.COMMIT_TYPE_CHOICES.
commit_message: str. The commit description message.
commit_cmds: list(dict). A list of commands, describing changes
made in this model, which should give sufficient information to
reconstruct the commit. Each dict always contains:
cmd: str. Unique command.
and then additional arguments for that command.
"""
super(ExplorationModel, self)._trusted_commit(
committer_id, commit_type, commit_message, commit_cmds)
committer_user_settings_model = (
user_models.UserSettingsModel.get_by_id(committer_id))
committer_username = (
committer_user_settings_model.username
if committer_user_settings_model else '')
exp_rights = ExplorationRightsModel.get_by_id(self.id)
# TODO(msl): test if put_async() leads to any problems (make
# sure summary dicts get updated correctly when explorations
# are changed).
exploration_commit_log = ExplorationCommitLogEntryModel.create(
self.id, self.version, committer_id, committer_username,
commit_type, commit_message, commit_cmds, exp_rights.status,
exp_rights.community_owned
)
exploration_commit_log.exploration_id = self.id
exploration_commit_log.put()
class ExplorationRightsSnapshotMetadataModel(
base_models.BaseSnapshotMetadataModel):
"""Storage model for the metadata for an exploration rights snapshot."""
pass
class ExplorationRightsSnapshotContentModel(
base_models.BaseSnapshotContentModel):
"""Storage model for the content of an exploration rights snapshot."""
pass
class ExplorationRightsModel(base_models.VersionedModel):
"""Storage model for rights related to an exploration.
The id of each instance is the id of the corresponding exploration.
"""
SNAPSHOT_METADATA_CLASS = ExplorationRightsSnapshotMetadataModel
SNAPSHOT_CONTENT_CLASS = ExplorationRightsSnapshotContentModel
ALLOW_REVERT = False
# The user_ids of owners of this exploration.
owner_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to edit this exploration.
editor_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to voiceover this exploration.
voice_artist_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to view this exploration.
viewer_ids = ndb.StringProperty(indexed=True, repeated=True)
# Whether this exploration is owned by the community.
community_owned = ndb.BooleanProperty(indexed=True, default=False)
# The exploration id which this exploration was cloned from. If None, this
# exploration was created from scratch.
cloned_from = ndb.StringProperty()
# For private explorations, whether this exploration can be viewed
# by anyone who has the URL. If the exploration is not private, this
# setting is ignored.
viewable_if_private = ndb.BooleanProperty(indexed=True, default=False)
# Time, in milliseconds, when the exploration was first published.
first_published_msec = ndb.FloatProperty(indexed=True, default=None)
# The publication status of this exploration.
status = ndb.StringProperty(
default=constants.ACTIVITY_STATUS_PRIVATE, indexed=True,
choices=[
constants.ACTIVITY_STATUS_PRIVATE,
constants.ACTIVITY_STATUS_PUBLIC
]
)
# DEPRECATED in v2.8.3. Do not use.
translator_ids = ndb.StringProperty(indexed=True, repeated=True)
def save(self, committer_id, commit_message, commit_cmds):
"""Saves a new version of the exploration, updating the Exploration
datastore model.
Args:
committer_id: str. The user_id of the user who committed the
change.
commit_message: str. The commit description message.
commit_cmds: list(dict). A list of commands, describing changes
made in this model, which should give sufficient information to
reconstruct the commit. Each dict always contains:
cmd: str. The type of the command. A full list of command
types can be found in core/domain/exp_domain.py.
and then additional arguments for that command. For example:
{'cmd': 'AUTO_revert_version_number',
'version_number': 4}
"""
super(ExplorationRightsModel, self).commit(
committer_id, commit_message, commit_cmds)
def _trusted_commit(
self, committer_id, commit_type, commit_message, commit_cmds):
"""Record the event to the commit log after the model commit.
Note that this extends the superclass method.
Args:
committer_id: str. The user_id of the user who committed the
change.
commit_type: str. The type of commit. Possible values are in
core.storage.base_models.COMMIT_TYPE_CHOICES.
commit_message: str. The commit description message.
commit_cmds: list(dict). A list of commands, describing changes
                made in this model, which should give sufficient information to
reconstruct the commit. Each dict always contains:
cmd: str. Unique command.
and then additional arguments for that command.
"""
super(ExplorationRightsModel, self)._trusted_commit(
committer_id, commit_type, commit_message, commit_cmds)
# Create and delete events will already be recorded in the
# ExplorationModel.
if commit_type not in ['create', 'delete']:
committer_user_settings_model = (
user_models.UserSettingsModel.get_by_id(committer_id))
committer_username = (
committer_user_settings_model.username
if committer_user_settings_model else '')
# TODO(msl): test if put_async() leads to any problems (make
# sure summary dicts get updated correctly when explorations
# are changed).
ExplorationCommitLogEntryModel(
id=('rights-%s-%s' % (self.id, self.version)),
user_id=committer_id,
username=committer_username,
exploration_id=self.id,
commit_type=commit_type,
commit_message=commit_message,
commit_cmds=commit_cmds,
version=None,
post_commit_status=self.status,
post_commit_community_owned=self.community_owned,
post_commit_is_private=(
self.status == constants.ACTIVITY_STATUS_PRIVATE)
).put_async()
class ExplorationCommitLogEntryModel(base_models.BaseCommitLogEntryModel):
"""Log of commits to explorations.
A new instance of this model is created and saved every time a commit to
ExplorationModel or ExplorationRightsModel occurs.
The id for this model is of the form
'exploration-{{EXP_ID}}-{{EXP_VERSION}}'.
"""
# The id of the exploration being edited.
exploration_id = ndb.StringProperty(indexed=True, required=True)
@classmethod
def get_multi(cls, exp_id, exp_versions):
"""Gets the ExplorationCommitLogEntryModels for the given exploration
id and exploration versions.
Args:
exp_id: str. The id of the exploration.
exp_versions: list(int). The versions of the exploration.
Returns:
list(ExplorationCommitLogEntryModel). The list of
ExplorationCommitLogEntryModel instances which matches the given
exp_id and exp_versions.
"""
instance_ids = [cls._get_instance_id(exp_id, exp_version)
for exp_version in exp_versions]
return super(ExplorationCommitLogEntryModel, cls).get_multi(
instance_ids)
@classmethod
def _get_instance_id(cls, exp_id, exp_version):
"""Returns ID of the exploration commit log entry model.
Args:
exp_id: str. The exploration id whose states are mapped.
exp_version: int. The version of the exploration.
Returns:
str. A string containing exploration ID and
exploration version.
"""
return 'exploration-%s-%s' % (exp_id, exp_version)
@classmethod
def get_all_non_private_commits(
cls, page_size, urlsafe_start_cursor, max_age=None):
"""Fetches a list of all the non-private commits sorted by their
last updated attribute.
Args:
page_size: int. The maximum number of entities to be returned.
urlsafe_start_cursor: str or None. If provided, the list of
returned entities starts from this datastore cursor.
Otherwise, the returned entities start from the beginning
of the full list of entities.
max_age: datetime.timedelta. The maximum time duration within which
commits are needed.
Returns:
3-tuple of (results, cursor, more) which were created which were
created no earlier than max_age before the current time where:
results: List of query results.
cursor: str or None. A query cursor pointing to the next
batch of results. If there are no more results, this will
be None.
more: bool. If True, there are (probably) more results after
this batch. If False, there are no further results after
this batch.
"""
if not isinstance(max_age, datetime.timedelta) and max_age is not None:
raise ValueError(
'max_age must be a datetime.timedelta instance or None.')
query = cls.query(cls.post_commit_is_private == False) # pylint: disable=singleton-comparison
if max_age:
query = query.filter(
cls.last_updated >= datetime.datetime.utcnow() - max_age)
return cls._fetch_page_sorted_by_last_updated(
query, page_size, urlsafe_start_cursor)
class ExpSummaryModel(base_models.BaseModel):
"""Summary model for an Oppia exploration.
This should be used whenever the content blob of the exploration is not
needed (e.g. in search results, etc).
A ExpSummaryModel instance stores the following information:
id, title, category, objective, language_code, tags,
last_updated, created_on, status (private, public),
community_owned, owner_ids, editor_ids,
viewer_ids, version.
The key of each instance is the exploration id.
"""
# What this exploration is called.
title = ndb.StringProperty(required=True)
# The category this exploration belongs to.
category = ndb.StringProperty(required=True, indexed=True)
# The objective of this exploration.
objective = ndb.TextProperty(required=True, indexed=False)
# The ISO 639-1 code for the language this exploration is written in.
language_code = ndb.StringProperty(required=True, indexed=True)
# Tags associated with this exploration.
tags = ndb.StringProperty(repeated=True, indexed=True)
# Aggregate user-assigned ratings of the exploration.
ratings = ndb.JsonProperty(default=None, indexed=False)
# Scaled average rating for the exploration.
scaled_average_rating = ndb.FloatProperty(indexed=True)
# Time when the exploration model was last updated (not to be
# confused with last_updated, which is the time when the
# exploration *summary* model was last updated).
exploration_model_last_updated = ndb.DateTimeProperty(indexed=True)
# Time when the exploration model was created (not to be confused
# with created_on, which is the time when the exploration *summary*
# model was created).
exploration_model_created_on = ndb.DateTimeProperty(indexed=True)
# Time when the exploration was first published.
first_published_msec = ndb.FloatProperty(indexed=True)
# The publication status of this exploration.
status = ndb.StringProperty(
default=constants.ACTIVITY_STATUS_PRIVATE, indexed=True,
choices=[
constants.ACTIVITY_STATUS_PRIVATE,
constants.ACTIVITY_STATUS_PUBLIC
]
)
# Whether this exploration is owned by the community.
community_owned = ndb.BooleanProperty(required=True, indexed=True)
# The user_ids of owners of this exploration.
owner_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to edit this exploration.
editor_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to voiceover this exploration.
voice_artist_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who are allowed to view this exploration.
viewer_ids = ndb.StringProperty(indexed=True, repeated=True)
# The user_ids of users who have contributed (humans who have made a
# positive (not just a revert) change to the exploration's content).
contributor_ids = ndb.StringProperty(indexed=True, repeated=True)
# A dict representing the contributors of non-trivial commits to this
# exploration. Each key of this dict is a user_id, and the corresponding
# value is the number of non-trivial commits that the user has made.
contributors_summary = ndb.JsonProperty(default={}, indexed=False)
# The version number of the exploration after this commit. Only populated
# for commits to an exploration (as opposed to its rights, etc.).
version = ndb.IntegerProperty()
# DEPRECATED in v2.8.3. Do not use.
translator_ids = ndb.StringProperty(indexed=True, repeated=True)
@classmethod
def get_non_private(cls):
"""Returns an iterable with non-private ExpSummary models.
Returns:
iterable. An iterable with non-private ExpSummary models.
"""
return ExpSummaryModel.query().filter(
ExpSummaryModel.status != constants.ACTIVITY_STATUS_PRIVATE
).filter(
ExpSummaryModel.deleted == False # pylint: disable=singleton-comparison
).fetch(feconf.DEFAULT_QUERY_LIMIT)
@classmethod
def get_top_rated(cls, limit):
"""Fetches the top-rated exp summaries that are public in descending
order of scaled_average_rating.
Args:
limit: int. The maximum number of results to return.
Returns:
iterable. An iterable with the top rated exp summaries that are
public in descending order of scaled_average_rating.
"""
return ExpSummaryModel.query().filter(
ExpSummaryModel.status == constants.ACTIVITY_STATUS_PUBLIC
).filter(
ExpSummaryModel.deleted == False # pylint: disable=singleton-comparison
).order(
-ExpSummaryModel.scaled_average_rating
).fetch(limit)
@classmethod
def get_private_at_least_viewable(cls, user_id):
"""Fetches private exp summaries that are at least viewable by the
given user.
Args:
            user_id: str. The id of the given user.
Returns:
iterable. An iterable with private exp summaries that are at least
viewable by the given user.
"""
return ExpSummaryModel.query().filter(
ExpSummaryModel.status == constants.ACTIVITY_STATUS_PRIVATE
).filter(
ndb.OR(ExpSummaryModel.owner_ids == user_id,
ExpSummaryModel.editor_ids == user_id,
ExpSummaryModel.voice_artist_ids == user_id,
ExpSummaryModel.viewer_ids == user_id)
).filter(
ExpSummaryModel.deleted == False # pylint: disable=singleton-comparison
).fetch(feconf.DEFAULT_QUERY_LIMIT)
@classmethod
def get_at_least_editable(cls, user_id):
"""Fetches exp summaries that are at least editable by the given user.
Args:
            user_id: str. The id of the given user.
Returns:
iterable. An iterable with exp summaries that are at least
editable by the given user.
"""
return ExpSummaryModel.query().filter(
ndb.OR(ExpSummaryModel.owner_ids == user_id,
ExpSummaryModel.editor_ids == user_id)
).filter(
ExpSummaryModel.deleted == False # pylint: disable=singleton-comparison
).fetch(feconf.DEFAULT_QUERY_LIMIT)
@classmethod
def get_recently_published(cls, limit):
"""Fetches exp summaries that are recently published.
Args:
limit: int. The maximum number of results to return.
Returns:
An iterable with exp summaries that are recently published. The
returned list is sorted by the time of publication with latest
being first in the list.
"""
return ExpSummaryModel.query().filter(
ExpSummaryModel.status == constants.ACTIVITY_STATUS_PUBLIC
).filter(
ExpSummaryModel.deleted == False # pylint: disable=singleton-comparison
).order(
-ExpSummaryModel.first_published_msec
).fetch(limit)
class StateIdMappingModel(base_models.BaseModel):
"""DEPRECATED: DO NOT USE.
State ID model for Oppia explorations.
This model maps each exploration version's state to a unique id.
Note: use the state id only for derived data, but not for data that’s
regarded as the source of truth, as the rules for assigning state id may
change in future.
The key of each instance is a combination of exploration id and version.
"""
# The exploration id whose states are mapped.
exploration_id = ndb.StringProperty(indexed=True, required=True)
# The version of the exploration.
exploration_version = ndb.IntegerProperty(indexed=True, required=True)
# A dict which maps each state name to a unique id.
state_names_to_ids = ndb.JsonProperty(required=True)
    # Latest state id that has been assigned to any of the states in any
    # of the versions of the given exploration. New state IDs should be assigned
# from this value + 1.
largest_state_id_used = ndb.IntegerProperty(indexed=True, required=True)
@classmethod
def create(
cls, exp_id, exp_version, state_names_to_ids,
largest_state_id_used, overwrite=False):
"""Creates a new instance of state id mapping model.
Args:
exp_id: str. The exploration id whose states are mapped.
exp_version: int. The version of that exploration.
state_names_to_ids: dict. A dict storing state name to ids mapping.
largest_state_id_used: int. The largest integer so far that has been
used as a state ID for this exploration.
overwrite: bool. Whether overwriting of an existing model should
be allowed.
Returns:
StateIdMappingModel. Instance of the state id mapping model.
"""
instance_id = cls._generate_instance_id(exp_id, exp_version)
if not overwrite and cls.get_by_id(instance_id):
raise Exception(
'State id mapping model already exists for exploration %s,'
' version %d' % (exp_id, exp_version))
model = cls(
id=instance_id, exploration_id=exp_id,
exploration_version=exp_version,
state_names_to_ids=state_names_to_ids,
largest_state_id_used=largest_state_id_used)
model.put()
return model
@classmethod
def _generate_instance_id(cls, exp_id, exp_version):
"""Generates ID of the state id mapping model instance.
Args:
exp_id: str. The exploration id whose states are mapped.
exp_version: int. The version of the exploration.
Returns:
str. A string containing exploration ID and
exploration version.
"""
return '%s.%d' % (exp_id, exp_version)
@classmethod
def get_state_id_mapping_model(cls, exp_id, exp_version):
"""Retrieve state id mapping model from the datastore.
Args:
exp_id: str. The exploration id.
exp_version: int. The exploration version.
Returns:
StateIdMappingModel. The model retrieved from the datastore.
"""
instance_id = cls._generate_instance_id(exp_id, exp_version)
instance = cls.get(instance_id)
return instance
@classmethod
def delete_state_id_mapping_models(cls, exp_id, exp_versions):
"""Removes state id mapping models present in state_id_mapping_models.
Args:
exp_id: str. The id of the exploration.
exp_versions: list(int). A list of exploration versions for which
the state id mapping model is to be deleted.
"""
keys = [
ndb.Key(cls, cls._generate_instance_id(exp_id, exp_version))
for exp_version in exp_versions]
ndb.delete_multi(keys)<|fim▁end|> | # The version of the states blob schema.
states_schema_version = ndb.IntegerProperty( |
<|file_name|>main-dev.js<|end_file_name|><|fim▁begin|>document.write('<script src="js/goog/base.js"></script>');
document.write('<script src="js/deps.js"></script>');<|fim▁hole|><|fim▁end|> | document.write("<script>goog.require('AlignShop');</script>"); |
<|file_name|>color_world.py<|end_file_name|><|fim▁begin|>from __future__ import division
from direct.showbase.ShowBase import ShowBase
from direct.actor.Actor import ActorNode
from panda3d.core import WindowProperties, NodePath, LVector3
from panda3d.core import LineSegs, OrthographicLens, CardMaker
from inputs import Inputs
from sys import path
import square
try:
path.insert(1, '../pydaq')
import pydaq
except ImportError:
pydaq = None
class ColorWorld(object):
def __init__(self, config=None):
# keep track of velocity, this allows me to counteract joystick with keyboard
self.velocity = LVector3(0)
if config is None:
self.config = {}
execfile('config.py', self.config)
else:
self.config = config
self.reward = None
if pydaq:
self.reward = pydaq.GiveReward()
self.reward_count = 0
# self.color_map always corresponds to (r, g, b)
# does not change during game, each game uses a particular color space
self.color_dict = square.make_color_map(self.config['colors'])
# sets the range of colors for this map
self.c_range = self.config['c_range']
# color variables (make dictionary?)
# color_list is set in beginning, and then after that this is only
# called again for non-random (training)
self.color_list = square.set_start_position_colors(self.config)
self.color_match = [0, 0, 0]
self.color_tolerance = []
self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
print 'starting avt position', self.last_avt
print 'map avatar factor', self.avt_factor
self.random = True
if self.config.get('match_direction'):
self.random = False
# adjustment to speed so corresponds to gobananas task
# 7 seconds to cross original environment
# speed needs to be adjusted to both speed in original
# environment and c_range of colors
# self.speed = 0.05 * (self.c_range[1] - self.c_range[0])
# speed is own variable, so can be changed during training.
self.speed = self.config['speed']
# map avatar variables
self.render2d = None
self.match_square = None
self.map_avt_node = []
# need a multiplier to the joystick output to tolerable speed
self.vel_base = 3
self.max_vel = [500, 500, 0]
self.card = None
self.base = ShowBase()
self.base.disableMouse()
# assume we are showing windows unless proven otherwise
if self.config.get('win', True):
# only need inputs if we have a window
self.inputs = Inputs(self.base)
props = WindowProperties()
props.setCursorHidden(True)
props.setForeground(True)
print self.config.get('resolution')
if self.config.get('resolution'):
props.set_size(int(self.config['resolution'][0]), int(self.config['resolution'][1]))
props.set_origin(0, 0)
else:
props.set_size(600, 600)
props.set_origin(400, 50)
self.base.win.requestProperties(props)
# print self.base.win.get_size()
# setup color map on second window
sq_node = square.setup_square(self.config)
self.setup_display2(sq_node)
# print 'background color', self.base.getBackgroundColor()
# create the avatar
self.avatar = NodePath(ActorNode("avatar"))
self.avatar.reparentTo(self.base.render)
self.avatar.setH(self.base.camera.getH())
self.base.camera.reparentTo(self.avatar)
self.base.camera.setPos(0, 0, 0)
# initialize task variables
self.frame_task = None
self.started_game = None
self.showed_match = None
self.gave_reward = None
# initialize and start the game
self.set_next_trial()
# print 'end init'
def start_loop(self):
# need to get new match
print 'start loop'
self.started_game = self.base.taskMgr.doMethodLater(5, self.start_play, 'start_play')
self.showed_match = self.base.taskMgr.add(self.show_match_sample, 'match_image')
# Task methods
def show_match_sample(self, task):
print 'show match sample'
print self.color_match[:]
# match_image.fill(*self.color_match[:])
card = CardMaker('card')
color_match = self.color_match[:]
# add alpha channel
color_match.append(1)
print color_match
card.set_color(*color_match[:])
card.set_frame(-12, -8, 0, 4)
# log this
self.card = self.base.render.attach_new_node(card.generate())
return task.done
def start_play(self, task):
print 'start play'
# log this
self.base.taskMgr.remove('match_image')
self.card.removeNode()
# print self.base.render.ls()
self.frame_task = self.base.taskMgr.add(self.game_loop, "game_loop")
self.frame_task.last = 0 # initiate task time of the last frame
# log this
self.base.setBackgroundColor(self.color_list[:])
return task.done
def game_loop(self, task):
dt = task.time - task.last
task.last = task.time
self.velocity = self.inputs.poll_inputs(self.velocity)
move = self.move_avatar(dt)
stop = self.change_background(move)
self.move_map_avatar(move, stop)
match = self.check_color_match()
if match:
self.give_reward()
return task.done
return task.cont
def reward_loop(self, task):
self.reward_count += 1
if self.reward_count <= self.config['num_beeps']:
if self.reward:
# log this
print 'give a bloody reward already'
self.reward.pumpOut()
print 'give reward'
return task.again
else:
self.end_loop()
return task.done
def move_avatar(self, dt):
# print 'velocity', self.velocity
# this makes for smooth (correct speed) diagonal movement
# print 'velocity', self.velocity
magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
move = None
if self.velocity.normalize():
# go left in increasing amount
# print 'dt', dt
# print 'normalized'
# print 'velocity', self.velocity
# print 'magnitude', magnitude
self.velocity *= magnitude
# print 'velocity', self.velocity
# this makes for smooth movement
move = self.velocity * self.vel_base * dt
# print move
self.avatar.setFluidPos(self.avatar, move)
return move
def change_background(self, move):
stop = [True, True, True]
if move:
# print move
move *= self.speed
for i in range(3):
value = self.color_dict[i]
if value is not None:
stop[i] = False
# keys correspond to x,y,z
# values correspond to r,g,b
if i == 2:
# z axis is treated differently
# need to work on this. z should
# be at min when both x and y are at max
# taking the average is not quite right...
z_move = (move[0] + move[1])/2
# print z_move
self.color_list[value] -= z_move
else:
self.color_list[value] += move[i]
if self.color_list[value] < self.c_range[0]:
self.color_list[value] = self.c_range[0]
stop[i] = True
elif self.color_list[value] > self.c_range[1]:
self.color_list[value] = self.c_range[1]
stop[i] = True
# log this
self.base.setBackgroundColor(self.color_list[:])
# print self.base.getBackgroundColor()
return stop
def move_map_avatar(self, move, stop):
# print move
# avatar is mapped assuming c_range of 0.5. What do I need to
# change to use a different c_range? c_range of one is twice
# the
if move:
avt = LineSegs()
avt.setThickness(1)
avt.setColor(1, 1, 1)
# print 'last', self.last_avt
avt.move_to(self.last_avt[0], -5, self.last_avt[1])
# print 'move', move
new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
# new_move = [i + j for i, j in zip(self.last_avt, move)]
# would it be better to have a local stop condition?
if stop[0]:
new_move[0] = self.last_avt[0]
# print 'stop x', self.last_avt[0]
if stop[1]:
new_move[1] = self.last_avt[1]
# print 'stop y', self.last_avt[1]
# print 'new', new_move
self.last_avt = [new_move[0], new_move[1]]
avt.draw_to(new_move[0], -5, new_move[1])
self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
# print self.map_avt_node[-1]
# can't let too many nodes pile up
if len(self.map_avt_node) > 299:
# removing the node does not remove the object from the list
for i, j in enumerate(self.map_avt_node):
j.removeNode()
if i > 49:
break
del self.map_avt_node[0:50]
def check_color_match(self):
# print 'match this', self.color_tolerance
# print self.color_list
check_color = [j[0] < self.color_list[i] < j[1] for i, j in enumerate(self.color_tolerance)]
# print check_color
if all(check_color):
return True
else:
return False
def give_reward(self):
# clear the background
self.base.setBackgroundColor(0.41, 0.41, 0.41)
print 'give first reward'
self.reward_count = 1
if self.reward:
# log this
self.reward.pumpOut()
self.gave_reward = self.base.taskMgr.doMethodLater(self.config['pump_delay'], self.reward_loop, 'reward_loop')
def end_loop(self):
print 'end loop'
# clear avatar map
self.clear_avatar_map()
# if there is a match set, return to center of color gradient,
# set new match, if applicable
self.set_next_trial()
def clear_avatar_map(self):
for i, j in enumerate(self.map_avt_node):
j.removeNode()
self.map_avt_node = []
def plot_match_square(self, corners):
print 'plot match square'
print corners
match = LineSegs()
match.setThickness(1.5)
match.setColor(0, 0, 0)
match.move_to(corners[0][0], -5, corners[1][0])
match.draw_to(corners[0][1], -5, corners[1][0])
match.draw_to(corners[0][1], -5, corners[1][1])
match.draw_to(corners[0][0], -5, corners[1][1])
match.draw_to(corners[0][0], -5, corners[1][0])
# print self.render2d
self.match_square = self.render2d.attach_new_node(match.create())
def create_avatar_map_match_square(self, config=None):
print 'make new square for map'
if config is not None:
config_dict = config
else:
config_dict = self.config
# create square on avatar map for new color match
map_color_match, factor = square.translate_color_map(config_dict, self.color_dict, self.color_match)
tolerance = config_dict['tolerance'] * factor
map_color_tolerance = [(i - tolerance, i + tolerance) for i in map_color_match]
print map_color_tolerance
if self.render2d:
if self.match_square:
self.match_square.removeNode()
self.plot_match_square(map_color_tolerance)
def set_next_trial(self):
print 'set next trial'
# move avatar back to beginning position, only matters for
# showing card for next color match
self.avatar.set_pos(-10, -10, 2)
# set color_list with starting color
# if random, won't use this again, but for manual, will
# return to center
# need to update self.config to new direction, if there is one
if self.config.get('match_direction'):
self.check_key_map()
# return to center, otherwise random will start where you left off
self.color_list = square.set_start_position_colors(self.config)
# starting position for map avatar, just translate new color_list
self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
print 'start color', self.color_list
print self.color_dict
# again need to update self.config for match if using keys
self.color_match = square.set_match_colors(self.config, self.color_dict)
# sets the tolerance for how close to a color for reward
self.color_tolerance = [(i - self.config['tolerance'], i + self.config['tolerance']) for i in self.color_match]
print 'color match', self.color_match
print 'color tolerance', self.color_tolerance
self.create_avatar_map_match_square(self.config)
# start the game
self.start_loop()
def check_key_map(self):
if self.config['colors'][0]:
if self.inputs.key_map['r']:
self.config['match_direction'] = ['right']
elif self.inputs.key_map['r'] is not None:
self.config['match_direction'] = ['left']
elif self.config['colors'][1]:
if self.inputs.key_map['f']:<|fim▁hole|>
def setup_display2(self, display_node):
print 'setup display2'
props = WindowProperties()
props.set_cursor_hidden(True)
props.set_foreground(False)
if self.config.get('resolution'):
props.setSize(700, 700)
props.setOrigin(-int(self.config['resolution'][0] - 5), 5)
else:
props.setSize(300, 300)
props.setOrigin(10, 10)
window2 = self.base.openWindow(props=props, aspectRatio=1)
lens = OrthographicLens()
lens.set_film_size(2, 2)
lens.setNearFar(-100, 100)
self.render2d = NodePath('render2d')
self.render2d.attach_new_node(display_node)
camera2d = self.base.makeCamera(window2)
camera2d.setPos(0, -10, 0)
camera2d.node().setLens(lens)
camera2d.reparentTo(self.render2d)
if __name__ == "__main__":
CW = ColorWorld()
CW.base.run()<|fim▁end|> | self.config['match_direction'] = ['front']
elif self.inputs.key_map['f'] is not None:
self.config['match_direction'] = ['back'] |
<|file_name|>fetchheaders.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2
#
# Copyright 2012 Abid Hasan Mujtaba
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Author: Abid H. Mujtaba
# Email: [email protected]
#
# Start Date: Aug. 9, 2012
# Last Revised: sep. 24, 2012
#
#
# This script is intended as a program that reads a configuration file and uses the information stored there-in to connect to a variety of IMAP servers and display header information about the emails in various folders (INBOX by default). It also has the capability of deleting selected emails. The advantage is that minimal information needs to be downloaded (i.e. only certain header fields) without needing to download the entire email and one can choose to delete unnecessary emails judging by the sender and/or subject only.
# Enable Python 3.x style print function:
from __future__ import print_function
import re
# Create global variables that implement global settings which are used by the following functions.
maxThreads = 5 # This value will be over-written by the global default and possibly a command-line argument
colorTitle = None
colorFlag = None
colorFrom = None
colorDate = None
colorSubjectSeen = None
colorSubjectUnseen = None
showFlags = None
def setOptions( configFile, configSpecFile ) :
'''
This function reads in the options from the configuration file and validates them using the configuration specification file passed to it. It creates a dictionary of options for each account which are used by the pollAccount() function to carry out its tasks. Additionally this function reads the 'global' section in the configuration file and creates and the globalSettings dictionary that contains the global settings for the program.
'''
from configobj import ConfigObj, ConfigObjError, flatten_errors
from validate import Validator
# Note the following code segment concerned with using ConfigObj and validating the entries has been inspired and in part copied from http://www.voidspace.org.uk/python/articles/configobj.shtml (an excellent tutorial on using ConfigObj by its author(s))
try:
config = ConfigObj( configFile, configspec = configSpecFile, file_error = True )
except (ConfigObjError, IOError), e:
print( 'Could not read "%s": %s' % (configFile, e) )
validator = Validator()
results = config.validate( validator )
if results != True : # Validation failed. Inform user of offending entries.
for (section_list, key, _) in flatten_errors( config, results ) :
if key is not None :
print( 'The "%s" key in the section "%s" failed validation' % (key, ','.join( section_list ) ) )
else :
print( 'The following section was missing: %s' % ','.join( section_list ) )
import sys
sys.exit(1)
# Validation successful so we move on to creating the 'servers' dictionary. We are implementing a default account paradigm which is not natively supported by ConfigObj. We want the ConfigParser ability where any option not provided in a subsection but contained in the 'DEFAULT' subsection are copied in to it. To achieve this we will need to know which entries are missing in each subsection without having them filled in using the default values from the config.spec file. To that end we read in the config file without reading the spec file (hence no spec defaults are read in).
configNoSpec = ConfigObj( configFile ) # Note since config passed validation we automatically know that configNoSpec is also valid.
# The first step is to copy out the default account section dictionary and use it as the basic dictionary for all accounts. We will over-write the options that are provided in each account sub-section as we read them.
listDefaultOptions = configNoSpec[ 'accounts' ][ 'DEFAULT' ].keys() # List of Default options as EXPLICITLY provided in the configuration file (hence the use of configNoSpec as compared to just config)
listAccounts = [ x for x in config[ 'accounts' ].keys() if x != 'DEFAULT' ] # List of Accounts that does NOT contain 'DEFAULT'. We are basically carrying out list subtraction here: completely removing certain elements from the list by using list comprehension along with a predicate
# Note: Everywhere a value needs to be read in we must use 'config' and NOT 'configNoSpec' since 'config' by virtue of knowing the required type of each option reads in the values as the correct type rather than as a string which is what we want.
servers = {} # Empty dictionary which we will populate with account configuration information
for account in listAccounts :
servers[ account ] = {} # Create sub-dictionary for account
servers[ account ][ 'name' ] = account # Saving account name for identification and laster use when the sub-dictionary is passed to pollAccount
for key, value in config[ 'accounts' ][ account ].items() :
servers[ account ][ key ] = value # Copy configuration information
# So far we have stored in the dictionary (for this account) the values specified explicitly and the global defaults from config.spec that are automatically loaded for missing options. Now we must over-write with the options that are not explicitly given but ARE explicitly defined in the 'DEFAULT' section since they carry precedence over the global defaults defined in the config.spec file (which should not ideally be edited by the user but rather represents the creator's fall-back default values in case an option is completely deleted by the user in the config file)
# Now we create a list of the options that are explicitly in DEFAULT but NOT in the specific account (Note the use of configNoSpec rather than config) :
listMissingDefaults = [ x for x in listDefaultOptions if x not in configNoSpec[ 'accounts' ][ account ].keys() ]
for key in listMissingDefaults :
servers[ account ][ key ] = config[ 'accounts' ][ 'DEFAULT' ][ key ]
# Now we read in the global settings:
globalSettings = {} # Create empty dictionary to populate
for key in config[ 'global' ].keys() :
globalSettings[ key ] = config[ 'global' ][ key ]
return servers, globalSettings
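# Illustrative sketch of the configuration file layout that setOptions() expects.
# The authoritative option names live in fetchheaders.conf.spec (not shown here),
# so the account-level keys below (host, username, folder) are assumptions used
# only for illustration; the section names and the DEFAULT-account precedence
# follow the code above.
#
#   [global]
#   maxThreads = 5
#   color = True
#
#   [accounts]
#       [[DEFAULT]]
#       folder = INBOX
#       [[Gmail]]
#       host = imap.gmail.com
#       username = someone
#
# An option missing from [[Gmail]] is first copied from [[DEFAULT]] (if explicitly
# set there) and otherwise falls back to the defaults in the spec file.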
def argParse() :
'''
This function reads in the arguments passed to the program, validates them and if validated returns a parser.parse_args() returned object which contains the various arguments passed and which can then be used by the program as it sees fit.
'''
import argparse # This module gives powerful argument parsing abilities along with auto-generation of --help output.
# Specify the various arguments that the program expects and validate them. Additional arguments can be added as required.
parser = argparse.ArgumentParser( description = "A python script which simultaneously polls multiple IMAP accounts to display the subjects of all or only unseen messages in the specified folder (INBOX by default) without downloading complete messages.\n For further details please read the man page." )
parser.add_argument( "-c", "--config", help = "Specify the name and path to the configuration file. If not specified the program will use the default configuration file in $HOME/.fetchheaders/fetchheaders.conf. Note: The configuration specification file (fetchheaders.conf.spec) should not be altered casually and the program will only look for it in $HOME/.fetchheaders/" )
# For --accounts and --exclude which we wish to be mutually exclusive optional arguments we create a mutually exclusive group within the parser to hold them.
group = parser.add_mutually_exclusive_group()
group.add_argument( "-a", "--accounts", help = "Specify the names of IMAP accounts to be polled as a comma-separated list. e.g. -a Gmail,Hotmail. Only accounts specified in the configuration file are allowed." )
group.add_argument( "-x", "--exclude", help = "Specify the names of the IMAP accounts which are NOT to be polled, as a comma-separated list. e.g. -x Gmail,Hotmail. Only accounts specified in the configuration file are allowed to be excluded." )
parser.add_argument( "-n", "--numsonly", help = "Flag: Only show the number of unseen and total number of messages for the specified folder for each account.", action = "store_true" )
parser.add_argument( "--noColor", help = "Flag: Do NOT allow colored output. Useful for shells that don't allow colored text or when the output needs to piped to another application since colored text is implemented by encapsulating the text in xterm color escape codes.", action = "store_true" )
parser.add_argument( "--oldestFirst", help = "Flag: Show oldest email first i.e. chronological order.", action = "store_true" )
parser.add_argument( "-A", "--showAll", help = "Flag: Show all emails in specified folder, not just unseen ones.", action = "store_true" )
parser.add_argument( "--showFlags", help = "Flag: Show mutt-style flags (in square brackets) to indicate new/unseen and deleted emails when ALL emails are displayed (i.e. -A is issued).", action = "store_true" )
parser.add_argument( "-t", "--threads", help = "Specify the maximum number of parallel threads the program will use to simultaneously access IMAP servers. Set to 1 for serial (non-parallel) behaviour.", type = int)
parser.add_argument( "-T", "--terminal", help = "Flag: Show results in the terminal. Do NOT use urwid.", action = "store_true" )
# Begin reading in arguments and validate them:
args = parser.parse_args() # args contains the values of arguments passed. If incorrect arguments are passed the problem will be stopped here and argparse will display the appropriate error and help message.
return args
def applyArgs( args, servers, globalSettings ) :
'''
This function accepts both the arguments read by the script and the 'servers' object (dictionary) created by setOptions(). It will apply the arguments sent via command-line to the 'servers' and 'globalSettings' object to create and return a modified version reflecting these changes.
'''
# This function is where we carry out all operations necessary to implement the settings specified by command-line arguments.
# -a, --acounts. Limit accounts to the specified ones:
if args.accounts : # True if -a or --accounts has been specified
# We must perform some error checking on the arguments passed to the --accounts optional argument
newServers = {} # Create a new dictionary we will populate ONLY with the specified accounts
for item in args.accounts.split( ',' ) : # We are expecting a comma-separated list
# We create a list of servers the START of whose names (lowercase) matches the item in the argument list currently under consideration
matching_servers = [x for x in servers.keys() if re.match('^' + item.lower(), x.lower())]
if matching_servers: # A match has occurred
for server in matching_servers: # All matching servers are added to the list displayed
newServers[ server ] = servers[ server ]
else: # No match has occurred. This is an error.
print( '\nError: ' + item + ' is not the beginning of a valid IMAP account name specified in the configuration file.' )
import sys
sys.exit(1)
servers = newServers
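        # For example, "-a G,Hot" would keep both "Gmail" and "Hotmail" (the
        # account names used in the --accounts help text above), since each
        # listed item is matched as a case-insensitive prefix of an account name.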
# -x, --exclude. Does NOT poll the accounts specified with this argument:
if args.exclude : # True if -x or --exclude has been specified<|fim▁hole|>
excludedAccounts = [] # Empty list which we will populate with the excluded accounts
newServers = {} # Empty dictionary with which we will construct the new 'servers' dictionary without the excluded accounts
for item in args.exclude.split( ',' ) : # We are expecting a comma-separated list
if not item in servers.keys() : # If this item in the comma-separated list is NOT an account specified in the configuration file
                print( '\nError: ' + item + ' is not a valid IMAP account name specified in the configuration file.' )
import sys
sys.exit(1)
else :
excludedAccounts.append( item )
# Now we remove the excluded accounts when we create the new 'servers' dictionary:
for account in servers.keys() :
if not account in excludedAccounts : # The current account is not in the excluded list and so can be added to the servers dictionary:
newServers[ account ] = servers[ account ]
# Place the newly constructed dicionary (with accounts excluded) in to the original 'servers' dictionary:
servers = newServers
# -n, --numsonly. If specified only the total and unseen number of messages is to be displayed. Similar to 'fetchmail -c'.
if args.numsonly :
for account in servers.keys() :
servers[ account ][ 'showOnlyNums' ] = True
# -T, --terminal. If specified the output is displayed on the terminal (stdout) and 'urwid' is NOT used.
if args.terminal:
globalSettings[ 'terminal' ] = True
else : globalSettings[ 'terminal' ] = False
# --no-color. If specified the output of the program should NOT be colored.
if args.noColor :
globalSettings[ 'color' ] = False
# -A, --showAll. Show all emails not just unseen ones.
if args.showAll :
for account in servers.keys() :
servers[ account ][ 'showUnseen' ] = False
globalSettings[ 'showFlags' ] = True # Flags are shown by default whenever ALL emails are viewed whether --showFlags is passed or not.
# --oldestFirst. Show oldest email first i.e. in chronological order.
if args.oldestFirst :
for account in servers.keys() :
servers[ account ][ 'latestEmailFirst' ] = False
# --showFlags. Show mutt-style flags (in square brackets) when all emails are being displayed.
if args.showFlags :
globalSettings[ 'showFlags' ] = True
# -t, --threads. Set max. number of parallel threads.
if args.threads :
globalSettings[ 'maxThreads' ] = args.threads
return servers, globalSettings
def applyGlobalSettings( globalSettings ) :
'''
This function applies the global settings defined in the dictionary 'globalSettings' (created using the configuration file and command-line arguments).
'''
# Apply maxThreads setting:
global maxThreads
maxThreads = globalSettings[ 'maxThreads' ]
# Apply showFlags settings:
global showFlags
showFlags = globalSettings[ 'showFlags' ]
# Apply color settings:
if globalSettings[ 'color' ] : # output is to be colored
global colorTitle, colorFlag, colorDate, colorFrom, colorSubjectSeen, colorSubjectUnseen # Accessing global text color variables
colorTitle = globalSettings[ 'colorTitle' ]
colorFlag = globalSettings[ 'colorFlag' ]
colorSubjectSeen = globalSettings[ 'colorSubjectSeen' ]
colorSubjectUnseen = globalSettings[ 'colorSubjectUnseen' ]
colorDate = globalSettings[ 'colorDate' ]
colorFrom = globalSettings[ 'colorFrom' ]
def display( out ) :
'''
Accepts an Output data structure and prints out the results to the screen.
Note: This function carries out all formatting for the output using the purely data-oriented Output object as input. The output is in a text format which can be piped forward
'''
from miscClasses import colorWidth as cW # Custom function that sets width of text fields and colors it.
print( cW( out.settings[ 'name' ] + ':', 12, colorTitle ), end = '' ) # Print name of account and allow for further text
if out.settings[ 'showNums' ] :
print( "( total: %d | unseen: %d )" % (out.numAll, out.numUnseen) )
print( '\n' )
# Preamble printed. Now start printing individual email information
if out.settings[ 'showUnseen' ] : # Show only unseen messages
for ii in range( len( out.emails ) ) :
email = out.emails[ ii ]
print( cW( str(ii + 1), out.numDigits, align = '>' ) + '. ' + cW( email.Date, 17, colorDate ) + ' ' + cW( email.From, 30, colorFrom ) + ' ' + cW( email.Subject, 120, colorSubjectUnseen, fill = False ) )
else : # Show ALL messages. Different formatting scheme.
if showFlags : # Global setting which declares that the flags associated with each message must be displayed
flags = lambda x : ' [ ' + cW( x, 2, colorFlag ) + '] '
else :
flags = lambda x : '. '
for ii in range( len( out.emails ) ) :
email = out.emails[ ii ]
if email.Seen : # Email has a Seen flag.
flag = ' '
colorSubject = colorSubjectSeen
else :
flag = 'N'
colorSubject = colorSubjectUnseen
print( cW( str(ii + 1), out.numDigits, align = '>' ) + flags( flag ) + cW( email.Date, 17, colorDate ) + ' ' + cW( email.From, 30, colorFrom ) + ' ' + cW( email.Subject, 120, colorSubject ) )
def main() :
'''
Main function that starts the execution of all of the code.
'''
args = argParse()
# Specify default locations for configuration and specification files:
import os
homeFolder = os.getenv( "HOME" ) # Basically the value in $HOME
packageFolder = '/usr/local/share/fetchheaders' # Location of folder containing all package files
# packageFolder = '.'
fileConf = homeFolder + '/.fetchheaders.conf'
fileSpec = packageFolder + '/fetchheaders.conf.spec' # Path to config specification file
# Check if a configuration file has been specified using the -c or --config flag.
if args.config : # A configuration file has been provided
fileConf = args.config
# Read in settings and options from configuration files :
servers, globalSettings = setOptions( fileConf, fileSpec )
# Override settings and options from command-line arguments :
servers, globalSettings = applyArgs( args, servers, globalSettings )
# Apply Global Settings. These are applied outside of pollAccount which acts on each account independantly.
applyGlobalSettings( globalSettings ) # Apply the global settings contained in the 'globalSettings' dictionary we created from the configuration file and command-line arguments
# Now we determine whether the output is intended to go to the terminal (stdout) straight or passed on to urwid
if globalSettings[ 'terminal' ]: # Do NOT use urwid
from miscClasses import threadedExec
for out in threadedExec( servers, maxThreads ):
if out.error: # If an error occurs while constructing the Output object the exception is caught and the error flag is set
from miscClasses import colorWidth as cW
print( cW( out.settings[ 'name' ] + ':', 12, colorTitle ), end = '' ) # We indicate in the output that an Error has occurred.
print( "Error!\n\n" )
else:
display(out)
else:
# Use urwid to display the results, interact with the display and possibly flag messages for deletion:
from urwidDisplay import urwidDisplay
# Create instance of the imported class to create and start the urwid loop to display emails
settings = { 'maxThreads': maxThreads, 'showFlags': showFlags }
urwidDisplay( servers, settings )
# Main execution of the program begins here:
main()<|fim▁end|> |
# We must perform some error checking on the arguments passed to the --exclude optional argument |
<|file_name|>pages.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 smurail
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
from weboob.exceptions import BrowserIncorrectPassword
from weboob.browser.pages import HTMLPage, JsonPage, pagination, LoggedPage
from weboob.browser.elements import ListElement, ItemElement, TableElement, method
from weboob.browser.filters.standard import CleanText, CleanDecimal, DateGuesser, Env, Field, Filter, Regexp, Currency, Date
from weboob.browser.filters.html import Link, Attr, TableCell
from weboob.capabilities.bank import Account, Investment
from weboob.capabilities.base import NotAvailable
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
from weboob.tools.compat import urljoin
from weboob.tools.capabilities.bank.investments import is_isin_valid
__all__ = ['LoginPage']
class UselessPage(HTMLPage):
pass
class PasswordCreationPage(HTMLPage):
def get_message(self):
xpath = '//div[@class="bienvenueMdp"]/following-sibling::div'
return '%s%s' % (CleanText(xpath + '/strong')(self.doc), CleanText(xpath, children=False)(self.doc))
class ErrorPage(HTMLPage):
pass
class SubscriptionPage(LoggedPage, JsonPage):
pass
class LoginPage(HTMLPage):
pass
class CMSOPage(HTMLPage):
@property
def logged(self):
if len(self.doc.xpath('//b[text()="Session interrompue"]')) > 0:
return False
return True
class AccountsPage(CMSOPage):
TYPES = {'COMPTE CHEQUES': Account.TYPE_CHECKING,
'COMPTE TITRES': Account.TYPE_MARKET,
"ACTIV'EPARGNE": Account.TYPE_SAVINGS,
"TRESO'VIV": Account.TYPE_SAVINGS,
}
@method
class iter_accounts(ListElement):
item_xpath = '//div[has-class("groupe-comptes")]//li'
class item(ItemElement):
klass = Account
class Type(Filter):
def filter(self, label):
for pattern, actype in AccountsPage.TYPES.items():
if label.startswith(pattern):
return actype
return Account.TYPE_UNKNOWN
obj__history_url = Link('.//a[1]')
obj_id = CleanText('.//span[has-class("numero-compte")]') & Regexp(pattern=r'(\d{3,}[\w]+)', default='')
obj_label = CleanText('.//span[has-class("libelle")][1]')
obj_currency = Currency('//span[has-class("montant")]')
obj_balance = CleanDecimal('.//span[has-class("montant")]', replace_dots=True)
obj_type = Type(Field('label'))
# Last numbers replaced with XX... or we have to send sms to get RIB.
obj_iban = NotAvailable
# some accounts may appear on multiple areas, but the area where they come from is indicated
obj__owner = CleanText('(./preceding-sibling::tr[@class="LnMnTiers"])[last()]')
def validate(self, obj):
if obj.id is None:
obj.id = obj.label.replace(' ', '')
return True
def on_load(self):
if self.doc.xpath('//p[contains(text(), "incident technique")]'):
raise BrowserIncorrectPassword("Vous n'avez aucun compte sur cet espace. " \
"Veuillez choisir un autre type de compte.")
class InvestmentPage(CMSOPage):
def has_error(self):
return CleanText('//span[@id="id_error_msg"]')(self.doc)
@method
class iter_accounts(ListElement):
item_xpath = '//table[@class="Tb" and tr[1][@class="LnTit"]]/tr[@class="LnA" or @class="LnB"]'
class item(ItemElement):
klass = Account
def obj_id(self):
area_id = Regexp(CleanText('(./preceding-sibling::tr[@class="LnMnTiers"][1])//span[@class="CelMnTiersT1"]'),<|fim▁hole|> return '%s.%s' % (area_id, acc_id)
return acc_id
def obj__formdata(self):
js = Attr('./td/a[1]', 'onclick', default=None)(self)
if js is None:
return
args = re.search(r'\((.*)\)', js).group(1).split(',')
form = args[0].strip().split('.')[1]
idx = args[2].strip()
idroot = args[4].strip().replace("'", "")
return (form, idx, idroot)
obj_url = Link('./td/a[1]', default=None)
def go_account(self, form, idx, idroot):
form = self.get_form(name=form)
form['indiceCompte'] = idx
form['idRacine'] = idroot
form.submit()
class CmsoTableElement(TableElement):
head_xpath = '//table[has-class("Tb")]/tr[has-class("LnTit")]/td'
item_xpath = '//table[has-class("Tb")]/tr[has-class("LnA") or has-class("LnB")]'
class InvestmentAccountPage(CMSOPage):
@method
class iter_investments(CmsoTableElement):
col_label = 'Valeur'
col_code = 'Code'
col_quantity = 'Qté'
col_unitvalue = 'Cours'
col_valuation = 'Valorisation'
col_vdate = 'Date cours'
class item(ItemElement):
klass = Investment
obj_label = CleanText(TableCell('label'))
obj_quantity = CleanDecimal(TableCell('quantity'), replace_dots=True)
obj_unitvalue = CleanDecimal(TableCell('unitvalue'), replace_dots=True)
obj_valuation = CleanDecimal(TableCell('valuation'), replace_dots=True)
obj_vdate = Date(CleanText(TableCell('vdate')), dayfirst=True, default=NotAvailable)
def obj_code(self):
if Field('label')(self) == "LIQUIDITES":
return 'XX-liquidity'
code = CleanText(TableCell('code'))(self)
return code if is_isin_valid(code) else NotAvailable
def obj_code_type(self):
return Investment.CODE_TYPE_ISIN if is_isin_valid(Field('code')(self)) else NotAvailable
class Transaction(FrenchTransaction):
PATTERNS = [(re.compile(r'^RET DAB (?P<dd>\d{2})/?(?P<mm>\d{2})(/?(?P<yy>\d{2}))? (?P<text>.*)'),
FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'CARTE (?P<dd>\d{2})/(?P<mm>\d{2}) (?P<text>.*)'),
FrenchTransaction.TYPE_CARD),
(re.compile(r'^(?P<category>VIR(EMEN)?T? (SEPA)?(RECU|FAVEUR)?)( /FRM)?(?P<text>.*)'),
FrenchTransaction.TYPE_TRANSFER),
(re.compile(r'^PRLV (?P<text>.*)( \d+)?$'), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^(CHQ|CHEQUE) .*$'), FrenchTransaction.TYPE_CHECK),
(re.compile(r'^(AGIOS /|FRAIS) (?P<text>.*)'), FrenchTransaction.TYPE_BANK),
(re.compile(r'^(CONVENTION \d+ |F )?COTIS(ATION)? (?P<text>.*)'),
FrenchTransaction.TYPE_BANK),
(re.compile(r'^REMISE (?P<text>.*)'), FrenchTransaction.TYPE_DEPOSIT),
(re.compile(r'^(?P<text>.*)( \d+)? QUITTANCE .*'),
FrenchTransaction.TYPE_ORDER),
(re.compile(r'^.* LE (?P<dd>\d{2})/(?P<mm>\d{2})/(?P<yy>\d{2})$'),
FrenchTransaction.TYPE_UNKNOWN),
(re.compile(r'^.* PAIEMENT (?P<dd>\d{2})/(?P<mm>\d{2}) (?P<text>.*)'),
FrenchTransaction.TYPE_UNKNOWN),
]
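    # Illustration with a made-up label: a raw wording such as
    # "RET DAB 03/07 AGENCE X" matches the first pattern above, so the
    # transaction is typed as TYPE_WITHDRAWAL and 03/07 is captured as dd/mm.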
class CmsoTransactionElement(ItemElement):
klass = Transaction
def condition(self):
return len(self.el) >= 5 and not self.el.get('id', '').startswith('libelleLong')
class HistoryPage(CMSOPage):
def get_date_range_list(self):
return [d for d in self.doc.xpath('//select[@name="date"]/option/@value') if d]
@pagination
@method
class iter_history(ListElement):
item_xpath = '//div[contains(@class, "master-table")]//ul/li'
def next_page(self):
pager = self.page.doc.xpath('//div[@class="pager"]')
if pager: # more than one page if only enough transactions
assert len(pager) == 1
next_links = pager[0].xpath('./span/following-sibling::a[@class="page"]')
if next_links:
url_next_page = Link('.')(next_links[0])
url_next_page = urljoin(self.page.url, url_next_page)
return self.page.browser.build_request(url_next_page)
class item(CmsoTransactionElement):
def date(selector):
return DateGuesser(Regexp(CleanText(selector), r'\w+ (\d{2}/\d{2})'), Env('date_guesser')) | Transaction.Date(selector)
            # CAUTION: this website writes a 'Date valeur' inside a div with a class == 'c-ope'
            # and a 'Date opération' inside a div with a class == 'c-val',
            # so I assume the 'c-val' class holds the real operation date and 'c-ope' the value date
obj_date = date('./div[contains(@class, "c-val")]')
obj_vdate = date('./div[contains(@class, "c-ope")]')
obj_raw = Transaction.Raw(Regexp(CleanText('./div[contains(@class, "c-libelle-long")]'), r'Libellé étendu (.+)'))
obj_amount = Transaction.Amount('./div[contains(@class, "c-credit")]', './div[contains(@class, "c-debit")]')
class UpdateTokenMixin(object):
def on_load(self):
if 'Authentication' in self.response.headers:
self.browser.token = self.response.headers['Authentication'].split(' ')[-1]
class SSODomiPage(JsonPage, UpdateTokenMixin):
def get_sso_url(self):
return self.doc['urlSSO']
class AuthCheckUser(HTMLPage):
pass<|fim▁end|> | r'\((\d+)\)', default='')(self)
acc_id = Regexp(CleanText('./td[1]'), r'(\d+)\s*(\d+)', r'\1\2')(self)
if area_id: |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>define(function(require, exports, module) {
exports.init = function(){<|fim▁hole|> }
function Manager(){
$("#manager").click(function(){
location.href = $(this).attr("data-href");
});
}
});<|fim▁end|> | Manager() |
<|file_name|>profesional.ts<|end_file_name|><|fim▁begin|>import * as mongoose from 'mongoose';
import { AndesDocWithAudit, AuditPlugin } from '@andes/mongoose-plugin-audit';
import * as direccionSchema from './direccion';
import * as contactoSchema from './contacto';
import { ObjSIISASchema, EspecialidadSIISASchema } from './siisa';
import { IProfesional } from '../interfaces/profesional.interface';
import { ITokenSearch, TokenSearch } from '@andes/mongoose-token-search';
const matriculacionSchema = new mongoose.Schema({
matriculaNumero: { type: Number, required: false },
libro: { type: String, required: false },
folio: { type: String, required: false },
inicio: Date,
baja: {
motivo: { type: String, required: false },
fecha: { type: String, required: false }
},
notificacionVencimiento: { type: Boolean, required: false },
fin: Date,
revalidacionNumero: Number
});
export const ProfesionalBaseSchema = new mongoose.Schema({
documento: { type: String, required: true },
sexo: { type: String, required: false },
nombre: { type: String, required: true },
apellido: { type: String, required: true },
});
export const ProfesionalSchema = ProfesionalBaseSchema.clone();
ProfesionalSchema.add({
activo: { type: Boolean, required: false },
habilitado: { type: Boolean, default: true },
nombre: { type: String, required: false },
apellido: { type: String, required: false },
tipoDocumento: { type: String, required: false },
documento: { type: String, required: false },
documentoVencimiento: { type: Date, required: false },
cuit: { type: String, required: false },
fechaNacimiento: { type: Date, required: false },
lugarNacimiento: { type: String, required: false },
fechaFallecimiento: { type: Date, required: false },
nacionalidad: { type: ObjSIISASchema, required: false },
sexo: { type: String, required: false },
contactos: [contactoSchema],<|fim▁hole|> validadoRenaper: { type: Boolean, default: false },
foto: { type: String, required: false },
fotoArchivo: { type: String, required: false },
firmas: [{
imgArchivo: { type: String, required: false },
fecha: { type: String, required: false },
}],
incluidoSuperintendencia: { type: Boolean, default: false },
formacionGrado: [{
profesion: { type: ObjSIISASchema, required: false },
entidadFormadora: { type: ObjSIISASchema, required: false },
titulo: { type: String, required: false },
tituloFileId: { type: String, required: false },
fechaTitulo: { type: Date, required: false },
fechaEgreso: { type: Date, required: false },
renovacion: { type: Boolean, default: false },
papelesVerificados: { type: Boolean, default: false },
matriculacion: [matriculacionSchema],
matriculado: { type: Boolean, default: false },
exportadoSisa: Boolean,
fechaDeInscripcion: Date
}],
formacionPosgrado: [{
profesion: { type: ObjSIISASchema, required: false },
institucionFormadora: { type: ObjSIISASchema, required: false },
especialidad: { type: EspecialidadSIISASchema, required: false },
fechaIngreso: { type: Date, required: false },
fechaEgreso: { type: Date, required: false },
tituloFileId: { type: String, required: false },
observacion: String,
certificacion: {
fecha: { type: Date, required: false },
modalidad: { type: ObjSIISASchema, required: false },
establecimiento: { type: ObjSIISASchema, required: false },
},
matriculacion: [{
matriculaNumero: { type: Number, required: false },
libro: { type: String, required: false },
folio: { type: String, required: false },
inicio: Date,
baja: {
motivo: { type: String, required: false },
fecha: { type: String, required: false }
},
notificacionVencimiento: { type: Boolean, required: false },
fin: Date,
revalidacionNumero: Number
}],
fechasDeAltas: [{ fecha: { type: Date, required: false } }],
matriculado: { type: Boolean, default: false },
revalida: { type: Boolean, default: false },
papelesVerificados: { type: Boolean, default: false },
fechaDeVencimiento: { type: Date, required: false },
exportadoSisa: Boolean,
tieneVencimiento: Boolean,
notas: [{ type: String, required: false }]
}],
sanciones: [{
numero: { type: Number, required: false },
sancion: {
id: Number,
nombre: String,
},
motivo: { type: String, required: false },
normaLegal: { type: String, required: false },
fecha: { type: Date, required: false },
vencimiento: { type: Date, required: false }
}],
notas: [{ type: String, required: false }],
rematriculado: { type: Number, default: false },
agenteMatriculador: { type: String, required: false },
supervisor: {
id: String,
nombreCompleto: String,
},
OtrosDatos: [{
matriculaProvincial: { type: Number, required: false },
folio: { type: String, required: false },
libro: { type: String, required: false },
anio: { type: Number, required: false }
}],
idRenovacion: { type: String, required: false },
documentoViejo: { type: Number, required: false },
turno: Date,
profesionalMatriculado: { type: Boolean, default: true },
    /* 'externa' means the professional was not registered through the matriculaciones app,
       as is the case for psychologists, kinesiologists, etc.
    */
profesionExterna: { type: ObjSIISASchema, required: false },
matriculaExterna: { type: String, required: false },
observaciones: { type: String, required: false },
documentos: [{
fecha: { type: Date, required: false },
tipo: { type: String, required: false },
archivo: {
id: String,
extension: String,
},
}],
});
// Virtuals
ProfesionalSchema.virtual('nombreCompleto').get(function () {
return this.apellido + ', ' + this.nombre;
});
ProfesionalSchema.virtual('fallecido').get(function () {
return this.fechaFallecimiento;
});
ProfesionalSchema.plugin(AuditPlugin);
ProfesionalSchema.plugin(
TokenSearch(['documento', 'nombre', 'apellido'])
);
ProfesionalSchema.index({ documento: 1 });
ProfesionalSchema.index({
apellido: 1,
nombre: 1
});
ProfesionalSchema.index({
'formacionGrado.profesion.codigo': 1
});
export type IProfesionalDoc = AndesDocWithAudit<IProfesional>;
export const Profesional = mongoose.model<IProfesionalDoc, ITokenSearch<IProfesionalDoc>>('profesional', ProfesionalSchema, 'profesional');
export const ProfesionalSubSchema = new mongoose.Schema({
id: mongoose.SchemaTypes.ObjectId,
nombre: String,
apellido: String,
documento: String
}, { _id: false });<|fim▁end|> | domicilios: [direccionSchema], |
<|file_name|>template.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010, Florian Ludwig <[email protected]>
# see LICENSE
"""Helpers for code generation based on genshi [0]
There are good code generator tools out there like cog [1].
But if you already use genshi in your project this module might
help you integrating code generation into your build and
deploy process using familar templating syntax.
If you're not using genshi you probably want to look at cog<|fim▁hole|>and similar tools.
[0] http://genshi.edgewall.org/
[1] http://nedbatchelder.com/code/cog/
"""
import os
import StringIO
import copy
import logging
from genshi.template import TemplateLoader, MarkupTemplate, NewTextTemplate
import genshi.template.loader
class ActionscriptTemplate(NewTextTemplate):
"""Template for langauges with /* ... */ commments
Should work for JavaScript, Action Script, c,..."""
def __init__(self, *args, **kwargs):
kwargs['delims'] = ('/*%', '%*/', '/*###', '###*/')
NewTextTemplate.__init__(self, *args, **kwargs)
class ShellStyleTemplate(NewTextTemplate):
"""Template for languages with # commentars"""
def __init__(self, *args, **kwargs):
kwargs['delims'] = ('#%', '%#', '##*', '*##')
NewTextTemplate.__init__(self, *args, **kwargs)
def get_template(fpath):
"""returns template class for given filename"""
if fpath.endswith('.css') or fpath.endswith('.as') or fpath.endswith('.js'):
return ActionscriptTemplate
elif fpath.endswith('.py') or fpath.endswith('.wsgi'):
return ShellStyleTemplate
elif fpath.endswith('.mxml'):
return MarkupTemplate
else:
logging.warn('WARNING: don\'t know the file type of "%s"' % fpath)
return NewTextTemplate
def numbered_file(fpath, mode='r'):
"""Add filenumbers to every line as comment
Returns filelike object
"""
_fileobj = open(fpath, mode)
tmpl_cls = get_template(fpath)
if tmpl_cls == ActionscriptTemplate:
comment_start = '/*'
comment_end = '*/'
last_symbole = ';'
elif tmpl_cls == MarkupTemplate:
comment_start = '<!--'
comment_end = '-->'
last_symbole = '>'
else:
print 'WARNING: no line numbers for "%s"' % fpath
return _fileobj
data = []
in_comment = False
in_hidden_comment = False
for number, line in enumerate(_fileobj.readlines()):
        line = line.rstrip()
if not in_comment and comment_start in line:
in_comment = True
s = line.find(comment_start) + len(comment_start)
if line[s:].lstrip().startswith('!'):
in_hidden_comment = True
if in_comment and comment_end in line:
in_comment = False
in_hidden_comment = False
if not line.endswith(last_symbole):
data.append(line)
continue
if in_comment:
line += comment_end
if line.rstrip().endswith('\\'):
# if the lines ends with a \ we might destroy the template syntax
continue
count_line = line.replace('\t', ' ')
white = 83 - len(count_line) if len(count_line) < 78 else 3
comment = comment_start + ' Line: %i ' + comment_end
if in_comment:
comment += comment_start
if in_hidden_comment:
comment += '!'
data.append(line
+ white*' '
+ (comment % (number+1)))
return StringIO.StringIO('\n'.join(data))
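# Illustration (hypothetical line and number): for a file handled by
# ActionscriptTemplate, a source line such as
#     foo();
# comes back from numbered_file() padded and annotated roughly as
#     foo();                 /* Line: 12 */
# MarkupTemplate files get '<!-- Line: n -->' instead, and any other file type
# is returned unchanged (with a warning), as implemented above.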
# monkey patch template loader
genshi.template.loader.open = numbered_file
class Handler(object):
"""Common handler for templates"""
def __init__(self, path, default_args={}):
if not isinstance(path, list):
path = [path]
self.loader = TemplateLoader(path)
self.default_args = default_args
def gen(self, src, dst, local_args={}):
print src, '->',
tmpl = self.loader.load(src, cls=get_template(src))
args = copy.copy(self.default_args)
args.update(local_args)
stream = tmpl.generate(**args)
print dst
data = stream.render()
# make sure we only touch file if we would change it
dst_data = open(dst).read() if os.path.exists(dst) else ''
if dst_data != data:
open(dst, 'w').write(data)<|fim▁end|> | |
<|file_name|>StrLookup.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.text;
import java.util.Map;
public abstract class StrLookup<V> {
public static StrLookup<?> noneLookup() {
return null;
}
public static StrLookup<String> systemPropertiesLookup() {
return null;
}
public static <V> StrLookup<V> mapLookup(final Map<String, V> map) {
return null;
}
public abstract String lookup(String key);<|fim▁hole|><|fim▁end|> |
} |
<|file_name|>DfsBProgramVerifierTest.java<|end_file_name|><|fim▁begin|>/*
* The MIT License
*
* Copyright 2017 michael.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package il.ac.bgu.cs.bp.bpjs.analysis;
import il.ac.bgu.cs.bp.bpjs.TestUtils;
import static il.ac.bgu.cs.bp.bpjs.TestUtils.eventNamesString;
import il.ac.bgu.cs.bp.bpjs.model.BProgram;
import il.ac.bgu.cs.bp.bpjs.execution.BProgramRunner;
import il.ac.bgu.cs.bp.bpjs.model.ResourceBProgram;
import il.ac.bgu.cs.bp.bpjs.execution.listeners.InMemoryEventLoggingListener;
import il.ac.bgu.cs.bp.bpjs.execution.listeners.PrintBProgramRunnerListener;
import org.junit.Test;
import static il.ac.bgu.cs.bp.bpjs.TestUtils.traceEventNamesString;
import static org.junit.Assert.*;
import il.ac.bgu.cs.bp.bpjs.model.StringBProgram;
import il.ac.bgu.cs.bp.bpjs.analysis.listeners.PrintDfsVerifierListener;
import il.ac.bgu.cs.bp.bpjs.analysis.violations.DeadlockViolation;
import il.ac.bgu.cs.bp.bpjs.analysis.violations.DetectedSafetyViolation;
import il.ac.bgu.cs.bp.bpjs.analysis.violations.JsErrorViolation;
import il.ac.bgu.cs.bp.bpjs.analysis.violations.Violation;
import il.ac.bgu.cs.bp.bpjs.model.BEvent;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import static java.util.stream.Collectors.joining;
/**
* @author michael
*/
public class DfsBProgramVerifierTest {
@Test
public void sanity() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/AAABTrace.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setDebugMode(true);
sut.setMaxTraceLength(3);
sut.setIterationCountGap(1);
sut.verify(program);
assertEquals( ExecutionTraceInspections.DEFAULT_SET, sut.getInspections() );
}
@Test
public void simpleAAABTrace_forgetfulStore() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/AAABTrace.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setProgressListener(new PrintDfsVerifierListener());
program.appendSource(Requirements.eventNotSelected("B"));
sut.setVisitedStateStore(new ForgetfulVisitedStateStore());
VerificationResult res = sut.verify(program);
assertTrue(res.isViolationFound());
assertEquals("AAAB", traceEventNamesString(res, ""));
}
@Test
public void simpleAAABTrace() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/AAABTrace.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setDebugMode(true);
sut.setProgressListener(new PrintDfsVerifierListener());
program.appendSource(Requirements.eventNotSelected("B"));
sut.setVisitedStateStore(new BThreadSnapshotVisitedStateStore());
VerificationResult res = sut.verify(program);
assertTrue(res.isViolationFound());
assertEquals("AAAB", traceEventNamesString(res, ""));
}
@Test
public void simpleAAABTrace_hashedNodeStore() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/AAABTrace.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setDebugMode(true);
sut.setProgressListener(new PrintDfsVerifierListener());
program.appendSource(Requirements.eventNotSelected("B"));
VisitedStateStore stateStore = new BProgramSnapshotVisitedStateStore();
sut.setVisitedStateStore(stateStore);
VerificationResult res = sut.verify(program);
assertTrue(res.isViolationFound());
assertEquals("AAAB", traceEventNamesString(res, ""));
//Add trivial getter check
VisitedStateStore retStore = sut.getVisitedStateStore();
assertSame(retStore, stateStore);
}
@Test
public void testAAABRun() {
BProgram program = new ResourceBProgram("DFSVerifierTests/AAABTrace.js");
BProgramRunner rnr = new BProgramRunner(program);
rnr.addListener(new PrintBProgramRunnerListener());
InMemoryEventLoggingListener eventLogger = rnr.addListener(new InMemoryEventLoggingListener());
rnr.run();
eventLogger.getEvents().forEach(System.out::println);
assertTrue(eventNamesString(eventLogger.getEvents(), "").matches("^(AAAB)+$"));
}
@Test
public void deadlockTrace() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/deadlocking.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setVisitedStateStore(new ForgetfulVisitedStateStore());
VerificationResult res = sut.verify(program);
assertTrue(res.isViolationFound());
assertTrue(res.getViolation().get() instanceof DeadlockViolation);
assertEquals("A", traceEventNamesString(res, ""));
}
@Test
public void testDeadlockSetting() throws Exception {
BProgram program = new ResourceBProgram("DFSVerifierTests/deadlocking.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.addInspection(ExecutionTraceInspections.FAILED_ASSERTIONS);
VerificationResult res = sut.verify(program);
assertFalse(res.isViolationFound());
}
@Test
public void deadlockRun() {
BProgram program = new ResourceBProgram("DFSVerifierTests/deadlocking.js");
BProgramRunner rnr = new BProgramRunner(program);
rnr.addListener(new PrintBProgramRunnerListener());
InMemoryEventLoggingListener eventLogger = rnr.addListener(new InMemoryEventLoggingListener());
rnr.run();
eventLogger.getEvents().forEach(System.out::println);
assertTrue(eventNamesString(eventLogger.getEvents(), "").matches("^A$"));
}
@Test
public void testTwoSimpleBThreads() throws Exception {
BProgram bprog = new StringBProgram(
"bp.registerBThread('bt1', function(){bp.sync({ request:[ bp.Event(\"STAM1\") ] });});" +
"bp.registerBThread('bt2', function(){bp.sync({ request:[ bp.Event(\"STAM2\") ] });});");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.addInspection(ExecutionTraceInspections.FAILED_ASSERTIONS);
VerificationResult res = sut.verify(bprog);
assertFalse(res.isViolationFound());
assertEquals(4, res.getScannedStatesCount());
assertEquals(4, res.getScannedEdgesCount());
}
@Test(timeout = 2000)
public void testTwoLoopingBThreads() throws Exception {
BProgram bprog = new StringBProgram("bp.registerBThread('bt1', function(){" + " while(true){\n"
+ " bp.sync({ request:[ bp.Event(\"STAM1\") ] });\n" + "}});\n"
+ "bp.registerBThread('bt2', function(){" + " while(true){\n"
+ " bp.sync({ request:[ bp.Event(\"STAM2\") ] });\n" + "}});\n" + "");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.setDebugMode(true);
VerificationResult res = sut.verify(bprog);
assertFalse(res.isViolationFound());
assertEquals(1, res.getScannedStatesCount());
}
@Test(timeout = 2000)
public void testVariablesInBT() throws Exception {
BProgram bprog = new StringBProgram("bp.registerBThread('bt1', function(){" + //
" for(var i=0; i<10; i++){\n" + //
" bp.sync({ waitFor:[ bp.Event(\"X\") ] });\n" + //
" }\n" + //
" bp.sync({ block:[ bp.Event(\"X\") ] });\n" + //
"});\n" + //
"bp.registerBThread('bt2', function(){" + //
" while(true){\n" + //
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" + //
"}});\n" + //
"" //
);
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.setDebugMode(true);
VerificationResult res = sut.verify(bprog);
assertTrue(res.isViolationFound());
assertTrue(res.getViolation().get() instanceof DeadlockViolation);
assertEquals(11, res.getScannedStatesCount());
}
@Test(timeout = 2000)
public void testVariablesEquailtyInBT() throws Exception {
BProgram bprog = new StringBProgram( //
"bp.registerBThread('bt1', function(){" + //
" bp.sync({ waitFor:[ bp.Event(\"X\") ] });\n" + // 1
" bp.sync({ waitFor:[ bp.Event(\"X\") ] });\n" + // 2
" bp.sync({ waitFor:[ bp.Event(\"X\") ] });\n" + // 3
" bp.sync({ waitFor:[ bp.Event(\"X\") ] });\n" + // 4
"});\n" +
"bp.registerBThread('bt2', function(){" + //
" while(true){\n" + //
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" + //
"}});\n");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.setDebugMode(true);
VerificationResult res = sut.verify(bprog);
assertFalse(res.isViolationFound());
// 10 syncs while bt1 is alive, 1 sync per bt2's infinite loop alone.
assertEquals(5, res.getScannedStatesCount());
// in this case only one option per state
assertEquals(5, res.getScannedEdgesCount());
}
@Test(timeout = 2000)
public void testMaxTraceLength() throws Exception {
String source = "bp.registerBThread('bt1', function(){" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.sync({ request:[ bp.Event(\"X\") ] });\n" +
" bp.ASSERT(false, \"\");" +
"});";
BProgram bprog = new StringBProgram(source);
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.setDebugMode(true);
VerificationResult res = sut.verify(bprog);
assertTrue(res.isViolationFound());
sut.setMaxTraceLength(6);
res = sut.verify(new StringBProgram(source));
assertFalse(res.isViolationFound());
}
@Test(timeout = 6000)
public void testJavaVariablesInBT() throws Exception {
BProgram bprog = new StringBProgram( //
"bp.registerBThread('bt1', function(){" + //
" var sampleArray=[1,2,3,4,5];\n" +
" while(true) \n" + //
" for(var i=0; i<10; i++){\n" + //
" bp.sync({ request:[ bp.Event(\"X\"+i) ] });\n" + //
" if (i == 5) {bp.sync({ request:[ bp.Event(\"X\"+i) ] });}\n" + //
" }\n" + //
"});\n" +
"var xs = bp.EventSet( \"X\", function(e){\r\n" +
" return e.getName().startsWith(\"X\");\r\n" +
"} );\r\n" +
"" +
"bp.registerBThread('bt2', function(){" + //
" var lastE = {name:\"what\"};" + //
" while(true) {\n" + //
" var e = bp.sync({ waitFor: xs});\n" + //
" lastE = e;" + //
" if( e.name == lastE.name) { bp.ASSERT(false,\"Poof\");} " + //
"}});\n"
);
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setIterationCountGap(1);
sut.setProgressListener(new PrintDfsVerifierListener());
sut.setDebugMode(true);
VerificationResult res = sut.verify(bprog);
assertTrue(res.isViolationFound());
assertTrue(res.getViolation().get() instanceof DetectedSafetyViolation);
assertEquals(2, res.getScannedStatesCount());
assertEquals(1, res.getScannedEdgesCount());
}
/**
* Running this transition system. State 3 should be arrived at twice.
* +-»B1»--+
* | |
* -»1--»A»--2 3--»C»----+
* | | | |
* | +-»B2»--+ |
* +------------«------------+
*
* event trace "AB1-" is the result of execution
*
* -»1-»A»-2-»B1»-3
*
* Whose stack is:
*
* +---+----+---+
* |1,A|2,B1|3,-|
* +---+----+---+
*
* With C selected, we get to
*
* +---+----+---+
* |1,A|2,B1|3,C| + cycleTo 0
* +---+----+---+
*
* @throws Exception
*/
@Test
public void testDoublePathDiscovery() throws Exception {
BProgram bprog = new StringBProgram( //
"bp.registerBThread(\"round\", function(){\n" +
" while( true ) {\n" +
" bp.sync({request:bp.Event(\"A\")});\n" +
" bp.sync({waitFor:[bp.Event(\"B1\"), bp.Event(\"B2\")]});\n" +<|fim▁hole|> " }\n" +
"});\n" +
"\n" +
"bp.registerBThread(\"round-s1\", function(){\n" +
" while( true ) {\n" +
" bp.sync({waitFor:bp.Event(\"A\")});\n" +
" bp.sync({request:bp.Event(\"B1\"), waitFor:bp.Event(\"B2\")});\n" +
" }\n" +
"});\n" +
"\n" +
"bp.registerBThread(\"round-s2\", function(){\n" +
" while( true ) {\n" +
" bp.sync({waitFor:bp.Event(\"A\")});\n" +
" bp.sync({request:bp.Event(\"B2\"), waitFor:bp.Event(\"B1\")});\n" +
" }\n" +
"});");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
final Set<String> foundTraces = new HashSet<>();
sut.addInspection( ExecutionTraceInspection.named("DFS trace captures", et->{
String eventTrace = et.getNodes().stream()
.map( n->n.getEvent() )
.map( o->o.map(BEvent::getName).orElse("-") )
.collect( joining("") );
System.out.println("eventTrace = " + eventTrace);
foundTraces.add(eventTrace);
return Optional.empty();
}));
sut.setProgressListener(new PrintDfsVerifierListener());
VerificationResult res = sut.verify(bprog);
assertEquals(3, res.getScannedStatesCount());
assertEquals(4, res.getScannedEdgesCount());
Set<String> expected1 = new TreeSet<>(Arrays.asList("-","A-","AB1-","AB1C", "AB2-"));
Set<String> expected2 = new TreeSet<>(Arrays.asList("-","A-","AB2-","AB2C", "AB1-"));
String eventTraces = foundTraces.stream().sorted().collect( joining(", ") );
assertTrue("Traces don't match expected: " + eventTraces,
foundTraces.equals(expected1) || foundTraces.equals(expected2) );
System.out.println("Event traces: " + eventTraces);
}
/**
* Program graph is same as above, but state "3" is duplicated since a b-thread
* holds the last event that happened in a variable.
*
* +------------«------------+
* | |
* v +-»B1»--3'---»C»--+
* | |
* -»1--»A»--2
* | |
* ^ +-»B2»--3''--»C»--+
* | |
* +------------«------------+
*
*
*
* @throws Exception
*/
@Test
public void testDoublePathWithVariablesDiscovery() throws Exception {
BProgram bprog = new StringBProgram("doubleWithVar", //
"bp.registerBThread(\"round\", function(){\n" +
" var le=null;\n" +
" while( true ) {\n" +
" bp.sync({request:bp.Event(\"A\")});\n" +
" le = bp.sync({waitFor:[bp.Event(\"B1\"), bp.Event(\"B2\")]});\n" +
" bp.sync({request:bp.Event(\"C\")});\n" +
" le=null;\n " +
" }\n" +
"});\n" +
"\n" +
"bp.registerBThread(\"round-s1\", function(){\n" +
" while( true ) {\n" +
" bp.sync({waitFor:bp.Event(\"A\")});\n" +
" bp.sync({request:bp.Event(\"B1\"), waitFor:bp.Event(\"B2\")});\n" +
" }\n" +
"});\n" +
"\n" +
"bp.registerBThread(\"round-s2\", function(){\n" +
" while( true ) {\n" +
" bp.sync({waitFor:bp.Event(\"A\")});\n" +
" bp.sync({request:bp.Event(\"B2\"), waitFor:bp.Event(\"B1\")});\n" +
" }\n" +
"});");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
final Set<String> foundTraces = new HashSet<>();
sut.addInspection( ExecutionTraceInspection.named("DFS trace captures", et->{
String eventTrace = et.getNodes().stream()
.map( n->n.getEvent() )
.map( o->o.map(BEvent::getName).orElse("-") )
.collect( joining("") );
System.out.println("eventTrace = " + eventTrace);
foundTraces.add(eventTrace);
return Optional.empty();
}));
sut.setVisitedStateStore( new BProgramSnapshotVisitedStateStore() );
sut.setProgressListener(new PrintDfsVerifierListener());
VerificationResult res = sut.verify(bprog);
assertEquals(4, res.getScannedStatesCount());
assertEquals(5, res.getScannedEdgesCount());
Set<String> expectedTraces = new TreeSet<>(Arrays.asList("-","A-","AB1-","AB1C","AB2-","AB2C"));
assertEquals("Traces don't match expected: " + foundTraces, expectedTraces, foundTraces );
}
@Test
public void testJavaScriptError() throws Exception {
BProgram bprog = new StringBProgram(
"bp.registerBThread( function(){\n"
+ " bp.sync({request:bp.Event(\"A\")});\n"
+ " bp.sync({request:bp.Event(\"A\")});\n"
+ " bp.sync({request:bp.Event(\"A\")});\n"
+ " var myNullVar;\n"
+ " myNullVar.isNullAndSoThisInvocationShouldCrash();\n"
+ " bp.sync({request:bp.Event(\"A\")});\n"
+ "});"
);
final AtomicBoolean errorCalled = new AtomicBoolean();
final AtomicBoolean errorMadeSense = new AtomicBoolean();
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setProgressListener(new DfsBProgramVerifier.ProgressListener() {
@Override public void started(DfsBProgramVerifier vfr) {}
@Override public void iterationCount(long count, long statesHit, DfsBProgramVerifier vfr) {}
@Override public void maxTraceLengthHit(ExecutionTrace aTrace, DfsBProgramVerifier vfr) {}
@Override public void done(DfsBProgramVerifier vfr) {}
@Override
public boolean violationFound(Violation aViolation, DfsBProgramVerifier vfr) {
errorCalled.set(aViolation instanceof JsErrorViolation );
JsErrorViolation jsev = (JsErrorViolation) aViolation;
errorMadeSense.set(jsev.decsribe().contains("isNullAndSoThisInvocationShouldCrash"));
System.out.println(jsev.getThrownException().getMessage());
return true;
}
});
sut.verify( bprog );
assertTrue( errorCalled.get() );
assertTrue( errorMadeSense.get() );
}
/**
* Test that even with forgetful storage, a circular trace does not lead
* to an infinite run.
* @throws java.lang.Exception
*/
@Test//(timeout=6000)
public void testCircularTraceDetection_forgetfulStorage() throws Exception {
String bprog = "bp.registerBThread(function(){\n"
+ "while (true) {\n"
+ " bp.sync({request:bp.Event(\"X\")});"
+ " bp.sync({request:bp.Event(\"Y\")});"
+ " bp.sync({request:bp.Event(\"Z\")});"
+ " bp.sync({request:bp.Event(\"W\")});"
+ " bp.sync({request:bp.Event(\"A\")});"
+ " bp.sync({request:bp.Event(\"B\")});"
+ "}"
+ "});";
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setVisitedStateStore( new ForgetfulVisitedStateStore() );
final AtomicInteger cycleToIndex = new AtomicInteger(Integer.MAX_VALUE);
final AtomicReference<String> lastEventName = new AtomicReference<>();
sut.addInspection(ExecutionTraceInspection.named("Cycle", t->{
if ( t.isCyclic() ) {
cycleToIndex.set( t.getCycleToIndex() );
lastEventName.set( t.getLastEvent().get().getName() );
System.out.println(TestUtils.traceEventNamesString(t, ", "));
}
return Optional.empty();
}));
VerificationResult res = sut.verify(new StringBProgram(bprog));
System.out.println("states: " + res.getScannedStatesCount());
assertEquals( 0, cycleToIndex.get() );
assertEquals( "B", lastEventName.get() );
}
@Test
public void testThreadStorageEquality() throws Exception {
BProgram program = new ResourceBProgram("hotColdThreadMonitor.js");
DfsBProgramVerifier sut = new DfsBProgramVerifier();
sut.setProgressListener(new PrintDfsVerifierListener());
VisitedStateStore stateStore = new BThreadSnapshotVisitedStateStore();
sut.setVisitedStateStore(stateStore);
VerificationResult res = sut.verify(program);
assertEquals( 9, res.getScannedStatesCount() );
assertEquals( 9, stateStore.getVisitedStateCount() );
}
}<|fim▁end|> | " bp.sync({request:bp.Event(\"C\")});\n" + |
<|file_name|>display.py<|end_file_name|><|fim▁begin|>from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import camera
import time
class Display(object):
# Inheritance convenience functions
def init(self): pass
def close(self): pass
def mouse(self, mouseButton, buttonState, x, y): pass
def mouseMotion(self, x, y, dx, dy): pass
def passiveMouseMotion(self, x, y, dx, dy): pass
def keyboard(self, key, x, y): pass
def specialKeys(self, key, x, y): pass
def timerFired(self, value): pass
def draw(self): pass
# Initialization function<|fim▁hole|> self.frameName = frameName
self.timerDelay = 20
self.clearColor = (135.0/255, 206.0/255, 250.0/255, 1)
self.defaultColor = (1, 1, 1)
# Camera positioning
self.pos = (0, 0, 0)
self.ypr = (0, 0, 0)
self.init()
# Set up graphics
self.initGL()
self.initGLUT()
self.camera = camera.Camera(self.width, self.height)
# For mouse motion
self._mouseX = None
self._mouseY = None
# One-time GL commands
def initGL(self):
glClearColor(*self.clearColor)
# Initialize the window manager (GLUT)
def initGLUT(self):
glutInit()
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
glutInitWindowSize(*self.frameSize)
glutCreateWindow(self.frameName)
# Register all the convenience functions
glutDisplayFunc(self.drawWrapper)
glutIdleFunc(self.drawWrapper)
glutTimerFunc(self.timerDelay, self.timerFired, 0)
glutMouseFunc(self.mouse)
glutMotionFunc(self.mouseMotionWrapper)
glutPassiveMotionFunc(self.passiveMouseMotionWrapper)
glutKeyboardFunc(self.keyboard)
glutSpecialFunc(self.specialKeys)
glutReshapeFunc(self.reshape)
# Try to register a close function (fall back to a different one)
try:
glutCloseFunc(self.close)
except:
glutWMCloseFunc(self.close)
# GL commands executed before drawing
def preGL(self):
glShadeModel(GL_FLAT)
glEnable(GL_DEPTH_TEST)
# Set up colors and clear buffers
glClearColor(*self.clearColor)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glColor3f(*self.defaultColor)
glLoadIdentity()
# Commands after GL is done
def postGL(self):
glutSwapBuffers()
time.sleep(1/60.0)
# Wrapper to re-register timer event
def timerFiredWrapper(self, value):
self.timerFired(value)
glutTimerFunc(self.timerDelay, self.timerFired, value + 1)
# Wrapper to handle as much GL as possible
def drawWrapper(self):
self.preGL()
# Let the camera draw the view
self.camera.draw(self.draw, self.pos, self.ypr)
self.postGL()
# Wrapper to pass change in position as well as position
# Only called when mouse motion and button pressed
def mouseMotionWrapper(self, x, y):
if(self._mouseX == None or self._mouseY == None):
(self._mouseX, self._mouseY) = (x, y)
(dx, dy) = (x - self._mouseX, y - self._mouseY)
self.mouseMotion(x, y, dx, dy)
(self._mouseX, self._mouseY) = (x, y)
# Wrapper to pass change in position as well as position
# Called when mouse motion and not button pressed
def passiveMouseMotionWrapper(self, x, y):
if(self._mouseX == None or self._mouseY == None):
(self._mouseX, self._mouseY) = (x, y)
(dx, dy) = (x - self._mouseX, y - self._mouseY)
self.passiveMouseMotion(x, y, dx, dy)
(self._mouseX, self._mouseY) = (x, y)
# Update when resizing the window
def reshape(self, width, height):
if(self.width != width or self.height != height):
glutReshapeWindow(width, height)
self.camera.width = width
self.camera.height = height
# Run the GL
def run(self):
glutMainLoop()<|fim▁end|> | def __init__(self, width = 1280, height = 720, frameName = "OpenGL"):
self.frameSize = (self.width, self.height) = (width, height) |
<|file_name|>prservice.py<|end_file_name|><|fim▁begin|>import unittest
import os
import logging
import re
import shutil
import datetime
import oeqa.utils.ftools as ftools
from oeqa.selftest.base import oeSelfTest
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
from oeqa.utils.decorators import testcase
from oeqa.utils.network import get_free_port
class BitbakePrTests(oeSelfTest):
@classmethod
def setUpClass(cls):
cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
def get_pr_version(self, package_name):
package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
package_data = ftools.read_file(package_data_file)
find_pr = re.search("PKGR: r[0-9]+\.([0-9]+)", package_data)
self.assertTrue(find_pr, "No PKG revision found in %s" % package_data_file)
return int(find_pr.group(1))
def get_task_stamp(self, package_name, recipe_task):
stampdata = get_bb_var('STAMP', target=package_name).split('/')
prefix = stampdata[-1]
package_stamps_path = "/".join(stampdata[:-1])
stamps = []
for stamp in os.listdir(package_stamps_path):
find_stamp = re.match("%s\.%s\.([a-z0-9]{32})" % (re.escape(prefix), recipe_task), stamp)
if find_stamp:
stamps.append(find_stamp.group(1))
self.assertFalse(len(stamps) == 0, msg="Could not find stamp for task %s for recipe %s" % (recipe_task, package_name))
self.assertFalse(len(stamps) > 1, msg="Found multiple %s stamps for the %s recipe in the %s directory." % (recipe_task, package_name, package_stamps_path))
return str(stamps[0])
def increment_package_pr(self, package_name):
inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\"\n}" % datetime.datetime.now()
self.write_recipeinc(package_name, inc_data)
res = bitbake(package_name, ignore_status=True)
self.delete_recipeinc(package_name)
self.assertEqual(res.status, 0, msg=res.output)
self.assertTrue("NOTE: Started PRServer with DBfile" in res.output, msg=res.output)
def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
self.write_config(config_package_data)
config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
self.append_config(config_server_data)
def run_test_pr_service(self, package_name, package_type='rpm', track_task='do_package', pr_socket='localhost:0'):
self.config_pr_tests(package_name, package_type, pr_socket)
self.increment_package_pr(package_name)
pr_1 = self.get_pr_version(package_name)
stamp_1 = self.get_task_stamp(package_name, track_task)
self.increment_package_pr(package_name)
pr_2 = self.get_pr_version(package_name)
stamp_2 = self.get_task_stamp(package_name, track_task)
self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
def run_test_pr_export_import(self, package_name, replace_current_db=True):
self.config_pr_tests(package_name)
self.increment_package_pr(package_name)
pr_1 = self.get_pr_version(package_name)
exported_db_path = os.path.join(self.builddir, 'export.inc')
export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
if replace_current_db:
current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
os.remove(current_db_path)
import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
os.remove(exported_db_path)
self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
self.increment_package_pr(package_name)
pr_2 = self.get_pr_version(package_name)
self.assertTrue(pr_2 - pr_1 == 1, "Step between same pkg. revision is greater than 1")
@testcase(930)
def test_import_export_replace_db(self):
self.run_test_pr_export_import('m4')
@testcase(931)
def test_import_export_override_db(self):
self.run_test_pr_export_import('m4', replace_current_db=False)
@testcase(932)
def test_pr_service_rpm_arch_dep(self):
self.run_test_pr_service('m4', 'rpm', 'do_package')
@testcase(934)
def test_pr_service_deb_arch_dep(self):
self.run_test_pr_service('m4', 'deb', 'do_package')
@testcase(933)
def test_pr_service_ipk_arch_dep(self):
self.run_test_pr_service('m4', 'ipk', 'do_package')
@testcase(935)
def test_pr_service_rpm_arch_indep(self):
self.run_test_pr_service('xcursor-transparent-theme', 'rpm', 'do_package')
@testcase(937)
def test_pr_service_deb_arch_indep(self):
self.run_test_pr_service('xcursor-transparent-theme', 'deb', 'do_package')
<|fim▁hole|> self.run_test_pr_service('xcursor-transparent-theme', 'ipk', 'do_package')
@testcase(1419)
def test_stopping_prservice_message(self):
port = get_free_port()
runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --start' % port)
ret = runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --stop' % port)
self.assertEqual(ret.status, 0)<|fim▁end|> | @testcase(936)
def test_pr_service_ipk_arch_indep(self): |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import sys
import os
import warnings
import ruamel.yaml as yaml
from fnmatch import fnmatch
__author__ = "Pymatgen Development Team"
__email__ ="[email protected]"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="[email protected]"
__version__ = "2019.7.2"
SETTINGS_FILE = os.path.join(os.path.expanduser("~"), ".pmgrc.yaml")
def _load_pmg_settings():
try:
with open(SETTINGS_FILE, "rt") as f:
d = yaml.safe_load(f)
except IOError:
# If there are any errors, default to using environment variables
# if present.
d = {}<|fim▁hole|> for k, v in os.environ.items():
if k.startswith("PMG_"):
d[k] = v
elif k in ["VASP_PSP_DIR", "MAPI_KEY", "DEFAULT_FUNCTIONAL"]:
d["PMG_" + k] = v
return dict(d)
SETTINGS = _load_pmg_settings()
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
# import spglib
# from . import optimization, util
# del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from pymatgen.core import *
from .electronic_structure.core import Spin, Orbital
from .ext.matproj import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
def get_structure_from_mp(formula):
"""
Convenience method to get a crystal from the Materials Project database via
the API. Requires PMG_MAPI_KEY to be set.
Args:
formula (str): A formula
Returns:
(Structure) The lowest energy structure in Materials Project with that
formula.
"""
m = MPRester()
entries = m.get_entries(formula, inc_structure="final")
if len(entries) == 0:
raise ValueError("No structure with formula %s in Materials Project!" %
formula)
elif len(entries) > 1:
warnings.warn("%d structures with formula %s found in Materials "
"Project. The lowest energy structure will be returned." %
(len(entries), formula))
return min(entries, key=lambda e: e.energy_per_atom).structure
if sys.version_info < (3, 5):
warnings.warn("""
Pymatgen will drop Py2k support from v2019.1.1. Pls consult the documentation
at https://www.pymatgen.org for more details.""")
def loadfn(fname):
"""
Convenience method to perform quick loading of data from a filename. The
type of object returned depends the file type.
Args:
fname (string): A filename.
Returns:
Note that fname is matched using unix-style, i.e., fnmatch.
(Structure) if *POSCAR*/*CONTCAR*/*.cif
(Vasprun) *vasprun*
(obj) if *json* (passthrough to monty.serialization.loadfn)
"""
if (fnmatch(fname, "*POSCAR*") or fnmatch(fname, "*CONTCAR*") or
".cif" in fname.lower()) or fnmatch(fname, "*.vasp"):
return Structure.from_file(fname)
elif fnmatch(fname, "*vasprun*"):
from pymatgen.io.vasp import Vasprun
return Vasprun(fname)
elif fnmatch(fname, "*.json*"):
from monty.serialization import loadfn
return loadfn(fname)<|fim▁end|> | |
<|file_name|>nothing_test.go<|end_file_name|><|fim▁begin|>package nothing
import (
"testing"
)<|fim▁hole|>
func TestPop(t *testing.T) {
var manage TimerManager
var node HeapNode
node.Time = 1
var node1 HeapNode
node1.Time = 2
manage.Push(&node1)
manage.Push(&node)
min := manage.Pop()
if min == nil {
t.Fatal("Pop returned nil")
}
n := min.(*HeapNode)
t.Log(n.Time)
}<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from distutils.core import setup
execfile('modlunky/version.py')
with open('requirements.txt') as requirements:
required = requirements.read().splitlines()
kwargs = {
"name": "modlunky",
"version": str(__version__),
"packages": ["modlunky"],
"scripts": ["bin/modlunky"],
"description": "Library and Command Line Tool for Spelunky.",
"author": "Gary M. Josack",
"maintainer": "Gary M. Josack",
"author_email": "[email protected]",
"maintainer_email": "[email protected]",
"license": "MIT",
"url": "https://github.com/gmjosack/modlunky",
"download_url": "https://github.com/gmjosack/modlunky/archive/master.tar.gz",
"classifiers": [
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
]
}
<|fim▁hole|>
setup(**kwargs)<|fim▁end|> | if required:
kwargs["install_requires"] = required |
<|file_name|>EditPropertyConditionDialog.js<|end_file_name|><|fim▁begin|>/**
* Created by Jacky.Gao on 2017-02-09.
*/
import {alert} from '../MsgBox.js';
export default class EditPropertyConditionDialog{
constructor(conditions){
this.conditions=conditions;
this.dialog=$(`<div class="modal fade" role="dialog" aria-hidden="true" style="z-index: 11001">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">
×
</button>
<h4 class="modal-title">
${window.i18n.dialog.editPropCondition.title}
</h4>
</div>
<div class="modal-body"></div>
<div class="modal-footer"></div>
</div>
</div>
</div>`);
const body=this.dialog.find('.modal-body'),footer=this.dialog.find(".modal-footer");
this.init(body,footer);
}
init(body,footer){
const _this=this;
this.joinGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.relation}</label></div>`);
this.joinSelect=$(`<select class="form-control" style="display: inline-block;width:430px;">
<option value="and">${window.i18n.dialog.editPropCondition.and}</option>
<option value="or">${window.i18n.dialog.editPropCondition.or}</option>
</select>`);
this.joinGroup.append(this.joinSelect);
body.append(this.joinGroup);
const leftGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.leftValue}</label></div>`);
this.leftTypeSelect=$(`<select class="form-control" style="display: inline-block;width: inherit">
<option value="current">${window.i18n.dialog.editPropCondition.currentValue}</option>
<option value="property">${window.i18n.dialog.editPropCondition.property}</option>
<option value="expression">${window.i18n.dialog.editPropCondition.expression}</option>
</select>`);
leftGroup.append(this.leftTypeSelect);
this.propertyGroup=$(`<span style="margin-left: 10px"><label>${window.i18n.dialog.editPropCondition.propName}</label></span>`);
this.propertySelect=$(`<select class="form-control" style="display: inline-block;width:320px;"></select>`);
this.propertyGroup.append(this.propertySelect);
leftGroup.append(this.propertyGroup);
body.append(leftGroup);
this.exprGroup=$(`<span style="margin-left: 10px"><label>${window.i18n.dialog.editPropCondition.expr}</label></span>`);
this.exprEditor=$(`<input type="text" style="display: inline-block;width:320px;" class="form-control">`);
this.exprGroup.append(this.exprEditor);
leftGroup.append(this.exprGroup);
this.exprEditor.change(function(){
const val=$(this).val();
const url=window._server+'/designer/conditionScriptValidation';
$.ajax({
url,
type:'POST',
data:{content:val},
success:function(errors){
if(errors.length>0){
alert(`${val} ${window.i18n.dialog.editPropCondition.syntaxError}`);
}
}
});
});
this.leftTypeSelect.change(function(){
const val=$(this).val();
if(val==='current'){
_this.exprGroup.hide();
_this.propertyGroup.hide();
}else if(val==='property'){
_this.exprGroup.hide();
_this.propertyGroup.show();
}else{
_this.propertyGroup.hide();
_this.exprGroup.show();
}
});
const operatorGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.operator}</label></div>`);
this.operatorSelect=$(`<select class="form-control" style="display: inline-block;width:490px;">
<option value=">">${window.i18n.dialog.editPropCondition.greater}</option>
<option value=">=">${window.i18n.dialog.editPropCondition.greaterEquals}</option>
<option value="<">${window.i18n.dialog.editPropCondition.less}</option>
<option value="<=">${window.i18n.dialog.editPropCondition.lessEquals}</option>
<option value="==">${window.i18n.dialog.editPropCondition.equals}</option>
<option value="!=">${window.i18n.dialog.editPropCondition.notEquals}</option>
<option value="in">${window.i18n.dialog.editPropCondition.in}</option>
<option value="like">${window.i18n.dialog.editPropCondition.like}</option>
</select>`);
operatorGroup.append(this.operatorSelect);
body.append(operatorGroup);
const valueGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.valueExpr}</label></div>`);
this.valueEditor=$(`<input type="text" class="form-control" style="display: inline-block;width:477px;">`);
valueGroup.append(this.valueEditor);
body.append(valueGroup);
this.valueEditor.change(function(){
const val=$(this).val();
const url=window._server+'/designer/conditionScriptValidation';
$.ajax({
url,
type:'POST',
data:{content:val},
success:function(errors){
if(errors.length>0){
alert(`${val} ${window.i18n.dialog.editPropCondition.syntaxError}`);
}
}
});
});
const button=$(`<button class="btn btn-default">${window.i18n.dialog.editPropCondition.ok}</button>`);
button.click(function(){
let property=_this.propertySelect.val(),op=_this.operatorSelect.val(),value=_this.valueEditor.val(),join=_this.joinSelect.val(),type=_this.leftTypeSelect.val(),expr=_this.exprEditor.val();
if (type === 'property') {
if (property === '') {<|fim▁hole|> if(expr===''){
alert(`${window.i18n.dialog.editPropCondition.leftValueExpr}`);
return;
}
property=expr;
}else{
property = null;
}
if(type==='current'){
type="property";
}
if (op === '') {
alert(`${window.i18n.dialog.editPropCondition.selectOperator}`);
return;
}
if (value === '') {
alert(`${window.i18n.dialog.editPropCondition.inputExpr}`);
return;
}
if (_this.condition) {
if (_this.condition.join) {
_this.callback.call(_this,type, property, op, value, join);
} else {
_this.callback.call(_this,type, property, op, value);
}
} else if (_this.conditions.length > 0) {
_this.callback.call(_this,type, property, op, value, join);
} else {
_this.callback.call(_this,type, property, op, value);
}
_this.dialog.modal('hide');
});
footer.append(button);
}
show(callback,fields,condition){
this.callback=callback;
this.condition=condition;
this.type='current';
if(condition){
this.type=condition.type;
if(condition.join){
this.joinGroup.show();
}else{
this.joinGroup.hide();
}
}else{
if(this.conditions.length>0){
this.joinGroup.show();
}else{
this.joinGroup.hide();
}
}
this.propertySelect.empty();
for(let field of fields){
this.propertySelect.append(`<option>${field.name}</option>`);
}
if(condition){
if(this.type==='expression'){
this.leftTypeSelect.val("expression");
this.exprEditor.val(condition.left);
this.propertyGroup.hide();
this.exprGroup.show();
}else{
if(condition.left && condition.left!==''){
this.propertySelect.val(condition.left);
this.leftTypeSelect.val("property");
this.propertyGroup.show();
}else{
this.leftTypeSelect.val("current");
this.propertyGroup.hide();
}
this.exprGroup.hide();
}
this.operatorSelect.val(condition.operation || condition.op);
this.valueEditor.val(condition.right);
this.joinSelect.val(condition.join);
}else{
this.leftTypeSelect.val("current");
this.propertyGroup.hide();
this.exprGroup.hide();
}
this.dialog.modal('show');
}
}<|fim▁end|> | alert(`${window.i18n.dialog.editPropCondition.selectProp}`);
return;
}
} else if(type==='expression') { |
<|file_name|>any.hpp<|end_file_name|><|fim▁begin|>/**
* @file
* Declares the any type.
*/
#pragma once
<|fim▁hole|>
namespace puppet { namespace runtime { namespace types {
// Forward declaration of recursion_guard
struct recursion_guard;
/**
* Represents the Puppet Any type.
*/
struct any
{
/**
* Gets the name of the type.
* @return Returns the name of the type (i.e. Any).
*/
static char const* name();
/**
* Creates a generalized version of the type.
* @return Returns the generalized type.
*/
values::type generalize() const;
/**
* Determines if the given value is an instance of this type.
* @param value The value to determine if it is an instance of this type.
* @param guard The recursion guard to use for aliases.
* @return Returns true if the given value is an instance of this type or false if not.
*/
bool is_instance(values::value const& value, recursion_guard& guard) const;
/**
* Determines if the given type is assignable to this type.
* @param other The other type to check for assignability.
* @param guard The recursion guard to use for aliases.
* @return Returns true if the given type is assignable to this type or false if the given type is not assignable to this type.
*/
bool is_assignable(values::type const& other, recursion_guard& guard) const;
/**
* Writes a representation of the type to the given stream.
* @param stream The stream to write to.
* @param expand True to specify that type aliases should be expanded or false if not.
*/
void write(std::ostream& stream, bool expand = true) const;
};
/**
* Stream insertion operator for any type.
* @param os The output stream to write the type to.
* @param type The type to write.
* @return Returns the given output stream.
*/
std::ostream& operator<<(std::ostream& os, any const& type);
/**
* Equality operator for any.
* @param left The left type to compare.
* @param right The right type to compare.
* @return Always returns true (Any type is always equal to Any).
*/
bool operator==(any const& left, any const& right);
/**
* Inequality operator for any.
* @param left The left type to compare.
* @param right The right type to compare.
* @return Always returns false (Any type is always equal to Any).
*/
bool operator!=(any const& left, any const& right);
/**
* Hashes the any type.
* @param type The any type to hash.
* @return Returns the hash value for the type.
*/
size_t hash_value(any const& type);
}}} // namespace puppet::runtime::types<|fim▁end|> | #include "../values/forward.hpp"
#include <ostream> |
<|file_name|>generate_queries.py<|end_file_name|><|fim▁begin|>"""
Generate possible queries from the Gates Foundation grant database.
There are 4 filters and >11K possibilities, but Bill returns a max of 1000 results per query combo.
So the hope is that by using all potential queries, we will get everything. Otherwise, their
system is broken too!
"""
import json
import requests
from itertools import product
from gates_constants import PAYLOAD, HEADERS, URL
def get_inital_parameters():
"""Request an unfiltered set of data from grant database to get a list of all filters"""
# Request
r = requests.post(URL, data=json.dumps(PAYLOAD), headers=HEADERS)
r.raise_for_status()
# Return raw JSON
try:
return r.json()
except:
raise Exception("Json response empty in intial request!!!")
def extract_facets(raw_json_response):
"""Extract facets dictionaries from response"""
facets_dict = {}
for f in raw_json_response["facets"]:
facets_dict[f["field"]] = f["items"]
return facets_dict<|fim▁hole|> """
Return one dictionary with four entries; each key is a filter category
with the corresponding list of all possible values.
Exclude sets that would return below the minimum_count.
From quick testing:
When minimum_count = 1, unique_queries = 19,403
When minimum_count = 5, unique_queries = 18,521
When minimum_count = 100, unique_queries = 6551
When minimum_count = 150, unique_queries = 4031
"""
master_dict = {}
for key, value in facets_dict.items():
category_set = {key: set()}
for v in value:
if v["count"] >= minimum_count:
category_set[key].add(v["name"])
master_dict.update(category_set)
for k, v in master_dict.items():
master_dict[k] = list(v)
return master_dict
def generate_unique_queries(fields_dict):
"""
Return list of strings, where each string is a unique
possible query for the grant data set
"""
# create sets of each category's options
# Reference: "fieldQueries":
# (@gfocategories==\"US Program\")
# (@gfotopics==\"College-Ready\")
# (@gfoyear==\"2009 and earlier\")
# (@gforegions==\"North America\")
master_query_set = set()
template_query = "(@%s==\"%s\")"
# Convert each plain string into it's query string equivalent
query_string_dict = {}
for category, fields in fields_dict.items():
query_string_dict[category] = []
for f in fields:
query_string = template_query % (category, f)
query_string_dict[category].append(query_string)
# Add each query on it's own (without combining with other
# possibilities)
master_query_set.add(query_string)
# Also, add an empty entry to each dictionary
# (this is so the itertools.product function will return
# possible combos including 0)
query_string_dict[category].append("")
# Generate the product of all possible queries as well
# Why yes, what I did here does make me feel like a fool.
list_of_tuples = product(
query_string_dict.values()[0],
query_string_dict.values()[1],
query_string_dict.values()[2],
query_string_dict.values()[3]
)
# Make tuple into strings
query_strings = []
# Remove empty strings
strings_gone = [tuple(y for y in x if y != "") for x in list_of_tuples]
for item in strings_gone:
# remove empty tuples
if len(item) > 0:
# convert them to actual query strings
query_strings.append(" and ".join(str(i) for i in item))
master_query_set.update(query_strings)
return list(master_query_set)
def save_unique_queries(minimum_count=150):
"""Save/update unique queries on disk"""
print "Retrieving unique query strings, minimum filter count = %d" % minimum_count
raw_json = get_inital_parameters()
facets_dict = extract_facets(raw_json)
fields_dict = create_field_dicts(facets_dict, minimum_count)
unique_queries = generate_unique_queries(fields_dict)
print "Returning a set of %d unique queries" % len(unique_queries)
with open("unique_queries.json", 'w') as outfile:
json.dump(unique_queries, outfile)
if __name__ == "__main__":
queries = save_unique_queries()<|fim▁end|> |
def create_field_dicts(facets_dict, minimum_count): |
<|file_name|>tag-align-shape.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
//
// See issue #1535
tag a_tag {<|fim▁hole|>}
type t_rec = {
c8: u8,
t: a_tag
};
pub fn main() {
let x = {c8: 22u8, t: a_tag(44u64)};
let y = fmt!("%?", x);
debug!("y = %s", y);
assert!(y == "(22, a_tag(44))");
}<|fim▁end|> | a_tag(u64); |
<|file_name|>detector.js<|end_file_name|><|fim▁begin|>var detector = (function () {
function foodCollision(snake, food, ui) {
var snakeHeadX = snake.parts[0].getPosition().x;
var snakeHeadY = snake.parts[0].getPosition().y;
var foodX = food.getPosition().x;
var foodY = food.getPosition().y;
if(snakeHeadX === foodX && snakeHeadY === foodY){
return true;
}
}
function playFieldCollision(selector, snake, ui) {
if(selector instanceof HTMLCanvasElement){
this.canvas = selector;
} else if(typeof selector === 'String' || typeof selector === 'string'){
this.canvas = document.querySelector(selector);
}
var w = this.canvas.width;
var h = this.canvas.height;
if(snake.parts[0].x >= w){
snake.parts[0].x = 0;
ui.updateCollision('Playfield');
}
if(snake.parts[0].y >= h){
snake.parts[0].y = 0;
ui.updateCollision('Playfield');
}
if(snake.parts[0].x < 0){
snake.parts[0].x = w - 20;
ui.updateCollision('Playfield');
}
if(snake.parts[0].y < 0){
snake.parts[0].y = h - 20;
ui.updateCollision('Playfield');
}
}
function tailCollision (snake, ui) {
for (var i = 1; i < snake.parts.length; i++) {
if(snake.parts[0].x === snake.parts[i].x
&& snake.parts[0].y === snake.parts[i].y){
snake.parts = snake.parts.slice(0,2);
snake.parts[0].x = 0;<|fim▁hole|> ui.scoreValue = 0;
ui.updateScore(0);
ui.updateCollision('Tail');
console.log('Self Tail Collision')
}
}
}
function wallCollision(wallArray, snake, ui) {
for (var i = 1; i < wallArray.length; i++) {
if(snake.parts[0].x === wallArray[i].x
&& snake.parts[0].y === wallArray[i].y){
snake.parts = snake.parts.slice(0,2);
snake.parts[0].x = 0;
snake.parts[0].y = 0;
snake.direction = 1;
ui.updateCollision('Wall');
ui.scoreValue = 0;
ui.updateScore(0);
console.log('Wall Collision')
}
}
}
return {
isFoodCollision: foodCollision,
playFieldCollision: playFieldCollision,
tailCollision: tailCollision,
wallCollision: wallCollision
}
}());<|fim▁end|> | snake.parts[0].y = 0;
snake.direction = 1; |
<|file_name|>itusozluk.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'Eren Turkay <[email protected]>'
from scrapy import log
from scrapy.http import Request
from scrapy.exceptions import CloseSpider
from datetime import datetime
from . import GenericSozlukSpider
from ..items import Girdi
class ItusozlukBaslikSpider(GenericSozlukSpider):
name = 'itusozluk'
def __init__(self, **kwargs):
super(ItusozlukBaslikSpider, self).__init__(**kwargs)
self.allowed_domains = ['itusozluk.com']
def parse(self, response):
self.log("PARSING: %s" % response.request.url, level=log.INFO)
items_to_scrape = response.xpath('//*[@id="entry-list"]/li/article')
if len(items_to_scrape) == 0:
self.log("!!! No item to parse found. It may indicate a problem with HTML !!!",
level=log.ERROR)
raise CloseSpider('no_item_found')
for sel in items_to_scrape:
girdi_id = sel.xpath('./footer/div[@class="entrymenu"]/@data-info').extract()[0].split(',')[0]
baslik_id = response.xpath('//*[@id="canonical_url"]/@value').re(r'--(\d*)')[0]
baslik = response.xpath('//*[@id="title"]/a/text()').extract()[0]
date = sel.xpath('./footer/div[2]/time/a/text()').re(r'\d{2}[.]\d{2}[.]\d{4} \d{2}[:]\d{2}')[0]
text = sel.xpath('string(./div)').extract()[0]
nick = sel.css('a.yazarlink').xpath('text()').extract()[0]
item = Girdi()
item['source'] = self.name
item['baslik'] = baslik
item['girdi_id'] = girdi_id
item['baslik_id'] = baslik_id
item['datetime'] = datetime.strptime(date, '%d.%m.%Y %H:%M')<|fim▁hole|> item['nick'] = nick
yield item
current_url = response.request.url.split('/sayfa')[0]
title_re = response.xpath('//title').re(r'sayfa (\d*)')
current_page = int(title_re[0]) if title_re else 1
page_count = int(response.xpath('//a[@rel="last"]')[0].xpath('text()').extract()[0])
next_page = current_page + 1
if page_count >= next_page:
# if current_page < 2:
yield Request('%s/sayfa/%s' % (current_url, next_page))<|fim▁end|> | item['text'] = text |
<|file_name|>LSAPI.H<|end_file_name|><|fim▁begin|>/*++
Copyright (c) 1995-1996 Microsoft Corporation
Module Name:
lsapi.h
Abstract:
This module defines the 32-Bit Licensing API.
The Licensing API is still pre-release (i.e. beta) code.
Revision History:
--*/
#ifndef LSAPI_H
#define LSAPI_H
#define LS_API_ENTRY WINAPI
/***************************************************/
/* Standard LSAPI C status codes */
/***************************************************/
#define LS_SUCCESS ((LS_STATUS_CODE) 0x0)
#define LS_BAD_HANDLE ((LS_STATUS_CODE) 0xC0001001)
#define LS_INSUFFICIENT_UNITS ((LS_STATUS_CODE) 0xC0001002)
#define LS_SYSTEM_UNAVAILABLE ((LS_STATUS_CODE) 0xC0001003)
#define LS_LICENSE_TERMINATED ((LS_STATUS_CODE) 0xC0001004)
<|fim▁hole|>#define LS_TEXT_UNAVAILABLE ((LS_STATUS_CODE) 0x80001009)
#define LS_UNKNOWN_STATUS ((LS_STATUS_CODE) 0xC000100A)
#define LS_BAD_INDEX ((LS_STATUS_CODE) 0xC000100B)
#define LS_LICENSE_EXPIRED ((LS_STATUS_CODE) 0x8000100C)
#define LS_BUFFER_TOO_SMALL ((LS_STATUS_CODE) 0xC000100D)
#define LS_BAD_ARG ((LS_STATUS_CODE) 0xC000100E)
/* Microsoft provider-specific error codes */
// The name of the current user could not be retrieved.
#define LS_NO_USERNAME ( (LS_STATUS_CODE) 0xC0002000 )
// An unexpected error occurred in a system call.
#define LS_SYSTEM_ERROR ( (LS_STATUS_CODE) 0xC0002001 )
// The provider failed to properly initialize.
#define LS_SYSTEM_INIT_FAILED ( (LS_STATUS_CODE) 0xC0002002 )
// An internal error has occurred in the Micrsoft provider.
#define LS_INTERNAL_ERROR ( (LS_STATUS_CODE) 0xC0002002 )
/***************************************************/
/* standard LS API c datatype definitions */
/***************************************************/
typedef unsigned long LS_STATUS_CODE;
typedef unsigned long LS_HANDLE;
typedef char LS_STR;
typedef unsigned long LS_ULONG;
typedef long LS_LONG;
typedef void LS_VOID;
typedef struct {
LS_STR MessageDigest[16]; /* a 128-bit message digest */
} LS_MSG_DIGEST;
typedef struct {
LS_ULONG SecretIndex; /* index of secret, X */
LS_ULONG Random; /* a random 32-bit value, R */
LS_MSG_DIGEST MsgDigest; /* the message digest h(in,R,S,Sx) */
} LS_CHALLDATA;
typedef struct {
LS_ULONG Protocol; /* Specifies the protocol */
LS_ULONG Size; /* size of ChallengeData structure */
LS_CHALLDATA ChallengeData; /* challenge & response */
} LS_CHALLENGE;
/***************************************************/
/* Standard LSAPI C constant definitions */
/***************************************************/
#define LS_DEFAULT_UNITS ((LS_ULONG) 0xFFFFFFFF)
#define LS_ANY ((LS_STR FAR *) "")
#define LS_USE_LAST ((LS_ULONG) 0x0800FFFF)
#define LS_INFO_NONE ((LS_ULONG) 0)
#define LS_INFO_SYSTEM ((LS_ULONG) 1)
#define LS_INFO_DATA ((LS_ULONG) 2)
#define LS_UPDATE_PERIOD ((LS_ULONG) 3)
#define LS_LICENSE_CONTEXT ((LS_ULONG) 4)
#define LS_BASIC_PROTOCOL ((LS_ULONG) 0x00000001)
#define LS_SQRT_PROTOCOL ((LS_ULONG) 0x00000002)
#define LS_OUT_OF_BAND_PROTOCOL ((LS_ULONG) 0xFFFFFFFF)
#define LS_NULL ((LS_VOID FAR *) NULL)
// maximum length of a provider name returned by LSEnumProviders()
#define LS_MAX_PROVIDER_NAME ( 255 )
// if returned by a call to LSQuery() against LS_UPDATE_PERIOD,
// indicates that no interval recommendation is being made
#define LS_NO_RECOMMENDATION ( (LS_ULONG) 0xFFFFFFFF )
/***************************************************/
/* Standard LSAPI C function definitions */
/***************************************************/
LS_STATUS_CODE
LS_API_ENTRY
LSRequest( LS_STR *LicenseSystem,
LS_STR *PublisherName,
LS_STR *ProductName,
LS_STR *Version,
LS_ULONG TotUnitsReserved,
LS_STR *LogComment,
LS_CHALLENGE *Challenge,
LS_ULONG *TotUnitsGranted,
LS_HANDLE *LicenseHandle );
LS_STATUS_CODE
LS_API_ENTRY
LSRelease( LS_HANDLE LicenseHandle,
LS_ULONG TotUnitsConsumed,
LS_STR *LogComment);
LS_STATUS_CODE
LS_API_ENTRY
LSUpdate( LS_HANDLE LicenseHandle,
LS_ULONG TotUnitsConsumed,
LS_ULONG TotUnitsReserved,
LS_STR *LogComment,
LS_CHALLENGE *Challenge,
LS_ULONG *TotUnitsGranted);
LS_STATUS_CODE
LS_API_ENTRY
LSGetMessage( LS_HANDLE LicenseHandle,
LS_STATUS_CODE Value,
LS_STR *Buffer,
LS_ULONG BufferSize);
LS_STATUS_CODE
LS_API_ENTRY
LSQuery( LS_HANDLE LicenseHandle,
LS_ULONG Information,
LS_VOID *InfoBuffer,
LS_ULONG BufferSize,
LS_ULONG *ActualBufferSize);
LS_STATUS_CODE
LS_API_ENTRY
LSEnumProviders( LS_ULONG Index,
LS_STR *Buffer);
LS_VOID
LS_API_ENTRY
LSFreeHandle( LS_HANDLE LicenseHandle );
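/*
   A minimal usage sketch of the standard calls declared above (illustrative
   only: the publisher/product strings are placeholders and error handling is
   abbreviated):

       LS_HANDLE      hLicense;
       LS_ULONG       granted;
       LS_STATUS_CODE status;

       status = LSRequest( LS_ANY, "Publisher", "Product", "1.0",
                           LS_DEFAULT_UNITS, LS_NULL, LS_NULL,
                           &granted, &hLicense );
       if ( status == LS_SUCCESS )
       {
           // ... perform the licensed work ...
           LSRelease( hLicense, LS_DEFAULT_UNITS, LS_NULL );
       }
       LSFreeHandle( hLicense );
*/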
/***************************************************/
/* Extension LSAPI C function definitions */
/***************************************************/
LS_STATUS_CODE
LS_API_ENTRY
LSInstall( LS_STR * ProviderPath );
/*++
Routine Description:
Install the given DLL as a license system provider.
NOTE: This API is a Microsoft extension to the LSAPI standard.
Arguments:
ProviderPath (LS_STR *)
Path to the provider DLL to install. This should be a full
path, and the DLL should be in the %SystemRoot%\System32
directory.
Return Value:
(LS_STATUS_CODE)
LS_SUCCESS
Provider is already installed or was successfully added.
LS_BAD_ARG
The parameters passed to the function were invalid.
other
An error occurred while attempting to install the provider.
--*/
/***************************************************/
/* Extension LSAPI C function definitions */
/* (these will be supported only for the BETA SDK) */
/***************************************************/
// license types (node assignment, user assignment, or concurrent use assignment)
typedef DWORD LS_LICENSE_TYPE;
#define LS_LICENSE_TYPE_NODE ( 0 )
#define LS_LICENSE_TYPE_USER ( 1 )
#define LS_LICENSE_TYPE_SERVER ( 2 )
LS_STATUS_CODE
LS_API_ENTRY
LSLicenseUnitsSet( LS_STR * LicenseSystem,
LS_STR * PublisherName,
LS_STR * ProductName,
LS_STR * Version,
LS_LICENSE_TYPE LicenseType,
LS_STR * UserName,
LS_ULONG NumUnits,
LS_ULONG NumSecrets,
LS_ULONG * Secrets );
/*++
Routine Description:
Set the number of units for the given license to the designated value.
NOTE: This API is a Microsoft extension to the LSAPI standard, and
WILL ONLY BE SUPPORTED FOR THE BETA RELEASE OF THE LSAPI SDK.
Thereafter, licenses must be added using the common certicate
format (CCF). APIs will be exposed to allow licenses to be
auotmatically added by an application's SETUP program.
Arguments:
LicenseSystem (LS_STR *)
License system to which to set the license information. If LS_ANY,
the license will be offered to each installed license system until
one returns success.
PublisherName (LS_STR *)
Publisher name for which to set the license info.
ProductName (LS_STR *)
Product name for which to set the license info.
Version (LS_STR *)
Product version for which to set the license info.
LicenseType (LS_LICENSE_TYPE)
Type of license for which to set the license info.
UserName (LS_STR *)
User for which to set the license info. If LS_NULL and the the license
type is LS_LICENSE_TYPE_USER, the license info will be set for the
user corresponding to the current thread.
NumUnits (LS_ULONG)
Units purchased for this license.
NumSecrets (LS_ULONG)
Number of application-specific secrets corresponding to this license.
Secrets (LS_ULONG *)
Array of application-specific secrets corresponding to this license.
Return Value:
(LS_STATUS_CODE)
LS_SUCCESS
License successfully installed.
LS_BAD_ARG
The parameters passed to the function were invalid.
other
An error occurred while attempting to install the license.
--*/
LS_STATUS_CODE
LS_API_ENTRY
LSLicenseUnitsGet( LS_STR * LicenseSystem,
LS_STR * PublisherName,
LS_STR * ProductName,
LS_STR * Version,
LS_STR * UserName,
LS_ULONG * NumUnits );
/*++
Routine Description:
Get the number of units for the given license.
NOTE: This API is a Microsoft extension to the LSAPI standard, and
WILL ONLY BE SUPPORTED FOR THE BETA RELEASE OF THE LSAPI SDK.
Thereafter, licenses must be accessed using the common certificate
format (CCF).
Arguments:
LicenseSystem (LS_STR *)
License system for which to get the license information. If LS_ANY,
each installed license system will be queried until one returns success.
PublisherName (LS_STR *)
Publisher name for which to get the license info.
ProductName (LS_STR *)
Product name for which to get the license info.
Version (LS_STR *)
Product version for which to get the license info.
UserName (LS_STR *)
User for which to get the license info. If LS_NULL and the license
type is LS_LICENSE_TYPE_USER, license info will be retrieved for the
user corresponding to the current thread.
NumUnits (LS_ULONG *)
On return, the number of units purchased for this license.
Return Value:
(LS_STATUS_CODE)
LS_SUCCESS
Success.
LS_BAD_ARG
The parameters passed to the function were invalid.
other
An error occurred while attempting to retrieve the license.
--*/
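/*
 * Illustrative usage sketch (not part of the original header): reading the
 * purchased unit count back with LSLicenseUnitsGet. The strings mirror the
 * hypothetical values used in the previous sketch; LS_ANY and LS_NULL are
 * again assumed to be defined earlier in this header.
 */
#if 0
void ExampleQueryUnits( void )
{
    LS_ULONG       numUnits = 0;
    LS_STATUS_CODE status;

    status = LSLicenseUnitsGet( LS_ANY,
                                (LS_STR *)"Sample Publisher",
                                (LS_STR *)"Sample Product",
                                (LS_STR *)"1.0",
                                LS_NULL,        /* current thread's user, if relevant */
                                &numUnits );

    if ( status == LS_SUCCESS )
    {
        /* numUnits now holds the number of units purchased for this license. */
    }
}
#endif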
#endif /* LSAPI_H */<|fim▁end|> | #define LS_AUTHORIZATION_UNAVAILABLE ((LS_STATUS_CODE) 0xC0001005)
#define LS_LICENSE_UNAVAILABLE ((LS_STATUS_CODE) 0xC0001006)
#define LS_RESOURCES_UNAVAILABLE ((LS_STATUS_CODE) 0xC0001007)
#define LS_NETWORK_UNAVAILABLE ((LS_STATUS_CODE) 0xC0001008)
|
<|file_name|>events.rs<|end_file_name|><|fim▁begin|>// extern crate sdl2;
macro_rules! struct_events {
(
keyboard: { $( $k_alias:ident : $k_sdl:ident ),* },
// match against a pattern
else: { $( $e_alias:ident : $e_sdl:pat ),* }
) => {
use sdl2::EventPump;
pub struct ImmediateEvents {
// for every keyboard event, we have an Option<bool>
// Some(true) => was just pressed
// Some(false) => was just released
// None => nothing happening right now.
$( pub $k_alias: Option<bool> , )*
$( pub $e_alias : bool ),* ,
resize: Option<(u32, u32)>
}
impl ImmediateEvents {
pub fn new() -> ImmediateEvents {
ImmediateEvents {
// when initialized, nothing has happened yet, so all are
// set to None
$( $k_alias: None , )*
$( $e_alias: false ),* ,
resize: None
}
}
}
pub struct Events {
pump: EventPump,
pub now: ImmediateEvents,
// true => pressed
// false => not pressed
$( pub $k_alias: bool ),*
}
impl Events {
pub fn new(pump: EventPump) -> Events {
Events {
pump: pump,
now: ImmediateEvents::new(),
// by default, initialize every key as not pressed
$( $k_alias: false ),*
}
}
pub fn pump(&mut self, renderer: &mut ::sdl2::render::Renderer) {
self.now = ImmediateEvents::new();
use sdl2::event::Event::*;
use sdl2::event::WindowEvent::*;
use sdl2::keyboard::Keycode::*;
for event in self.pump.poll_iter() {
match event {
Window { window_id, win_event: Resized(x, y), .. } => {
println!("Resized window {} to {}, {}", window_id, x, y);
self.now.resize = Some(renderer.output_size().unwrap());
},
KeyDown { keycode, .. } => match keycode {
// $( .. ),* containing $k_sdl and $k_alias means:
// "for every element ($k_alias : $k_sdl) pair,
// check whether the keycode is Some($k_sdl). If
// it is, then set the $k_alias fields to true."
$(
Some($k_sdl) => {
// prevent multiple presses when keeping a
// key down; was it previously pressed or not?
if !self.$k_alias {
// key pressed
self.now.$k_alias = Some(true);
}
self.$k_alias = true;
}
),*
_ => {}
},
KeyUp { keycode, .. } => match keycode {
$(
Some($k_sdl) => {
// key released<|fim▁hole|> }
),*
_ => {}
},
$(
$e_sdl => {
self.now.$e_alias = true;
}
)*,
_ => {}
}
}
}
}
}
}<|fim▁end|> | self.now.$k_alias = Some(false);
self.$k_alias = false; |
<|file_name|>argparse.go<|end_file_name|><|fim▁begin|>package argparse
// Copyright (c) 2017 by Gilbert Ramirez <[email protected]>
import (
"fmt"
"io"
"os"
)
type ArgumentParser struct {
// If this is set, the help statement produced when --help is requested
// is written here instead of to os.Stdout.
Stdout io.Writer
// If this is set, the usage statement produced when a ParseErr is
// encountered is written here instead of to os.Stderr.
Stderr io.Writer
// Allow the user to modify strings produced by argparse.
// This is essential for i18n
Messages Messages
// The switch strings that can invoke help
HelpSwitches []string
// The root Command object.
Root *Command
// The first time a parse is run, a finalization step needs to be
// performed to fill out inherited Arguments. This flag ensures
// we do that only once.
finalized bool
}
// Create a new ArgumentParser, with the Command as its root Command
func New(cmd *Command) *ArgumentParser {
ap := &ArgumentParser{
Stdout: os.Stdout,
Stderr: os.Stderr,
Messages: DefaultMessages_en,
HelpSwitches: []string{"-h", "--help"},
Root: cmd,
}
cmd.init(nil, ap)
if cmd.Name == "" {
cmd.Name = os.Args[0]
}
return ap
}
// Add an argument to the root command
func (self *ArgumentParser) Add(arg *Argument) {
self.Root.Add(arg)
}
// Add a command to the root command
func (self *ArgumentParser) New(c *Command) *Command {
return self.Root.New(c)
}
// Parse the os.Argv arguments and return, having filled out Values.
// On a request for help (-h), print the help and exit with os.Exit(0).
// On a user input error, print the error message and exit with os.Exit(1).
func (self *ArgumentParser) Parse() {
results := self.parseArgv(os.Args[1:])
cmd := results.triggeredCommand
if results.helpRequested {
helpString := self.helpString(cmd, results.ancestorCommands)
fmt.Fprintln(self.Stdout, helpString)
os.Exit(0)
} else if results.parseError != nil {
fmt.Fprintln(self.Stderr, results.parseError.Error())
os.Exit(1)<|fim▁hole|> err := cmd.Function(cmd, cmd.Values)
if err != nil {
fmt.Fprintln(self.Stderr, err.Error())
os.Exit(1)
}
}
}
// Parse the os.Argv arguments, call the Function for the triggered
// Command, and then exit. An error returned from the Function causes us
// to exit with 1, otherwise, exit with 0.
// On a request for help (-h), print the help and exit with os.Exit(0).
// On a user input error, print the error message and exit with os.Exit(1).
func (self *ArgumentParser) ParseAndExit() {
self.Parse()
os.Exit(0)
}
func (self *ArgumentParser) parseArgv(argv []string) *parseResults {
parser := parserState{}
results := parser.runParser(self, argv)
return results
}<|fim▁end|> | }
if cmd.Function != nil { |
<|file_name|>initial_consul_data.py<|end_file_name|><|fim▁begin|>initial_consul_data = {
"update" : {
"providers/va_standalone_servers" : {"username": "admin", "servers": [], "sec_groups": [], "images": [], "password": "admin", "ip_address": "127.0.0.1", "networks": [], "sizes": [], "driver_name": "generic_driver", "location": "", "defaults": {}, "provider_name": "va_standalone_servers"},
"users" : [],
},
"overwrite" : {
"va_flavours" : {"va-small": {"num_cpus": 1, "max_memory": 1048576, "vol_capacity": 5, "memory": 1048576}, "debian": {"num_cpus": 1, "max_memory": 1048576, "vol_capacity": 5, "memory": 1048576}},
"service_presets/highstate_preset":{"name": "highstate", "script": "salt {server} state.highstate test=True | perl -lne 's\/^Failed:\\s+\/\/ or next; s\/\\s.*\/\/; print'"},
"service_presets/ping_preset":{"name": "ping_preset", "script" : "ping -c1 {address} > /dev/null", "interval": "30s", "timeout": "10s"},
"service_presets/tcp_preset":{"name": "TCP", "tcp": "{address}", "interval": "30s", "timeout": "10s"},
"managed_actions/ssh/root" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
# {'name' : 'delete', 'type' : 'confirm'},
{'name' : 'remove_server', 'type' : 'confirm', 'kwargs' : ['datastore_handler', 'server_name'], 'requires_ssh' : False},
{'name' : 'stop', 'type' : 'confirm'},
{'name' : 'show_processes', 'type' : 'text', 'label' : 'Show processes'},
{'name' : 'show_usage', 'type' : 'text', 'label' : 'Show usage'},
{'name' : 'get_users', 'type' : 'text', 'label' : 'Get users'},
{'name' : 'restart_service', 'type' : 'form', 'label' : 'Restart service'}
]
},
"managed_actions/ssh/user" : { #Temporarily, we have all functions avialable for non-root users but we may change this in the future.
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'remove_server', 'type' : 'confirm', 'kwargs' : ['datastore_handler', 'server_name'], 'requires_ssh' : False},
{'name' : 'stop', 'type' : 'action'},
{'name' : 'show_processes', 'type' : 'text', 'label' : 'Show processes'},
{'name' : 'show_usage', 'type' : 'text', 'label' : 'Show usage'},
{'name' : 'get_users', 'type' : 'text', 'label' : 'Get users'},
{'name' : 'restart_service', 'type' : 'form', 'label' : 'Restart process'}
]
},
"managed_actions/winexe/administrator" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
]
},
"managed_actions/winexe/user" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/openstack" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/aws" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/lxc" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/digital_ocean" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/libvirt" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/century_link_driver" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/provider/generic_driver" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'},
{'name' : 'start', 'type' : 'action'},
{'name' : 'stop', 'type' : 'action'}
],
},
"managed_actions/salt/" : {
"actions" : [
{'name' : 'reboot', 'type' : 'confirm'},
{'name' : 'delete', 'type' : 'confirm'}, <|fim▁hole|> }
}<|fim▁end|> | {'name' : 'stop', 'type' : 'action'}
],
} |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>fn main(){
println!("hello world!")
}<|fim▁end|> | |
<|file_name|>jquery.elastislide.js<|end_file_name|><|fim▁begin|>(function (e, t, n) {
t.fn.touchwipe = function (e) {
var n = {
min_move_x: 20,
min_move_y: 20,
wipeLeft: function () {},
wipeRight: function () {},
wipeUp: function () {},
wipeDown: function () {},
preventDefaultEvents: !0
};
e && t.extend(n, e);
this.each(function () {
function i() {
this.removeEventListener("touchmove", s);
e = null;
r = !1
}
function s(s) {
n.preventDefaultEvents && s.preventDefault();
if (r) {
var o = s.touches[0].pageX,
u = s.touches[0].pageY,
a = e - o,
f = t - u;
if (Math.abs(a) >= n.min_move_x) {
i();
a > 0 ? n.wipeLeft() : n.wipeRight()
} else if (Math.abs(f) >= n.min_move_y) {
i();
f > 0 ? n.wipeDown() : n.wipeUp()
}
}
}
function o(n) {
if (n.touches.length == 1) {
e = n.touches[0].pageX;
t = n.touches[0].pageY;
r = !0;
this.addEventListener("touchmove", s, !1)
}
}
var e, t, r = !1;
"ontouchstart" in document.documentElement && this.addEventListener("touchstart", o, !1)
});
return this
};
t.elastislide = function (e, n) {
this.$el = t(n);
this._init(e)
};
t.elastislide.defaults = {
speed: 450,
easing: "",
imageW: 190,
margin: 3,
border: 2,
minItems: 1,
current: 0,
onClick: function () {
return !1
}
};
t.elastislide.prototype = {
_init: function (e) {
this.options = t.extend(!0, {}, t.elastislide.defaults, e);
this.$slider = this.$el.find("ul");
this.$items = this.$slider.children("li");
this.itemsCount = this.$items.length;
this.$esCarousel = this.$slider.parent();
this._validateOptions();
this._configure();
this._addControls();
this._initEvents();
this.$slider.show();
this._slideToCurrent(!1)
},
_validateOptions: function () {
this.options.speed < 0 && (this.options.speed = 450);
this.options.margin < 0 && (this.options.margin = 4);
this.options.border < 0 && (this.options.border = 1);
if (this.options.minItems < 1 || this.options.minItems > this.itemsCount) this.options.minItems = 1;
this.options.current > this.itemsCount - 1 && (this.options.current = 0)
},
_configure: function () {
this.current = this.options.current;
this.visibleWidth = this.$esCarousel.width();
if (this.visibleWidth < this.options.minItems * (this.options.imageW + 2 * this.options.border) + (this.options.minItems - 1) * this.options.margin) {
this._setDim((this.visibleWidth - (this.options.minItems - 1) * this.options.margin) / this.options.minItems);
this._setCurrentValues();
this.fitCount = this.options.minItems
} else {
this._setDim();
this._setCurrentValues()
}
this.$slider.css({
width: this.sliderW + 24
})
},
_setDim: function (e) {
this.$items.css({
marginRight: this.options.margin,
width: e ? e : this.options.imageW + 2 * this.options.border
}).children("a").css({
borderWidth: this.options.border
})
},
_setCurrentValues: function () {
this.itemW = this.$items.outerWidth(!0);
this.sliderW = this.itemW * this.itemsCount;
this.visibleWidth = this.$esCarousel.width();
this.fitCount = Math.floor(this.visibleWidth / this.itemW)
},
_addControls: function () {
this.$navNext = t('<span class="es-nav-next"><i class="icon-right-open"></i><span>Next</span></span>');
this.$navPrev = t('<span class="es-nav-prev"><i class="icon-left-open"></i><span>Previous</span></span>');
t('<div class="es-nav"/>').append(this.$navPrev).append(this.$navNext).appendTo(this.$el)
},
_toggleControls: function (e, t) {
e && t ? t === 1 ? e === "right" ? this.$navNext.show() : this.$navPrev.show() : e === "right" ? this.$navNext.hide() : this.$navPrev.hide() : (this.current === this.itemsCount - 1 || this.fitCount >= this.itemsCount) && this.$navNext.hide()
},
_initEvents: function () {
var n = this;
t(e).bind("resize.elastislide", function (e) {
n.refresh();
});
this.$navNext.bind("click.elastislide", function (e) {
n._slide("right")
});
this.$navPrev.bind("click.elastislide", function (e) {
n._slide("left")
});
this.$items.bind("click.elastislide", function (e) {
n.options.onClick(t(this));
return !1
});
n.$slider.touchwipe({
wipeLeft: function () {
n._slide("right")
},
wipeRight: function () {
n._slide("left")
}
})
},
_slide: function (e, r, i, s) {
if (this.$slider.is(":animated")) return !1;
var o = parseFloat(this.$slider.css("margin-left"));
if (r === n) {
var u = this.fitCount * this.itemW,
r;
if (u < 0) return !1;
if (e === "right" && this.sliderW - (Math.abs(o) + u) < this.visibleWidth) {
u = this.sliderW - (Math.abs(o) + this.visibleWidth) - this.options.margin;
this._toggleControls("right", -1);
this._toggleControls("left", 1)
} else if (e === "left" && Math.abs(o) - u < 0) {
u = Math.abs(o);
this._toggleControls("left", -1);
this._toggleControls("right", 1)
} else {
var a;
e === "right" ? a = Math.abs(o) + this.options.margin + Math.abs(u) : a = Math.abs(o) - this.options.margin - Math.abs(u);
a > 0 ? this._toggleControls("left", 1) : this._toggleControls("left", -1);
a < this.sliderW - this.visibleWidth ? this._toggleControls("right", 1) : this._toggleControls("right", -1)
}
e === "right" ? r = "-=" + u : r = "+=" + u
} else {
var a = Math.abs(r);
if (Math.max(this.sliderW, this.visibleWidth) - a < this.visibleWidth) {
r = -(Math.max(this.sliderW, this.visibleWidth) - this.visibleWidth);
r !== 0 && (r += this.options.margin);
this._toggleControls("right", -1);
a = Math.abs(r)
}
a > 0 ? this._toggleControls("left", 1) : this._toggleControls("left", -1);
Math.max(this.sliderW, this.visibleWidth) - this.visibleWidth > a + this.options.margin ? this._toggleControls("right", 1) : this._toggleControls("right", -1)
}
t.fn.applyStyle = i === n ? t.fn.animate : t.fn.css;
var f = {
marginLeft: r
}, l = this;
this.$slider.applyStyle(f, t.extend(!0, [], {
duration: this.options.speed,
easing: this.options.easing,
complete: function () {
s && s.call()
}
}))
},
_slideToCurrent: function (e) {
var t = this.current * this.itemW;
this._slide("", -t, e)
},
add: function (e, t) {
this.$items = this.$items.add(e);
this.itemsCount = this.$items.length;
this._setDim();
this._setCurrentValues();
this.$slider.css({
width: this.sliderW
});
this._slideToCurrent();
t && t.call(e)
},
destroy: function (e) {
this._destroy(e)
},
_destroy: function (n) {
this.$el.unbind(".elastislide").removeData("elastislide");
t(e).unbind(".elastislide");
n && n.call()
},
refresh: function(n) {
var n = this;
n._setCurrentValues();
if (n.visibleWidth < n.options.minItems * (n.options.imageW + 2 * n.options.border) + (n.options.minItems - 1) * n.options.margin) {
n._setDim((n.visibleWidth - (n.options.minItems - 1) * n.options.margin) / n.options.minItems);
n._setCurrentValues();
n.fitCount = n.options.minItems
} else {
n._setDim();
n._setCurrentValues()
}
n.$slider.css({
width: n.sliderW + 10
});
clearTimeout(n.resetTimeout);
n.resetTimeout = setTimeout(function () {
n._slideToCurrent()
}, 200);
}
};
var r = function (e) {
this.console && console.error(e)
};
t.fn.elastislide = function (e) {
if (typeof e == "string") {
var n = Array.prototype.slice.call(arguments, 1);
this.each(function () {
var i = t.data(this, "elastislide");
if (!i) {
r("cannot call methods on elastislide prior to initialization; attempted to call method '" + e + "'");
return
}
if (!t.isFunction(i[e]) || e.charAt(0) === "_") {
r("no such method '" + e + "' for elastislide instance");
return
}
i[e].apply(i, n)
})
} else this.each(function () {
var n = t.data(this, "elastislide");<|fim▁hole|>})(window, jQuery);<|fim▁end|> | n || t.data(this, "elastislide", new t.elastislide(e, this))
});
return this
} |
<|file_name|>ShellCommand.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015-2016 Maven Source Dependencies
* Plugin contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.l2x6.srcdeps.core.shell;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import org.l2x6.srcdeps.core.util.SrcdepsCoreUtils;
/**
* A definition of a shell command that can be executed by {@link Shell#execute(ShellCommand)}.
*
* @author <a href="https://github.com/ppalaga">Peter Palaga</a>
*/
public class ShellCommand {
private final List<String> arguments;
private final Map<String, String> environment;
private final String executable;
private final IoRedirects ioRedirects;<|fim▁hole|> Map<String, String> environment, IoRedirects ioRedirects, long timeoutMs) {
super();
SrcdepsCoreUtils.assertArgNotNull(executable, "executable");
SrcdepsCoreUtils.assertArgNotNull(arguments, "arguments");
SrcdepsCoreUtils.assertArgNotNull(workingDirectory, "workingDirectory");
SrcdepsCoreUtils.assertArgNotNull(environment, "environment");
SrcdepsCoreUtils.assertArgNotNull(ioRedirects, "ioRedirects");
this.executable = executable;
this.arguments = arguments;
this.workingDirectory = workingDirectory;
this.environment = environment;
this.ioRedirects = ioRedirects;
this.timeoutMs = timeoutMs;
}
/**
* @return an array containing the executable and its arguments that can be passed e.g. to
* {@link ProcessBuilder#command(String...)}
*/
public String[] asCmdArray() {
String[] result = new String[arguments.size() + 1];
int i = 0;
result[i++] = executable;
for (String arg : arguments) {
result[i++] = arg;
}
return result;
}
/**
* @return the {@link List} arguments for the executable. Cannot be {@code null}.
*/
public List<String> getArguments() {
return arguments;
}
/**
* @return a {@link Map} of environment variables that should be used when executing this {@link ShellCommand}.
* Cannot be {@code null}. Note that these are just overlay variables - when a new {@link Process} is
* spawned, the environment is copied from the present process and only the variables provided by this
* method are overwritten.
*/
public Map<String, String> getEnvironment() {
return environment;
}
/**
* @return the executable file that should be called
*/
public String getExecutable() {
return executable;
}
/**
* @return the {@link IoRedirects} to use when the {@link Shell} spawns a new {@link Process}
*/
public IoRedirects getIoRedirects() {
return ioRedirects;
}
/**
* @return timeout in milliseconds
*/
public long getTimeoutMs() {
return timeoutMs;
}
/**
* @return the directory in which this {@link ShellCommand} should be executed
*/
public Path getWorkingDirectory() {
return workingDirectory;
}
}<|fim▁end|> | private final long timeoutMs;
private final Path workingDirectory;
public ShellCommand(String executable, List<String> arguments, Path workingDirectory, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Automatic config nagios configurations.
Copyright (C) 2015 Canux CHENG
All rights reserved
Name: __init__.py
Author: Canux [email protected]
Version: V1.0
Time: Wed 09 Sep 2015 09:20:51 PM EDT
<|fim▁hole|>Example:
./nagios -h
"""
__version__ = "3.1.0.0"
__description__ = """Config nagios automatic. Any question contact the author Canux CHENG. Email: [email protected]."""
__author__ = "Canux CHENG"<|fim▁end|> |