// server.go
package main
import (
"log"
"os"
"github.com/docopt/docopt.go"
"github.com/go-martini/martini"
"github.com/martini-contrib/binding"
"github.com/martini-contrib/render"
"labix.org/v2/mgo"
"labix.org/v2/mgo/bson"
)
// Note contains a title and content.
type note struct {
ID bson.ObjectId `json:"id" bson:"_id"`
Subject string `json:"subject" bson:"subject" binding:"required"`
Title string `json:"title" bson:"title" binding:"required"`
Content string `json:"content" bson:"content"`
}
type image struct {
ID bson.ObjectId `json:"id" bson:"_id"`
NoteID bson.ObjectId `json:"note_id" bson:"note_id"`
Type string `json:"Type" bson:"type" binding:"required"`
Content string `json:"Content" bson:"content" binding:"required"`
}
type imagelist struct {
ID bson.ObjectId `json:"id" bson:"_id"`
}
type subject struct {
Subject string `json:"subject" bson:"subject"`
}
type query struct {
Query string `json:"query" bson:"query"`
Subject []string `json:"subject" bson:"subject"`
Show int `json:"show" bson:"show"`
}
// Return object for title query
type titlelist struct {
ID bson.ObjectId `json:"id" bson:"_id"`
Title string `json:"title" bson:"title"`
}
// App returns the ClassicMartini application.
func App() *martini.ClassicMartini {
m := martini.Classic()
m.Use(DB())
m.Use(render.Renderer())
	m.Use(martini.Static("app"))
	m.Post("/search", binding.Bind(query{}), func(q query, r render.Render, log *log.Logger, db *mgo.Database) {
if q.Query == "" {
r.JSON(400, map[string]interface{}{"error": "query required"})
return
}
n := []note{}
if len(q.Subject) > 0 {
search := bson.M{
"$and": []bson.M{
bson.M{"subject": bson.M{"$in": q.Subject}},
bson.M{"$or": []bson.M{
bson.M{"title": bson.M{"$regex": q.Query, "$options": "i"}},
bson.M{"content": bson.M{"$regex": q.Query, "$options": "i"}},
}},
},
}
err := db.C("notes").Find(search).All(&n)
if err != nil {
r.JSON(500, nil)
}
} else {
search := []bson.M{
bson.M{"title": bson.M{"$regex": q.Query, "$options": "i"}},
bson.M{"content": bson.M{"$regex": q.Query, "$options": "i"}},
}
err := db.C("notes").Find(bson.M{"$or": search}).All(&n)
if err != nil {
r.JSON(500, nil)
}
}
total := len(n)
beg := 0
end := 10
if q.Show > 0 {
end += 10
}
if end > total {
end = total
}
r.JSON(200, map[string]interface{}{"count": total, "skip": beg, "results": n[beg:end]})
})
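	// Example request body for the /search route above (illustrative values;
	// the field names come from the `query` struct's json tags):
	//   {"query": "martini", "subject": ["go", "web"], "show": 0}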
m.Group("/notes", func(r martini.Router) {
r.Post("", binding.Bind(note{}), addNote)
r.Post("/subject", binding.Bind(subject{}), getNotesBySubject)
r.Post("/(.*)", noteNotFound)
r.Get("/subject/:sub", getNotesByTitlelist)
r.Get("/sublist", getSubList)
r.Get("/:id", getNote)
r.Get("(.*)", noteNotFound)
r.Put("/:id", binding.Bind(note{}), updateNote)
r.Put("(.*)", noteNotFound)
r.Delete("/:id", deleteNote)
r.Delete("(.*)", noteNotFound)
})
m.Group("/img", func(r martini.Router) {
r.Post("/:id", binding.Bind(image{}), addIMG)
r.Post("/(.*)", noteNotFound)
r.Get("/:id/list", getIMGList)
r.Get("/:id/content", getIMGContent)
r.Get("/:id", getIMG)
r.Get("(.*)", noteNotFound)
r.Delete("/:id", deleteIMG)
r.Delete("(.*)", noteNotFound)
})
m.NotFound(func(r render.Render) {
r.Redirect("/", 302)
})
return m
}
// DB clones a mongodb session and maps it to the current context.
func DB() martini.Handler {
session, err := mgo.Dial(os.Getenv("PAM_MONGO_URL"))
if err != nil {
log.Fatal(err)
}
return func(c martini.Context) {
s := session.Clone()
c.Map(s.DB("pam"))
defer s.Close()
c.Next()
}
}
func main() {
arguments, err := docopt.Parse(usage, nil, true, "pam 2.2.1", false)
if err != nil {
log.Fatal("Error parsing usage. Error: ", err.Error())
}
err = os.Setenv("HOST", arguments["--bind_ip"].(string))
if err != nil {
log.Fatal(err.Error())
}
err = os.Setenv("PORT", arguments["--bind_port"].(string))
if err != nil {
log.Fatal(err.Error())
}
App().Run()
}
// package-info.java
/**
 * Contains methods for working with devices using the MODBUS protocol over
 * RS232.
 */
package kernel.modbus;
// api.go
package api
import (
"errors"
"io/ioutil"
"log"
"net/http"
"github.com/bitly/go-simplejson"
)
func Request(req *http.Request) (*simplejson.Json, error) {
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
body, err := ioutil.ReadAll(resp.Body)
resp.Body.Close()
if err != nil {
return nil, err
}
if resp.StatusCode != 200 {
log.Printf("got response code %d - %s", resp.StatusCode, body)
return nil, errors.New("api request returned non 200 status code")
}
data, err := simplejson.NewJson(body)
if err != nil {
return nil, err
}
return data, nil
}
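// Example usage of Request (a sketch; the URL and JSON field are illustrative):
//   req, _ := http.NewRequest("GET", "https://api.example.com/v1/user", nil)
//   data, err := Request(req)
//   if err == nil {
//       name, _ := data.Get("name").String()
//       _ = name
//   }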
func RequestUnparsedResponse(url string, header http.Header) (
response *http.Response, err error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, errors.New("failed building request for " +
url + ": " + err.Error())
}
req.Header = header
httpclient := &http.Client{}
if response, err = httpclient.Do(req); err != nil {
return nil, errors.New("request failed for " +<|fim▁hole|> return
}<|fim▁end|> | url + ": " + err.Error())
} |
# config.py
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
PROJECT_MAIL_SUBJECT_PREFIX = '[Project]'
PROJECT_MAIL_SENDER = 'Project Admin <[email protected]>'
PROJECT_ADMIN = os.environ.get('PROJECT_ADMIN')
CELERY_BROKER_URL = 'amqp://localhost//'
CELERY_RESULT_BACKEND = 'amqp://'
CELERY_INCLUDE = ['celery_worker']
SQL_USERNAME = os.environ.get('MYSQL_USERNAME')
SQL_PASSWORD = os.environ.get('MYSQL_PASSWORD')
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'postgresql://' + str(Config.SQL_USERNAME) + ':' + str(
        Config.SQL_PASSWORD) + '@localhost/testproject'
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
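# Example usage (a sketch; assumes a Flask-style app factory, which this
# module does not itself provide):
#   cfg = config[os.environ.get('APP_CONFIG') or 'default']
#   app.config.from_object(cfg)
#   cfg.init_app(app)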
// router.js
'use strict';
require('./controllers/listCtrl.js');
require('./controllers/loginCtrl.js');
require('./services/pageService.js');
angular.module('app.router',
['ui.router', 'app.list', 'app.login'])
.config(configFn);
configFn.$inject = ['$locationProvider', '$stateProvider', '$urlRouterProvider'];
function configFn($locationProvider, $stateProvider, $urlRouterProvider){
$urlRouterProvider.when('', '/');
$urlRouterProvider.otherwise("/404");
$stateProvider
.state('list', {
url: "/",
template: require('ng-cache!./views/list.html'),
// controller: 'listCtrl'
})
.state('signin', {
url: "/login",
template: require('ng-cache!./views/login.html'),
// controller: 'loginCtrl'
})
.state('404', {
url: "/404",
template: require('ng-cache!./views/404.html'),
controller: function(pageService) {
pageService.setTitle('404');
            }
        });
}
// networkstyle.cpp
// Copyright (c) 2014 The GreenCoin developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "networkstyle.h"
#include "guiconstants.h"
#include <QApplication>
static const struct {
    const char *networkId;
    const char *appName;
    const char *appIcon;
    const char *titleAddText;
const char *splashImage;
} network_styles[] = {
{"main", QAPP_APP_NAME_DEFAULT, ":/icons/greencoin", "", ":/images/splash"},
{"test", QAPP_APP_NAME_TESTNET, ":/icons/greencoin_testnet", QT_TRANSLATE_NOOP("SplashScreen", "[testnet]"), ":/images/splash_testnet"},
{"regtest", QAPP_APP_NAME_TESTNET, ":/icons/greencoin_testnet", "[regtest]", ":/images/splash_testnet"}
};
static const unsigned network_styles_count = sizeof(network_styles)/sizeof(*network_styles);
// titleAddText needs to be const char* for tr()
NetworkStyle::NetworkStyle(const QString &appName, const QString &appIcon, const char *titleAddText, const QString &splashImage):
appName(appName),
appIcon(appIcon),
titleAddText(qApp->translate("SplashScreen", titleAddText)),
splashImage(splashImage)
{
}
const NetworkStyle *NetworkStyle::instantiate(const QString &networkId)
{
for (unsigned x=0; x<network_styles_count; ++x)
{
if (networkId == network_styles[x].networkId)
{
return new NetworkStyle(
network_styles[x].appName,
network_styles[x].appIcon,
network_styles[x].titleAddText,
network_styles[x].splashImage);
}
}
return 0;
}
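// Example usage (a sketch; accessors are assumed to be declared in
// networkstyle.h and are not shown here):
//   const NetworkStyle *style = NetworkStyle::instantiate(QString("test"));
//   if (style) {
//       // apply the style's app name, icon and splash image, then delete it
//   }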
# workspace.py
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package workspace
# Module caffe2.python.workspace
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import contextlib
from google.protobuf.message import Message
from multiprocessing import Process
import os
from collections import defaultdict
import logging
import numpy as np
from past.builtins import basestring
import shutil
import socket
import tempfile
from caffe2.proto import caffe2_pb2
from caffe2.python import scope, utils
import caffe2.python._import_c_extension as C
logger = logging.getLogger(__name__)
Blobs = C.blobs
CreateBlob = C.create_blob
CurrentWorkspace = C.current_workspace
DeserializeBlob = C.deserialize_blob
GlobalInit = C.global_init
HasBlob = C.has_blob
RegisteredOperators = C.registered_operators
SerializeBlob = C.serialize_blob
SwitchWorkspace = C.switch_workspace
RootFolder = C.root_folder
Workspaces = C.workspaces
BenchmarkNet = C.benchmark_net
GetStats = C.get_stats
operator_tracebacks = defaultdict(dict)
is_asan = C.is_asan
has_gpu_support = C.has_gpu_support
if has_gpu_support:
NumCudaDevices = C.num_cuda_devices
SetDefaultGPUID = C.set_default_gpu_id
GetDefaultGPUID = C.get_default_gpu_id
GetCUDAVersion = C.get_cuda_version
GetCuDNNVersion = C.get_cudnn_version
def GetCudaPeerAccessPattern():
return np.asarray(C.get_cuda_peer_access_pattern())
GetDeviceProperties = C.get_device_properties
else:
NumCudaDevices = lambda: 0 # noqa
SetDefaultGPUID = lambda x: None # noqa
GetDefaultGPUID = lambda: 0 # noqa
    GetCUDAVersion = lambda: 0 # noqa
    GetCuDNNVersion = lambda: 0 # noqa
GetCudaPeerAccessPattern = lambda: np.array([]) # noqa
GetDeviceProperties = lambda x: None # noqa
def _GetFreeFlaskPort():
"""Get a free flask port."""
# We will prefer to use 5000. If not, we will then pick a random port.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex(('127.0.0.1', 5000))
if result == 0:
return 5000
else:
s = socket.socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
# Race condition: between the interval we close the socket and actually
# start a mint process, another process might have occupied the port. We
# don't do much here as this is mostly for convenience in research
# rather than 24x7 service.
return port
def StartMint(root_folder=None, port=None):
"""Start a mint instance.
TODO(Yangqing): this does not work well under ipython yet. According to
https://github.com/ipython/ipython/issues/5862
writing up some fix is a todo item.
"""
from caffe2.python.mint import app
if root_folder is None:
# Get the root folder from the current workspace
root_folder = C.root_folder()
if port is None:
port = _GetFreeFlaskPort()
process = Process(
target=app.main,
args=(
['-p', str(port), '-r', root_folder],
)
)
process.start()
print('Mint running at http://{}:{}'.format(socket.getfqdn(), port))
return process
def StringifyProto(obj):
"""Stringify a protocol buffer object.
Inputs:
obj: a protocol buffer object, or a Pycaffe2 object that has a Proto()
function.
Outputs:
string: the output protobuf string.
Raises:
AttributeError: if the passed in object does not have the right attribute.
"""
if isinstance(obj, basestring):
return obj
else:
if isinstance(obj, Message):
# First, see if this object is a protocol buffer, which we can
# simply serialize with the SerializeToString() call.
return obj.SerializeToString()
elif hasattr(obj, 'Proto'):
return obj.Proto().SerializeToString()
else:
raise ValueError("Unexpected argument to StringifyProto of type " +
type(obj).__name__)
def ResetWorkspace(root_folder=None):
if root_folder is None:
# Reset the workspace, but keep the current root folder setting.
return C.reset_workspace(C.root_folder())
else:
if not os.path.exists(root_folder):
os.makedirs(root_folder)
return C.reset_workspace(root_folder)
def CreateNet(net, overwrite=False, input_blobs=None):
if input_blobs is None:
input_blobs = []
for input_blob in input_blobs:
C.create_blob(input_blob)
return CallWithExceptionIntercept(
C.create_net,
C.Workspace.current._last_failed_op_net_position,
GetNetName(net),
StringifyProto(net), overwrite,
)
def Predictor(init_net, predict_net):
return C.Predictor(StringifyProto(init_net), StringifyProto(predict_net))
def GetOperatorCost(operator, blobs):
return C.get_operator_cost(StringifyProto(operator), blobs)
def RunOperatorOnce(operator):
return C.run_operator_once(StringifyProto(operator))
def RunOperatorsOnce(operators):
for op in operators:
success = RunOperatorOnce(op)
if not success:
return False
return True
def CallWithExceptionIntercept(func, op_id_fetcher, net_name, *args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
op_id = op_id_fetcher()
net_tracebacks = operator_tracebacks.get(net_name, None)
print("Traceback for operator {} in network {}".format(op_id, net_name))
if net_tracebacks and op_id in net_tracebacks:
tb = net_tracebacks[op_id]
for line in tb:
print(':'.join(map(str, line)))
raise
def RunNetOnce(net):
return CallWithExceptionIntercept(
C.run_net_once,
C.Workspace.current._last_failed_op_net_position,
GetNetName(net),
StringifyProto(net),
)
def RunNet(name, num_iter=1, allow_fail=False):
"""Runs a given net.
Inputs:
name: the name of the net, or a reference to the net.
num_iter: number of iterations to run
allow_fail: if True, does not assert on net exec failure but returns False
Returns:
True or an exception.
"""
return CallWithExceptionIntercept(
C.run_net,
C.Workspace.current._last_failed_op_net_position,
GetNetName(name),
StringifyNetName(name), num_iter, allow_fail,
)
def RunPlan(plan_or_step):
# TODO(jiayq): refactor core.py/workspace.py to avoid circular deps
import caffe2.python.core as core
if isinstance(plan_or_step, core.ExecutionStep):
plan_or_step = core.Plan(plan_or_step)
return C.run_plan(StringifyProto(plan_or_step))
def InferShapesAndTypes(nets, blob_dimensions=None):
"""Infers the shapes and types for the specified nets.
Inputs:
nets: the list of nets
blob_dimensions (optional): a dictionary of blobs and their dimensions.
If not specified, the workspace blobs are used.
Returns:
A tuple of (shapes, types) dictionaries keyed by blob name.
"""
net_protos = [StringifyProto(n.Proto()) for n in nets]
if blob_dimensions is None:
blobdesc_prototxt = C.infer_shapes_and_types_from_workspace(net_protos)
else:
blobdesc_prototxt = C.infer_shapes_and_types_from_map(
net_protos, blob_dimensions
)
blobdesc_proto = caffe2_pb2.TensorShapes()
blobdesc_proto.ParseFromString(blobdesc_prototxt)
shapes = {}
types = {}
for ts in blobdesc_proto.shapes:
if not ts.unknown_shape:
shapes[ts.name] = list(ts.dims)
types[ts.name] = ts.data_type
return (shapes, types)
def _StringifyName(name, expected_type):
if isinstance(name, basestring):
return name
assert type(name).__name__ == expected_type, \
"Expected a string or %s" % expected_type
return str(name)
def StringifyBlobName(name):
return _StringifyName(name, "BlobReference")
def StringifyNetName(name):
return _StringifyName(name, "Net")
def GetNetName(net):
if isinstance(net, basestring):
return net
if type(net).__name__ == "Net":
return net.Name()
if isinstance(net, caffe2_pb2.NetDef):
return net.name
raise Exception("Not a Net object: {}".format(str(net)))
def FeedBlob(name, arr, device_option=None):
"""Feeds a blob into the workspace.
Inputs:
name: the name of the blob.
arr: either a TensorProto object or a numpy array object to be fed into
the workspace.
device_option (optional): the device option to feed the data with.
Returns:
True or False, stating whether the feed is successful.
"""
if type(arr) is caffe2_pb2.TensorProto:
arr = utils.Caffe2TensorToNumpyArray(arr)
if type(arr) is np.ndarray and arr.dtype.kind in 'SU':
# Plain NumPy strings are weird, let's use objects instead
arr = arr.astype(np.object)
if device_option is None:
device_option = scope.CurrentDeviceScope()
if device_option and device_option.device_type == caffe2_pb2.CUDA:
if arr.dtype == np.dtype('float64'):
logger.warning(
"CUDA operators do not support 64-bit doubles, " +
"please use arr.astype(np.float32) or np.int32 for ints." +
" Blob: {}".format(name) +
" type: {}".format(str(arr.dtype))
)
name = StringifyBlobName(name)
if device_option is not None:
return C.feed_blob(name, arr, StringifyProto(device_option))
else:
return C.feed_blob(name, arr)
def FetchBlobs(names):
"""Fetches a list of blobs from the workspace.
Inputs:
names: list of names of blobs - strings or BlobReferences
Returns:
list of fetched blobs
"""
return [FetchBlob(name) for name in names]
def FetchBlob(name):
    """Fetches a blob from the workspace.
Inputs:
name: the name of the blob - a string or a BlobReference
Returns:
Fetched blob (numpy array or string) if successful
"""
return C.fetch_blob(StringifyBlobName(name))
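# Example round-trip using the helpers above (a sketch):
#   FeedBlob('data', np.ones((2, 3), dtype=np.float32))
#   data = FetchBlob('data')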
def ApplyTransform(transform_key, net):
"""Apply a Transform to a NetDef protobuf object, and returns the new
transformed NetDef.
Inputs:
transform_key: the name of the transform, as it is stored in the registry
net: a NetDef protobuf object
Returns:
Transformed NetDef protobuf object.
"""
transformed_net = caffe2_pb2.NetDef()
transformed_str = C.apply_transform(
str(transform_key).encode('utf-8'),
net.SerializeToString(),
)
transformed_net.ParseFromString(transformed_str)
return transformed_net
def ApplyTransformIfFaster(transform_key, net, init_net, **kwargs):
"""Apply a Transform to a NetDef protobuf object, and returns the new
transformed NetDef, only if it runs faster than the original.
The runs are performed on the current active workspace (gWorkspace).
You should initialize that workspace before making a call to this function.
Inputs:
transform_key: the name of the transform, as it is stored in the registry
net: a NetDef protobuf object
init_net: The net to initialize the workspace.
warmup_runs (optional):
Determines how many times the net is run before testing.
Will be 5 by default.
main_runs (optional):
Determines how many times the net is run during testing.
Will be 10 by default.
improvement_threshold (optional):
Determines the factor which the new net needs to be faster
in order to replace the old. Will be 1.01 by default.
Returns:
Either a Transformed NetDef protobuf object, or the original netdef.
"""
warmup_runs = kwargs['warmup_runs'] if 'warmup_runs' in kwargs else 5
main_runs = kwargs['main_runs'] if 'main_runs' in kwargs else 10
improvement_threshold = kwargs['improvement_threshold'] \
if 'improvement_threshold' in kwargs else 1.01
transformed_net = caffe2_pb2.NetDef()
transformed_str = C.apply_transform_if_faster(
str(transform_key).encode('utf-8'),
net.SerializeToString(),
init_net.SerializeToString(),
warmup_runs,
main_runs,
float(improvement_threshold),
)
transformed_net.ParseFromString(transformed_str)
return transformed_net
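# Example call (a sketch; 'my_transform' stands in for a registered transform
# key and is not a real one):
#   new_net = ApplyTransformIfFaster('my_transform', net, init_net,
#                                    warmup_runs=5, main_runs=10,
#                                    improvement_threshold=1.01)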
def GetNameScope():
"""Return the current namescope string. To be used to fetch blobs"""
return scope.CurrentNameScope()
class _BlobDict(object):
"""Provides python dict compatible way to do fetching and feeding"""
def __getitem__(self, key):
return FetchBlob(key)
def __setitem__(self, key, value):
return FeedBlob(key, value)
def __len__(self):
return len(C.blobs())
def __iter__(self):
return C.blobs().__iter__()
def __contains__(self, item):
return C.has_blob(item)
blobs = _BlobDict()
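# Example (a sketch): blobs behaves like a dict backed by the workspace, e.g.
#   blobs['x'] = np.zeros(4); arr = blobs['x']; 'x' in blobs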
################################################################################
# Utilities for immediate mode
#
# Caffe2's immediate mode implements the following behavior: between the two
# function calls StartImmediate() and StopImmediate(), for any operator that is
# called through CreateOperator(), we will also run that operator in a workspace
# that is specific to the immediate mode. The user is explicitly expected to
# make sure that these ops have proper inputs and outputs, i.e. one should not
# run an op where an external input is not created or fed.
#
# Users can use FeedImmediate() and FetchImmediate() to interact with blobs
# in the immediate workspace.
#
# Once StopImmediate() is called, all contents in the immediate workspace is
# freed up so one can continue using normal runs.
#
# The immediate mode is solely for debugging purposes and support will be very
# sparse.
################################################################################
_immediate_mode = False
_immediate_workspace_name = "_CAFFE2_IMMEDIATE"
_immediate_root_folder = ''
def IsImmediate():
return _immediate_mode
@contextlib.contextmanager
def WorkspaceGuard(workspace_name):
current = CurrentWorkspace()
SwitchWorkspace(workspace_name, True)
yield
SwitchWorkspace(current)
def StartImmediate(i_know=False):
global _immediate_mode
global _immediate_root_folder
if IsImmediate():
# already in immediate mode. We will kill the previous one
# and start from fresh.
StopImmediate()
_immediate_mode = True
with WorkspaceGuard(_immediate_workspace_name):
_immediate_root_folder = tempfile.mkdtemp()
ResetWorkspace(_immediate_root_folder)
if i_know:
# if the user doesn't want to see the warning message, sure...
return
print("""
Enabling immediate mode in caffe2 python is an EXTREMELY EXPERIMENTAL
feature and may very easily go wrong. This is because Caffe2 uses a
declarative way of defining operators and models, which is essentially
not meant to run things in an interactive way. Read the following carefully
to make sure that you understand the caveats.
(1) You need to make sure that the sequences of operators you create are
actually runnable sequentially. For example, if you create an op that takes
an input X, somewhere earlier you should have already created X.
(2) Caffe2 immediate uses one single workspace, so if the set of operators
you run are intended to be under different workspaces, they will not run.
To create boundaries between such use cases, you can call FinishImmediate()
and StartImmediate() manually to flush out everything no longer needed.
(3) Underlying objects held by the immediate mode may interfere with your
normal run. For example, if there is a leveldb that you opened in immediate
mode and did not close, your main run will fail because leveldb does not
support double opening. Immediate mode may also occupy a lot of memory esp.
on GPUs. Call FinishImmediate() as soon as possible when you no longer
need it.
(4) Immediate is designed to be slow. Every immediate call implicitly
creates a temp operator object, runs it, and destroys the operator. This
slow-speed run is by design to discourage abuse. For most use cases other
than debugging, do NOT turn on immediate mode.
(5) If there is anything FATAL happening in the underlying C++ code, the
immediate mode will immediately (pun intended) cause the runtime to crash.
Thus you should use immediate mode with extra care. If you still would
like to, have fun [https://xkcd.com/149/].
""")
def StopImmediate():
"""Stops an immediate mode run."""
# Phew, that was a dangerous ride.
global _immediate_mode
global _immediate_root_folder
if not IsImmediate():
return
with WorkspaceGuard(_immediate_workspace_name):
ResetWorkspace()
shutil.rmtree(_immediate_root_folder)
_immediate_root_folder = ''
_immediate_mode = False
def ImmediateBlobs():
with WorkspaceGuard(_immediate_workspace_name):
return Blobs()
def RunOperatorImmediate(op):
with WorkspaceGuard(_immediate_workspace_name):
RunOperatorOnce(op)
def FetchImmediate(*args, **kwargs):
with WorkspaceGuard(_immediate_workspace_name):
return FetchBlob(*args, **kwargs)
def FeedImmediate(*args, **kwargs):
with WorkspaceGuard(_immediate_workspace_name):
return FeedBlob(*args, **kwargs)
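# Immediate-mode round-trip (a sketch):
#   StartImmediate(i_know=True)
#   FeedImmediate('x', np.ones(3, dtype=np.float32))
#   x = FetchImmediate('x')
#   StopImmediate()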
# CWorkspace utilities
def _Workspace_create_net_with_exception_intercept(ws, net, overwrite=False):
return CallWithExceptionIntercept(
ws._create_net,
ws._last_failed_op_net_position,
GetNetName(net),
StringifyProto(net), overwrite,
)
C.Workspace.create_net = _Workspace_create_net_with_exception_intercept
def _Workspace_run(ws, obj):
if hasattr(obj, 'Proto'):
obj = obj.Proto()
if isinstance(obj, caffe2_pb2.PlanDef):
return ws._run_plan(obj.SerializeToString())
if isinstance(obj, caffe2_pb2.NetDef):
return CallWithExceptionIntercept(
ws._run_net,
ws._last_failed_op_net_position,
GetNetName(obj),
obj.SerializeToString(),
)
# return ws._run_net(obj.SerializeToString())
if isinstance(obj, caffe2_pb2.OperatorDef):
return ws._run_operator(obj.SerializeToString())
raise ValueError(
"Don't know how to do Workspace.run() on {}".format(type(obj)))
C.Workspace.run = _Workspace_run
def _Blob_feed(blob, arg, device_option=None):
if device_option is not None:
device_option = StringifyProto(device_option)
return blob._feed(arg, device_option)
C.Blob.feed = _Blob_feed
// paned.rs
// This file was generated by gir (17af302) from gir-files (11e0e6d)
// DO NOT EDIT
use Buildable;
use Container;
use Orientable;
use Orientation;
use Widget;
use ffi;
use gdk;
use glib::object::Downcast;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct Paned(Object<ffi::GtkPaned>): Widget, Container, Buildable, Orientable;
match fn {
get_type => || ffi::gtk_paned_get_type(),
}
}
impl Paned {
pub fn new(orientation: Orientation) -> Paned {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_paned_new(orientation)).downcast_unchecked()
}
}
pub fn add1<T: IsA<Widget>>(&self, child: &T) {
unsafe {
ffi::gtk_paned_add1(self.to_glib_none().0, child.to_glib_none().0);
}
}
pub fn add2<T: IsA<Widget>>(&self, child: &T) {
unsafe {
ffi::gtk_paned_add2(self.to_glib_none().0, child.to_glib_none().0);
}
}
pub fn get_child1(&self) -> Option<Widget> {
unsafe {
from_glib_none(ffi::gtk_paned_get_child1(self.to_glib_none().0))
}
}
pub fn get_child2(&self) -> Option<Widget> {
unsafe {
from_glib_none(ffi::gtk_paned_get_child2(self.to_glib_none().0))
}
}
pub fn get_handle_window(&self) -> Option<gdk::Window> {
unsafe {
from_glib_none(ffi::gtk_paned_get_handle_window(self.to_glib_none().0))
}
}
    pub fn get_position(&self) -> i32 {
        unsafe {
            ffi::gtk_paned_get_position(self.to_glib_none().0)
        }
    }
#[cfg(feature = "3.16")]
pub fn get_wide_handle(&self) -> bool {
unsafe {
from_glib(ffi::gtk_paned_get_wide_handle(self.to_glib_none().0))
}
}
pub fn pack1<T: IsA<Widget>>(&self, child: &T, resize: bool, shrink: bool) {
unsafe {
ffi::gtk_paned_pack1(self.to_glib_none().0, child.to_glib_none().0, resize.to_glib(), shrink.to_glib());
}
}
pub fn pack2<T: IsA<Widget>>(&self, child: &T, resize: bool, shrink: bool) {
unsafe {
ffi::gtk_paned_pack2(self.to_glib_none().0, child.to_glib_none().0, resize.to_glib(), shrink.to_glib());
}
}
pub fn set_position(&self, position: i32) {
unsafe {
ffi::gtk_paned_set_position(self.to_glib_none().0, position);
}
}
#[cfg(feature = "3.16")]
pub fn set_wide_handle(&self, wide: bool) {
unsafe {
ffi::gtk_paned_set_wide_handle(self.to_glib_none().0, wide.to_glib());
}
}
}
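// Example usage (a sketch; assumes GTK has been initialized and that `left`
// and `right` are existing widgets):
//   let paned = Paned::new(Orientation::Horizontal);
//   paned.pack1(&left, true, false);
//   paned.pack2(&right, true, false);
//   paned.set_position(200);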
# make_fits_catalogs.py
# Copyright (c) 2014, the GREAT3 executive committee (http://www.great3challenge.info/?q=contacts)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for making catalogs of galaxy fit data corresponding to a real galaxy training set used by
GalSim. It has to collect information from several large files."""
import pyfits
import numpy as np
# Define filenames, etc.
galsim_catfile = 'real_galaxy_catalog_23.5.fits'
fit_catfiles = ['BRIGHTtotalRAW00000.26113.fits',
'totalRAW00000.29949.fits.gz']
n_catfiles = len(fit_catfiles)
cosmos_catfile = 'lensing14.fits.gz'
out_fitfile = 'real_galaxy_catalog_23.5_fits.fits'
out_catfile = 'real_galaxy_catalog_23.5.fits'
# Read in real galaxy catalog.
galsim_cat = pyfits.getdata(galsim_catfile)
n_galsim_cat = len(galsim_cat)
print 'Read in ',n_galsim_cat,' from GalSim catalog ',galsim_catfile
galsim_ident = galsim_cat.field('ident')
# Fields: ('IDENT', 'RA', 'DEC', 'MAG', 'BAND', 'WEIGHT', 'GAL_FILENAME', 'PSF_FILENAME', 'GAL_HDU',
# 'PSF_HDU', 'PIXEL_SCALE', 'NOISE_MEAN', 'NOISE_VARIANCE')
# Read in the full COSMOS catalog.
cosmos_cat = pyfits.getdata(cosmos_catfile)
n_cosmos_cat = len(cosmos_cat)
print 'Read in ',n_cosmos_cat,' from COSMOS catalog ',cosmos_catfile
# Fields: ('IDENT', 'MAG_AUTO', 'FLUX_AUTO', 'MAGERR_AUTO', 'FLUX_RADIUS', 'FLUXERR_AUTO',
# 'KRON_RADIUS', 'MU_MAX', 'MU_CLASS', 'CLEAN', 'GOOD', 'FLAGS', 'SN', 'SN_NON_CORR', 'FWHM_IMAGE',
# 'ALPHA_J2000', 'DELTA_J2000', 'X_IMAGE', 'Y_IMAGE', 'A_IMAGE', 'B_IMAGE', 'THETA_IMAGE',
# 'PETRO_RADIUS', 'RRG_XX', 'RRG_YY', 'XXC', 'YYC', 'XYC', 'D', 'E1_R', 'E2_R', 'E1_RU', 'E2_RU',
# 'GAMMA1', 'GAMMA2', 'FOCUS_MODEL', 'IXX', 'IYY', 'IXY', 'WEIGHT_FUNCT_RADIUS', 'VAR_E1', 'VAR_E2',
# 'BOX', 'SPECZ', 'SPECZ_MARA', 'SPECZ_CLASS', 'SPECZ_ORIGIN', 'GOOD_SPECZ', 'SPECZ_BL_AGN',
# 'SPECZ_SELECTION', 'MIPS_Z', 'MIPS_LOG_L', 'MIPS_MASS', 'ZEST_TYPE', 'ZEST_BULGE',
# 'ZEST_IRREGULARITY', 'ZEST_ELONGATION', 'ZEST_GINI', 'ZEST_M20', 'ZEST_CONCENTRATION',
# 'ZEST_ASYMMETRY', 'BULGE', 'KT', 'OLD_ZPHOT', 'OLD_GOOD_ZPHOT', 'HL_KPC', 'MARA_AGN',
# 'MARA_AGN_ZPHOT', 'MARA_AGN_ZPHOT_LOW68', 'MARA_AGN_ZPHOT_HIGH68', 'KNUD_AGN', 'G1_TS', 'G2_TS',
# 'WEIGHT_TS', 'CHANDRA_GOOD', 'CHANDRA_AGN', 'CHANDRA_LX_HARD', 'CHANDRA_LX_SOFT',
# 'CHANDRA_LX_FULL', 'CHANDRA_ZETA', 'CHANDRA_ZSPEC', 'CHANDRA_CLASSZSPEC', 'CHANDRA_MODEL',
# 'CHANDRA_XMM_ID', 'XMM_GOOD', 'XMM_AGN', 'XMM_LX_HARD', 'XMM_LX_SOFT', 'XMM_LX_FULL', 'XMM_ZETA',
# 'XMM_ZSPEC', 'XMM_CLASSZSPEC', 'XMM_MODEL', 'XMM_CHANDRA_ID', 'EZE_AGN_SPECZ', 'EZE_AGN_PHOTOZ',
# 'EZE_LX', 'EZE_HR', 'EZE_SPECZ', 'EZE_PHOTOZ', 'K_CFHT', 'MATCH_CFHT', 'ERR_K_CFHT',
# 'KEVIN_MSTAR', 'KEVIN_MSTAR2', 'KEVIN_MASSERR', 'OLIV_MSTAR', 'MVIR', 'COLOR', 'TYPE2_ZPHOT_MARA',
# 'PETER_PASSIVE', 'PETER_ANGLE_PA', 'PETER_ELLIP', 'PHOTOZ_ORDER', 'PHOTOZ_NON_COMB',
# 'PHOTOZ_NON_COMB_LOW_68', 'PHOTOZ_NON_COMB_HIGH_68', 'PBZK', 'PBZK_ZPHOT', 'PBZK_MK', 'PBZK_MASS',
# 'SIGNALTONOISERATIO', 'QUASIPETROSIANAREAFRACTION', 'QUASIPETROSIANFRACTION', 'AXISRATIO', 'GINI',
# 'CONCENTRATION', 'BOB_E', 'BOB_GOOD', 'BOB_S0', 'FLUX_GIM2D', 'R_GIM2D', 'ELL_GIM2D', 'PA_GIM2D',
# 'DX_GIM2D', 'DY_GIM2D', 'SERSIC_N_GIM2D', 'R_0P5_GIM2D', 'CHI_GIM2D', 'CECILE_SL_Z',
# 'CECILE_SL_SAT', 'CECILE_SL', 'CECILE_SL_FLAG1', 'CECILE_SL_FLAG2', 'ISOLATED', 'BCG_SCALE',
# 'BCG_R200', 'ALL_P_MEM', 'ALL_GROUP_ID', 'N_GROUP_OVERLAP', 'BEST_P_MEM', 'BEST_GROUP_ID',
# 'ZPHOT', 'TYPE', 'ZPDF', 'PHOTZ_LOW_68', 'PHOTZ_HIGH_68', 'CHI', 'MODD', 'EBV', 'NBFILT',
# 'ZMINCHI2', 'ZL68_MINCHI2', 'ZU68_MINCHI2', 'ZP2', 'CHI2', 'NUV', 'U', 'SUBARU_R', 'SUBARU_I',
# 'J_WFCAM', 'K_WIRCAM', 'M36', 'DNUV', 'DU', 'DJ_WFCAM', 'DK_WIRCAM', 'DM36', 'AUTO_OFFSET',
# 'AUTO_FLAG', 'MNUV', 'MU', 'MB', 'MV', 'MG', 'MR', 'MI', 'MJ', 'MK', 'MNUV_MR', 'SFR_MED',
# 'STR_INF', 'SFR_SUP', 'SSFR_MED', 'SSFR_INF', 'SSFR_SUP', 'MATCH_S', 'MASK_S', 'GOOD_ZPHOT_LENS',
# 'GOOD_ZPHOT_SOURCE')
# That's a lot of info, so let's just pick out the things we care about: galaxy identifier, apparent
# magnitude, size, photo-z.
cos_ident = cosmos_cat.field('ident')
cos_mag_auto = cosmos_cat.field('mag_auto')
cos_flux_rad = cosmos_cat.field('flux_radius')
cos_zphot = cosmos_cat.field('zphot')
# Read in catalogs with fit parameters from Lackner & Gunn.
print "Reading in catalogs of fit parameters"
n_fit_tot = 0
for i_cat in range(n_catfiles):
# Get this catalog
dat = pyfits.getdata(fit_catfiles[i_cat])
n = len(dat)
print "Read in ",n," fit results from file ",fit_catfiles[i_cat]
# Just extract the columns we want, and append to previous if i_cat!=0.
if i_cat == 0:
fit_ident = dat.field('ident')
fit_sersicfit = dat.field('sersicfit')
fit_bulgefit = dat.field('bulgefit')
fit_status = dat.field('mpfit_status')
fit_mag_auto = dat.field('mag_auto')
fit_mad_s = dat.field('mad_sersic_mask')
fit_mad_b = dat.field('mad_dvcb_mask')
fit_dvc_btt = dat.field('dvc_btt')
if i_cat > 0:
fit_ident = np.append(fit_ident, dat.field('galid'))
fit_sersicfit = np.append(fit_sersicfit, dat.field('sersicfit'), axis=0)
fit_bulgefit = np.append(fit_bulgefit, dat.field('bulgefit'), axis=0)
fit_status = np.append(fit_status, dat.field('mpfit_status'), axis=0)
fit_mag_auto = np.append(fit_mag_auto, np.zeros_like(dat.field('galid')), axis=0)
fit_mad_s = np.append(fit_mad_s, dat.field('mad_sersic_mask'), axis=0)
fit_mad_b = np.append(fit_mad_b, dat.field('mad_dvcb_mask'), axis=0)
fit_dvc_btt = np.append(fit_dvc_btt, dat.field('dvc_btt'), axis=0)
# Increment counter.
n_fit_tot += n
# Unfortunately, the files do not have the same column names. Here are their contents -
# Fields in first file: ('IDENT', 'MAG_AUTO', 'FLUX_AUTO', 'MAGERR_AUTO', 'FLUX_RADIUS',
# 'FLUXERR_AUTO', 'KRON_RADIUS', 'MU_MAX', 'MU_CLASS', 'CLEAN', 'GOOD', 'FLAGS', 'SN',
# 'SN_NON_CORR', 'FWHM_IMAGE', 'ALPHA_J2000', 'DELTA_J2000', 'X_IMAGE', 'Y_IMAGE', 'A_IMAGE',
# 'B_IMAGE', 'THETA_IMAGE', 'PETRO_RADIUS', 'D', 'E1_R', 'E2_R', 'E1_RU', 'E2_RU', 'GAMMA1',
# 'GAMMA2', 'FOCUS_MODEL', 'IXX', 'IYY', 'IXY', 'WEIGHT_FUNCT_RADIUS', 'VAR_E1', 'VAR_E2',
# 'BOX', 'SPECZ', 'SPECZ_MARA', 'SPECZ_CLASS', 'SPECZ_ORIGIN', 'GOOD_SPECZ', 'SPECZ_BL_AGN',
# 'FORS2_OBJECT_FLAG', 'MIPS_Z', 'MIPS_LOG_L', 'MIPS_MASS', 'ZEST_TYPE', 'ZEST_BULGE',
# 'ZEST_IRREGULARITY', 'ZEST_ELONGATION', 'ZEST_GINI', 'ZEST_M20', 'ZEST_CONCENTRATION',
# 'ZEST_ASYMMETRY', 'BULGE', 'KT', 'OLD_ZPHOT', 'OLD_GOOD_ZPHOT', 'HL_KPC', 'CHANDRA_GOOD',
# 'CHANDRA_AGN', 'CHANDRA_LX_HARD', 'CHANDRA_LX_SOFT', 'CHANDRA_LX_FULL', 'CHANDRA_ZETA',
# 'CHANDRA_ZSPEC', 'CHANDRA_CLASSZSPEC', 'CHANDRA_MODEL', 'CHANDRA_TYPE', 'CHANDRA_LUSSO_MASS',
# 'XMM_GOOD', 'XMM_AGN', 'XMM_LX_HARD', 'XMM_LX_SOFT', 'XMM_ZETA', 'XMM_ZSPEC',
# 'XMM_CLASSZSPEC', 'XMM_MODEL', 'XMM_TYPE', 'XMM_LUSSO_MASS', 'AGN_GOOD', 'AGN_Z', 'AGN_TYPE',
# 'AGN_LX', 'AGN_LX_SOFT', 'AGN_LX_HARD', 'AGN_LUSSO_MASS', 'BOSS_LRG', 'K_CFHT', 'MATCH_CFHT',
# 'ERR_K_CFHT', 'KEVIN_MSTAR', 'KEVIN_MSTAR2', 'KEVIN_MASSERR', 'KEVIN_QUENCH_FLAG', 'MVIR',
# 'TYPE2_ZPHOT_MARA', 'PHOTOZ_ORDER', 'PHOTOZ_NON_COMB', 'PHOTOZ_NON_COMB_LOW_68',
# 'PHOTOZ_NON_COMB_HIGH_68', 'FLUX_GIM2D', 'R_GIM2D', 'ELL_GIM2D', 'PA_GIM2D', 'DX_GIM2D',
# 'DY_GIM2D', 'SERSIC_N_GIM2D', 'R_0P5_GIM2D', 'CHI_GIM2D', 'CECILE_SL_Z', 'CECILE_SL_SAT',
# 'CECILE_SL', 'CECILE_SL_FLAG1', 'CECILE_SL_FLAG2', 'GROUP_PROJECTION_MMGG',
# 'GROUP_PROJECTION_MMGG_SPECZ', 'MMGG_SCALE', 'P_MEM_BEST', 'GROUP_ID_BEST', 'GROUP_FLAG_BEST',
# 'P_MEM_ALL', 'GROUP_ID_ALL', 'GROUP_FLAG_ALL', 'DIST_BCG_R200', 'MMGG_SCALE_SPECZ',
# 'P_MEM_BEST_SPECZ', 'GROUP_ID_BEST_SPECZ', 'GROUP_FLAG_BEST_SPECZ', 'P_MEM_ALL_SPECZ',
# 'GROUP_ID_ALL_SPECZ', 'GROUP_FLAG_ALL_SPECZ', 'DIST_BCG_R200_SPECZ', 'ZPHOT', 'TYPE', 'ZPDF',
# 'PHOTZ_LOW_68', 'PHOTZ_HIGH_68', 'CHI', 'MODD', 'EBV', 'NBFILT', 'ZMINCHI2', 'ZL68_MINCHI2',
# 'ZU68_MINCHI2', 'ZP2', 'CHI2', 'NUV', 'U', 'B', 'SUBARU_R', 'SUBARU_I', 'J_WFCAM', 'K_WIRCAM',
# 'M36', 'DNUV', 'DU', 'DJ_WFCAM', 'DK_WIRCAM', 'DM36', 'AUTO_OFFSET', 'AUTO_FLAG', 'MNUV',
# 'MU', 'MB', 'MV', 'MG', 'MR', 'MI', 'MJ', 'MK', 'MNUV_MR', 'SFR_MED', 'STR_INF', 'SFR_SUP',
# 'SSFR_MED', 'SSFR_INF', 'SSFR_SUP', 'MATCH_S', 'MASK_S', 'GOOD_ZPHOT_LENS',
# 'GOOD_ZPHOT_SOURCE', 'RA', 'DEC', 'GALID', 'BULGEFIT', 'DISKFIT', 'SERSICFIT', 'CHISQ_BULGE',
# 'CHISQ_DISK', 'CHISQ_SERSIC', 'COVAR_BULGE', 'COVAR_DISK', 'COVAR_SERSIC', 'PERR_BULGE',
# 'PERR_DISK', 'PERR_SERSIC', 'MPFIT_STATUS', 'DOF_BULGE', 'DOF_DISK', 'DOF_SERSIC', 'DOF_DVC',
# 'DOF_EXP', 'EXPFIT', 'DVCFIT', 'CHISQ_EXP', 'CHISQ_DVC', 'PERR_EXP', 'PERR_DVC', 'COVAR_EXP',
# 'COVAR_DVC', 'FRACDEV', 'XCROP', 'YCROP', 'XLEN', 'YLEN', 'DVC_BTT', 'EXP_BTT', 'MAD_SKY',
# 'MAD_SERSIC', 'MAD_SERSIC_MASK', 'MAD_DVCB', 'MAD_DVCB_MASK', 'MAD_EXPB', 'MAD_EXPB_MASK',
# 'MAD_EXP', 'MAD_EXP_MASK', 'MAD_DVC', 'MAD_DVC_MASK', 'CHISQ_BULGE_MASK', 'CHISQ_DISK_MASK',
# 'CHISQ_EXP_MASK', 'CHISQ_SERSIC_MASK', 'CHISQ_DVC_MASK', 'DOF_BULGE_MASK', 'DOF_DISK_MASK',
# 'DOF_EXP_MASK', 'DOF_SERSIC_MASK', 'DOF_DVC_MASK', 'SN_REFF_SERSIC', 'SKY_SERSIC',
# 'SKY_SERSIC_ERR', 'SKY_SERSIC_COVAR', 'DVC_BTT_ERR', 'EXP_BTT_ERR')
print "Read in ",n_fit_tot," from ",n_catfiles," fit files"
print "Making correspondence between IDENT values for all inputs"
cos_ind = np.zeros_like(galsim_ident)
fit_ind = np.zeros_like(galsim_ident)
cos_ident_list = list(cos_ident)
fit_ident_list = list(fit_ident)
n_fail_cos = 0
n_fail_fit = 0
for i in range(n_galsim_cat):
if i % 1000 == 0:
print "... object ",i
if galsim_ident[i] in cos_ident_list:
cos_ind[i] = cos_ident_list.index(galsim_ident[i])
else:
cos_ind[i] = -1
n_fail_cos += 1
if galsim_ident[i] in fit_ident_list:
fit_ind[i] = fit_ident_list.index(galsim_ident[i])
else:
fit_ind[i] = -1
n_fail_fit += 1
print "Number of match failures for COSMOS, fits: ",n_fail_cos, n_fail_fit
print "Rearranging arrays into proper order"
use_ind = (fit_ind >= 0) & (cos_ind >= 0)
out_ident = galsim_ident[use_ind]
print "Actually using ",len(out_ident)
out_mag_auto = cos_mag_auto[cos_ind[use_ind]]
out_flux_rad = cos_flux_rad[cos_ind[use_ind]]
out_zphot = cos_zphot[cos_ind[use_ind]]
test_mag_auto = fit_mag_auto[fit_ind[use_ind]]
print 'Mag auto test:'
print out_mag_auto[0:9]
print test_mag_auto[0:9]
# Rearrange the FIT arrays with fit quantities in the same order as galsim_ident.
out_sersicfit = fit_sersicfit[fit_ind[use_ind],:]
out_bulgefit = fit_bulgefit[fit_ind[use_ind],:]
out_fit_status = fit_status[fit_ind[use_ind],:]
out_fit_mad_s = fit_mad_s[fit_ind[use_ind],:]
out_fit_mad_b = fit_mad_b[fit_ind[use_ind],:]
out_fit_dvc_btt = fit_dvc_btt[fit_ind[use_ind],:]
# Make output data structure with IDENT, photo-z, magnitude, flux_radius, SERSICFIT, BULGEFIT, fit
# status. SERSICFIT and BULGEFIT are actually arrays of fit parameters from single Sersic fits and
# two-component fits, respectively.
tbhdu = pyfits.new_table(pyfits.ColDefs([pyfits.Column(name='IDENT',
format='J',
array=out_ident),
pyfits.Column(name='mag_auto',
format='D',
array=out_mag_auto),
pyfits.Column(name='flux_radius',
format='D',
array=out_flux_rad),
pyfits.Column(name='zphot',
format='D',
array=out_zphot),
pyfits.Column(name='sersicfit',
format='8D',
array=out_sersicfit),
pyfits.Column(name='bulgefit',
format='16D',
array=out_bulgefit),
pyfits.Column(name='fit_status',
format='5J',
array=out_fit_status),
                                         pyfits.Column(name='fit_mad_s',
                                                       format='D',
                                                       array=out_fit_mad_s),
                                         pyfits.Column(name='fit_mad_b',
format='D',
array=out_fit_mad_b),
pyfits.Column(name='fit_dvc_btt',
format='D',
array=out_fit_dvc_btt)]
))
# Write outputs.
print "Writing to file ",out_fitfile
tbhdu.writeto(out_fitfile, clobber=True)
# Write new subset of catalog file.
print "Re-writing to file ",out_catfile
galsim_cat = pyfits.BinTableHDU(galsim_cat[use_ind])
galsim_cat.writeto(out_catfile, clobber=True)
// ETransformationState.java
/**
*
*/
/*
* Cacheonix Systems licenses this file to You under the LGPL 2.1
* (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.cacheonix.org/products/cacheonix/license-lgpl-2.1.htm
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cacheonix.impl.transformer;
/**
 * Enum used for maintaining the state of the reader while reading the bytes in the class file.
*/
public enum ETransformationState {
// States ENUMs
INITIAL_STATE {
/*
* (non-Javadoc)
*
* @see java.lang.Objectr#toString()
*/
public String toString() {
return "INITIAL_STATE";
}
},
READING_CONFIG_ANNOTATION // Found Configuration Annotation - Class level
{
/*
* (non-Javadoc)
*
    * @see java.lang.Object#toString()
*/
public String toString() {
return "READING_CONFIG_ANNOTATION";
}
},
READING_METHOD_ANNOTATION // Found Method Annotation
{
/*
* (non-Javadoc)
*
    * @see java.lang.Object#toString()
*/
public String toString() {
return "READING_METHOD_ANNOTATION";
}
};
/*
* (non-Javadoc)
*
    * @see java.lang.Object#toString()
    */
   public String toString() {
return "UNKNOWN";
}
}
// index.d.ts
// Type definitions for jQuery.raty 2.7.0
// Project: https://github.com/wbotelhos/raty
// Definitions by: Matt Wheatley <http://github.com/terrawheat>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="jquery"/>
interface JQuery {
raty(): JQuery;
raty(options: JQueryRatyOptions): JQuery;
raty(method: string, parameter: any): any;
raty(method: 'score'): number;
raty(method: 'score', score: number): void;
raty(method: 'click', star: number): void;
raty(method: 'readonly', on: boolean): void;
raty(method: 'cancel', on: boolean): void;
raty(method: 'reload'): void;
raty(method: 'set', options: JQueryRatyOptions): void;
raty(method: 'destroy'): JQuery;
raty(method: 'move', number: number): void;
}
interface JQueryRatyOptions {
cancel?: boolean,
cancelClass?: string,
cancelHint?: string,
cancelOff?: string,
cancelOn?: string,
cancelPlace?: string,
click?: (score: number, event: JQueryEventObject) => void,
half?: boolean,
halfShow?: boolean,
hints?: string[],
iconRange?: any[][],
mouseout?: (score: number, event: JQueryEventObject) => void,
mouseover?: (score: number, event: JQueryEventObject) => void,
noRatedMsg?: string,
number?: number,
numberMax?: number,
path?: string,
precision?: boolean,
readOnly?: boolean,
round?: JQueryRatyRoundingOptions,
score?: number,
scoreName?: string,
single?: boolean,
space?: boolean,
starHalf?: string,
starOff?: string,
    starOn?: string,
    target?: string,
    targetFormat?: string,
    targetKeep?: boolean,
    targetScore?: string,
    targetText?: string,
targetType?: string,
starType?: string,
}
interface JQueryRatyRoundingOptions {
down: number,
full: number,
up: number,
}
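// Example usage (a sketch; the element id is illustrative):
//   $('#star').raty({ score: 3, number: 5, click: (score) => console.log(score) });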
# q17.py
"""
Problem: given a matrix `matrix` whose values may be positive, negative or
zero, return the maximum sum over all of its sub-matrices.
For example, for the matrix
-90 48 78
64 -40 64
-81 -7 66
the sub-matrix with the maximum sum is
48 78
-40 64
-7 66
so the answer is 209.

Another example, for the matrix
-1 -1 -1
-1 2 2
-1 -1 -1
the sub-matrix with the maximum sum is
2 2
so the answer is 4.
"""
import sys
from arrandmatrix.q16 import MaxSum
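# Approach (row compression): for every pair of row boundaries (i, j), add
# rows i..j column-wise into a single array, then run the 1-D maximum
# subarray routine (MaxSum from q16) on that array and keep the best result.
# Overall cost is O(rows^2 * cols).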
class MaxMatrixSum:
@classmethod
def get_max_sum(cls, matrix):
if not matrix:
return 0
max_value = -sys.maxsize
for i in range(len(matrix)):
j = i
pre_arr = [0 for _ in range(len(matrix[0]))]
while j < len(matrix):
arr = cls.arr_add(matrix[j], pre_arr)
max_value = max([MaxSum.get_max_sum(arr), max_value])
j += 1
pre_arr = arr
return max_value
@classmethod
def arr_add(cls, arr1, arr2):
return [arr1[i]+arr2[i] for i in range(len(arr1))]
if __name__ == '__main__':
my_matrix = [
[-90, 48, 78],
[64, -40, 64],
[-81, -7, 66]
]
    print(MaxMatrixSum.get_max_sum(my_matrix))
# plugin.py
# Copyright 2016 Hewlett Packard Enterprise Development, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from keystoneauth1 import loading as ks_loading
import netaddr
from neutron_lib.api.definitions import ip_allocation as ipalloc_apidef
from neutron_lib.api.definitions import l2_adjacency as l2adj_apidef
from neutron_lib.api.definitions import network as net_def
from neutron_lib.api.definitions import port as port_def
from neutron_lib.api.definitions import subnet as subnet_def
from neutron_lib.api import validators
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib.exceptions import placement as placement_exc
from neutron_lib.plugins import directory
from novaclient import client as nova_client
from novaclient import exceptions as nova_exc
from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
from neutron._i18n import _
from neutron.db import _resource_extend as resource_extend
from neutron.extensions import segment
from neutron.notifiers import batch_notifier
from neutron.objects import network as net_obj
from neutron.objects import subnet as subnet_obj
from neutron.services.segments import db
from neutron.services.segments import exceptions
from neutron.services.segments import placement_client
LOG = log.getLogger(__name__)
NOVA_API_VERSION = '2.41'
IPV4_RESOURCE_CLASS = 'IPV4_ADDRESS'
SEGMENT_NAME_STUB = 'Neutron segment id %s'
MAX_INVENTORY_UPDATE_RETRIES = 10
@resource_extend.has_resource_extenders
@registry.has_registry_receivers
class Plugin(db.SegmentDbMixin, segment.SegmentPluginBase):
_instance = None
supported_extension_aliases = ["segment", "ip_allocation",
l2adj_apidef.ALIAS,
"standard-attr-segment",
"subnet-segmentid-writable",
'segments-peer-subnet-host-routes']
__native_pagination_support = True
__native_sorting_support = True
__filter_validation_support = True
def __init__(self):
self.nova_updater = NovaSegmentNotifier()
self.segment_host_routes = SegmentHostRoutes()
@staticmethod
@resource_extend.extends([net_def.COLLECTION_NAME])
def _extend_network_dict_binding(network_res, network_db):
if not directory.get_plugin('segments'):
return
# TODO(carl_baldwin) Make this work with service subnets when
# it's a thing.
is_adjacent = (not network_db.subnets or
not network_db.subnets[0].segment_id)
network_res[l2adj_apidef.L2_ADJACENCY] = is_adjacent
@staticmethod
@resource_extend.extends([subnet_def.COLLECTION_NAME])
def _extend_subnet_dict_binding(subnet_res, subnet_db):
subnet_res['segment_id'] = subnet_db.get('segment_id')
@staticmethod
@resource_extend.extends([port_def.COLLECTION_NAME])
def _extend_port_dict_binding(port_res, port_db):
if not directory.get_plugin('segments'):
return
value = ipalloc_apidef.IP_ALLOCATION_IMMEDIATE
if port_db.get('ip_allocation'):
value = port_db.get('ip_allocation')
port_res[ipalloc_apidef.IP_ALLOCATION] = value
@classmethod
def get_instance(cls):
if cls._instance is None:
cls._instance = cls()
return cls._instance
@registry.receives(resources.SEGMENT, [events.BEFORE_DELETE])
def _prevent_segment_delete_with_subnet_associated(
self, resource, event, trigger, context, segment,
for_net_delete=False):
"""Raise exception if there are any subnets associated with segment."""
if for_net_delete:
# don't check if this is a part of a network delete operation
return
segment_id = segment['id']
subnets = subnet_obj.Subnet.get_objects(context,
segment_id=segment_id)
subnet_ids = [s.id for s in subnets]
if subnet_ids:
reason = _("The segment is still associated with subnet(s) "
"%s") % ", ".join(subnet_ids)
raise exceptions.SegmentInUse(segment_id=segment_id,
reason=reason)
class Event(object):
def __init__(self, method, segment_ids, total=None, reserved=None,
segment_host_mappings=None, host=None):
self.method = method
if isinstance(segment_ids, set):
self.segment_ids = segment_ids
else:
self.segment_id = segment_ids
self.total = total
self.reserved = reserved
self.segment_host_mappings = segment_host_mappings
self.host = host
@registry.has_registry_receivers
class NovaSegmentNotifier(object):
def __init__(self):
self.p_client, self.n_client = self._get_clients()
self.batch_notifier = batch_notifier.BatchNotifier(
cfg.CONF.send_events_interval, self._send_notifications)
def _get_clients(self):
p_client = placement_client.PlacementAPIClient()
n_auth = ks_loading.load_auth_from_conf_options(cfg.CONF, 'nova')
n_session = ks_loading.load_session_from_conf_options(
cfg.CONF,
'nova',
auth=n_auth)
extensions = [
ext for ext in nova_client.discover_extensions(NOVA_API_VERSION)
if ext.name == "server_external_events"]
n_client = nova_client.Client(
NOVA_API_VERSION,
session=n_session,
region_name=cfg.CONF.nova.region_name,
endpoint_type=cfg.CONF.nova.endpoint_type,
extensions=extensions)
return p_client, n_client
def _send_notifications(self, batched_events):
for event in batched_events:
try:
event.method(event)
except placement_exc.PlacementEndpointNotFound:
LOG.debug('Placement API was not found when trying to '
'update routed networks IPv4 inventories')
return
def _notify_subnet(self, context, subnet, segment_id):
total, reserved = self._calculate_inventory_total_and_reserved(subnet)
if total:
segment_host_mappings = net_obj.SegmentHostMapping.get_objects(
context, segment_id=segment_id)
self.batch_notifier.queue_event(Event(
self._create_or_update_nova_inventory, segment_id, total=total,
reserved=reserved,
segment_host_mappings=segment_host_mappings))
@registry.receives(resources.SUBNET, [events.AFTER_CREATE])
def _notify_subnet_created(self, resource, event, trigger, context,
subnet, **kwargs):
segment_id = subnet.get('segment_id')
if not segment_id or subnet['ip_version'] != constants.IP_VERSION_4:
return
self._notify_subnet(context, subnet, segment_id)
def _create_or_update_nova_inventory(self, event):
try:
self._update_nova_inventory(event)
except placement_exc.PlacementResourceProviderNotFound:
self._create_nova_inventory(event.segment_id, event.total,
event.reserved,
event.segment_host_mappings)
def _update_nova_inventory(self, event):
for count in range(MAX_INVENTORY_UPDATE_RETRIES):
ipv4_inventory = self.p_client.get_inventory(event.segment_id,
IPV4_RESOURCE_CLASS)
if event.total:
ipv4_inventory['total'] += event.total
if event.reserved:
ipv4_inventory['reserved'] += event.reserved
try:
self.p_client.update_inventory(event.segment_id,
ipv4_inventory,
IPV4_RESOURCE_CLASS)
return
except placement_exc.PlacementInventoryUpdateConflict:
LOG.debug('Re-trying to update Nova IPv4 inventory for '
'routed network segment: %s', event.segment_id)
LOG.error('Failed to update Nova IPv4 inventory for routed '
'network segment: %s', event.segment_id)
def _get_nova_aggregate_uuid(self, aggregate):
try:
return aggregate.uuid
except AttributeError:
with excutils.save_and_reraise_exception():
LOG.exception("uuid was not returned as part of the aggregate "
"object which indicates that the Nova API "
"backend does not support microversions. Ensure "
"that the compute endpoint in the service "
"catalog points to the v2.1 API.")
def _create_nova_inventory(self, segment_id, total, reserved,
segment_host_mappings):
name = SEGMENT_NAME_STUB % segment_id
resource_provider = {'name': name, 'uuid': segment_id}
self.p_client.create_resource_provider(resource_provider)
aggregate = self.n_client.aggregates.create(name, None)
aggregate_uuid = self._get_nova_aggregate_uuid(aggregate)
self.p_client.associate_aggregates(segment_id, [aggregate_uuid])
for mapping in segment_host_mappings:
self.n_client.aggregates.add_host(aggregate.id, mapping.host)
ipv4_inventory = {'total': total, 'reserved': reserved,
'min_unit': 1, 'max_unit': 1, 'step_size': 1,
'allocation_ratio': 1.0,
'resource_class': IPV4_RESOURCE_CLASS}
self.p_client.create_inventory(segment_id, ipv4_inventory)
def _calculate_inventory_total_and_reserved(self, subnet):
total = 0
reserved = 0
allocation_pools = subnet.get('allocation_pools') or []
for pool in allocation_pools:
total += int(netaddr.IPAddress(pool['end']) -
netaddr.IPAddress(pool['start'])) + 1
if total:
if subnet['gateway_ip']:
total += 1
reserved += 1
if subnet['enable_dhcp']:
reserved += 1
return total, reserved
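    # Worked example (a sketch): a subnet with a single allocation pool
    # 10.0.0.10-10.0.0.19 (10 addresses), a gateway_ip set and enable_dhcp
    # True gives total = 10 + 1 = 11 and reserved = 1 (gateway) + 1 (DHCP) = 2.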
@registry.receives(resources.SUBNET, [events.AFTER_UPDATE])
def _notify_subnet_updated(self, resource, event, trigger, context,
subnet, original_subnet, **kwargs):
segment_id = subnet.get('segment_id')
original_segment_id = original_subnet.get('segment_id')
if not segment_id or subnet['ip_version'] != constants.IP_VERSION_4:
return
if original_segment_id != segment_id:
# Migration to routed network, treat as create
self._notify_subnet(context, subnet, segment_id)
return
filters = {'segment_id': [segment_id],
'ip_version': [constants.IP_VERSION_4]}
if not subnet['allocation_pools']:
plugin = directory.get_plugin()
alloc_pools = [s['allocation_pools'] for s in
plugin.get_subnets(context, filters=filters)]
if not any(alloc_pools):
self.batch_notifier.queue_event(Event(
self._delete_nova_inventory, segment_id))
return
original_total, original_reserved = (
self._calculate_inventory_total_and_reserved(original_subnet))
updated_total, updated_reserved = (
self._calculate_inventory_total_and_reserved(subnet))
total = updated_total - original_total
reserved = updated_reserved - original_reserved
if total or reserved:
segment_host_mappings = None
if not original_subnet['allocation_pools']:
segment_host_mappings = net_obj.SegmentHostMapping.get_objects(
context, segment_id=segment_id)
self.batch_notifier.queue_event(Event(
self._create_or_update_nova_inventory, segment_id, total=total,
reserved=reserved,
segment_host_mappings=segment_host_mappings))
@registry.receives(resources.SUBNET, [events.AFTER_DELETE])
def _notify_subnet_deleted(self, resource, event, trigger, context,
subnet, **kwargs):
segment_id = subnet.get('segment_id')
if not segment_id or subnet['ip_version'] != constants.IP_VERSION_4:
return
total, reserved = self._calculate_inventory_total_and_reserved(subnet)
if total:
filters = {'segment_id': [segment_id], 'ip_version': [4]}
plugin = directory.get_plugin()
if plugin.get_subnets_count(context, filters=filters) > 0:
self.batch_notifier.queue_event(Event(
self._update_nova_inventory, segment_id, total=-total,
reserved=-reserved))
else:
self.batch_notifier.queue_event(Event(
self._delete_nova_inventory, segment_id))
def _get_aggregate_id(self, segment_id):
aggregate_uuid = self.p_client.list_aggregates(
segment_id)['aggregates'][0]
aggregates = self.n_client.aggregates.list()
for aggregate in aggregates:
nc_aggregate_uuid = self._get_nova_aggregate_uuid(aggregate)
if nc_aggregate_uuid == aggregate_uuid:
return aggregate.id
def _delete_nova_inventory(self, event):
aggregate_id = self._get_aggregate_id(event.segment_id)
aggregate = self.n_client.aggregates.get_details(
aggregate_id)
for host in aggregate.hosts:
self.n_client.aggregates.remove_host(aggregate_id,
host)
self.n_client.aggregates.delete(aggregate_id)
self.p_client.delete_resource_provider(event.segment_id)
@registry.receives(resources.SEGMENT_HOST_MAPPING, [events.AFTER_CREATE])
def _notify_host_addition_to_aggregate(self, resource, event, trigger,
context, host, current_segment_ids,
**kwargs):
subnets = subnet_obj.Subnet.get_objects(context,
segment_id=current_segment_ids)
segment_ids = {s.segment_id for s in subnets}
self.batch_notifier.queue_event(Event(self._add_host_to_aggregate,
segment_ids, host=host))
def _add_host_to_aggregate(self, event):
for segment_id in event.segment_ids:
try:
aggregate_id = self._get_aggregate_id(segment_id)
except placement_exc.PlacementAggregateNotFound:
LOG.info('When adding host %(host)s, aggregate not found '
'for routed network segment %(segment_id)s',
{'host': event.host, 'segment_id': segment_id})
continue
try:
self.n_client.aggregates.add_host(aggregate_id, event.host)
except nova_exc.Conflict:
LOG.info('Host %(host)s already exists in aggregate for '
'routed network segment %(segment_id)s',
{'host': event.host, 'segment_id': segment_id})
@registry.receives(resources.PORT,
[events.AFTER_CREATE, events.AFTER_DELETE])
def _notify_port_created_or_deleted(self, resource, event, trigger,
context, port, **kwargs):
if not self._does_port_require_nova_inventory_update(port):
return
ipv4_subnets_number, segment_id = (
self._get_ipv4_subnets_number_and_segment_id(port, context))
if segment_id:
if event == events.AFTER_DELETE:
ipv4_subnets_number = -ipv4_subnets_number
self.batch_notifier.queue_event(Event(self._update_nova_inventory,
segment_id, reserved=ipv4_subnets_number))
@registry.receives(resources.PORT, [events.AFTER_UPDATE])
def _notify_port_updated(self, resource, event, trigger, context,
**kwargs):
port = kwargs.get('port')
original_port = kwargs.get('original_port')
does_original_port_require_nova_inventory_update = (
self._does_port_require_nova_inventory_update(original_port))
does_port_require_nova_inventory_update = (
self._does_port_require_nova_inventory_update(port))
if not (does_original_port_require_nova_inventory_update or
does_port_require_nova_inventory_update):
return
original_port_ipv4_subnets_number, segment_id = (
self._get_ipv4_subnets_number_and_segment_id(original_port,
context))
if not segment_id:
return
port_ipv4_subnets_number = len(self._get_ipv4_subnet_ids(port))
if not does_original_port_require_nova_inventory_update:
original_port_ipv4_subnets_number = 0
if not does_port_require_nova_inventory_update:
port_ipv4_subnets_number = 0
update = port_ipv4_subnets_number - original_port_ipv4_subnets_number
if update:
self.batch_notifier.queue_event(Event(self._update_nova_inventory,
segment_id, reserved=update))
def _get_ipv4_subnets_number_and_segment_id(self, port, context):
ipv4_subnet_ids = self._get_ipv4_subnet_ids(port)
if not ipv4_subnet_ids:
return 0, None
subnet = subnet_obj.Subnet.get_object(context, id=ipv4_subnet_ids[0])
if subnet and subnet.segment_id:
return len(ipv4_subnet_ids), subnet.segment_id
return 0, None
def _does_port_require_nova_inventory_update(self, port):
device_owner = port.get('device_owner')
if (device_owner.startswith(constants.DEVICE_OWNER_COMPUTE_PREFIX) or
device_owner == constants.DEVICE_OWNER_DHCP):
return False
return True
def _get_ipv4_subnet_ids(self, port):
ipv4_subnet_ids = []
for ip in port.get('fixed_ips', []):
if netaddr.IPAddress(
ip['ip_address']).version == constants.IP_VERSION_4:
ipv4_subnet_ids.append(ip['subnet_id'])
return ipv4_subnet_ids
@registry.has_registry_receivers
class SegmentHostRoutes(object):
def _get_subnets(self, context, network_id):
return subnet_obj.Subnet.get_objects(context, network_id=network_id)
def _calculate_routed_network_host_routes(self, context, ip_version,
network_id=None, subnet_id=None,
segment_id=None,
host_routes=None,
gateway_ip=None,
old_gateway_ip=None,
deleted_cidr=None):
"""Calculate host routes for routed network.
This method is used to calculate the host routes for routed networks
both when handling the user create or update request and when making
updates to subnets on the network in response to events: AFTER_CREATE
and AFTER_DELETE.
:param ip_version: IP version (4/6).
:param network_id: Network ID.
:param subnet_id: UUID of the subnet.
:param segment_id: Segment ID associated with the subnet.
:param host_routes: Current host_routes of the subnet.
:param gateway_ip: The subnet's gateway IP address.
:param old_gateway_ip: The old gateway IP address of the subnet when it
is changed on update.
:param deleted_cidr: The cidr of a deleted subnet.
:returns: Host routes with routes for the other subnets on the routed
network appended, unless a route to the destination already
exists.
"""<|fim▁hole|> dest_ip_nets = [netaddr.IPNetwork(route['destination']) for
route in host_routes]
# Drop routes to the deleted cidr, when the subnet was deleted.
if deleted_cidr:
delete_route = {'destination': deleted_cidr, 'nexthop': gateway_ip}
if delete_route in host_routes:
host_routes.remove(delete_route)
for subnet in self._get_subnets(context, network_id):
if (subnet.id == subnet_id or subnet.segment_id == segment_id or
subnet.ip_version != ip_version):
continue
subnet_ip_net = netaddr.IPNetwork(subnet.cidr)
if old_gateway_ip:
old_route = {'destination': str(subnet.cidr),
'nexthop': old_gateway_ip}
if old_route in host_routes:
host_routes.remove(old_route)
dest_ip_nets.remove(subnet_ip_net)
if gateway_ip:
# Use netaddr here in case the user provided a summary route
# (supernet route). I.e subnet.cidr = 10.0.1.0/24 and
# the user provided a host route for 10.0.0.0/16. We don't
# need to append a route in this case.
if not any(subnet_ip_net in ip_net for ip_net in dest_ip_nets):
host_routes.append({'destination': subnet.cidr,
'nexthop': gateway_ip})
return host_routes
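# Illustrative example (editor's note): with this subnet 10.0.1.0/24 and a
# sibling subnet 10.0.2.0/24 on the same routed network, the result gains
# {'destination': '10.0.2.0/24', 'nexthop': gateway_ip} unless an existing
# route (e.g. a 10.0.0.0/16 supernet) already covers that destination.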
def _host_routes_need_update(self, host_routes, calc_host_routes):
"""Compare host routes and calculated host routes
:param host_routes: Current host routes
:param calc_host_routes: Host routes + calculated host routes for
routed network
:returns True if host_routes and calc_host_routes are not equal
"""
return ((set((route['destination'],
route['nexthop']) for route in host_routes) !=
set((route['destination'],
route['nexthop']) for route in calc_host_routes)))
def _update_routed_network_host_routes(self, context, network_id,
deleted_cidr=None):
"""Update host routes on subnets on a routed network after event
Host routes on the subnets on a routed network may need updates after
any CREATE or DELETE event.
:param network_id: Network ID
:param deleted_cidr: The cidr of a deleted subnet.
"""
for subnet in self._get_subnets(context, network_id):
host_routes = [{'destination': str(route.destination),
'nexthop': route.nexthop}
for route in subnet.host_routes]
calc_host_routes = self._calculate_routed_network_host_routes(
context=context,
ip_version=subnet.ip_version,
network_id=subnet.network_id,
subnet_id=subnet.id,
segment_id=subnet.segment_id,
host_routes=copy.deepcopy(host_routes),
gateway_ip=subnet.gateway_ip,
deleted_cidr=deleted_cidr)
if self._host_routes_need_update(host_routes, calc_host_routes):
LOG.debug(
"Updating host routes for subnet %s on routed network %s",
subnet.id, subnet.network_id)
plugin = directory.get_plugin()
plugin.update_subnet(context, subnet.id,
{'subnet': {
'host_routes': calc_host_routes}})
@registry.receives(resources.SUBNET, [events.BEFORE_CREATE])
def host_routes_before_create(self, resource, event, trigger, context,
subnet, **kwargs):
segment_id = subnet.get('segment_id')
gateway_ip = subnet.get('gateway_ip')
if validators.is_attr_set(subnet.get('host_routes')):
host_routes = subnet.get('host_routes')
else:
host_routes = []
if segment_id is not None and validators.is_attr_set(gateway_ip):
calc_host_routes = self._calculate_routed_network_host_routes(
context=context,
ip_version=netaddr.IPNetwork(subnet['cidr']).version,
network_id=subnet['network_id'],
segment_id=subnet['segment_id'],
host_routes=copy.deepcopy(host_routes),
gateway_ip=gateway_ip)
if (not host_routes or
self._host_routes_need_update(host_routes,
calc_host_routes)):
subnet['host_routes'] = calc_host_routes
@registry.receives(resources.SUBNET, [events.BEFORE_UPDATE])
def host_routes_before_update(self, resource, event, trigger, **kwargs):
context = kwargs['context']
subnet, original_subnet = kwargs['request'], kwargs['original_subnet']
segment_id = subnet.get('segment_id', original_subnet['segment_id'])
gateway_ip = subnet.get('gateway_ip', original_subnet['gateway_ip'])
host_routes = subnet.get('host_routes', original_subnet['host_routes'])
if (segment_id and (host_routes != original_subnet['host_routes'] or
gateway_ip != original_subnet['gateway_ip'])):
calc_host_routes = self._calculate_routed_network_host_routes(
context=context,
ip_version=netaddr.IPNetwork(original_subnet['cidr']).version,
network_id=original_subnet['network_id'],
segment_id=segment_id,
host_routes=copy.deepcopy(host_routes),
gateway_ip=gateway_ip,
old_gateway_ip=original_subnet['gateway_ip'] if (
gateway_ip != original_subnet['gateway_ip']) else None)
if self._host_routes_need_update(host_routes, calc_host_routes):
subnet['host_routes'] = calc_host_routes
@registry.receives(resources.SUBNET, [events.AFTER_CREATE])
def host_routes_after_create(self, resource, event, trigger, **kwargs):
context = kwargs['context']
subnet = kwargs['subnet']
# If there are other subnets on the network and subnet has segment_id
# ensure host routes for all subnets are updated.
if (len(self._get_subnets(context, subnet['network_id'])) > 1 and
subnet.get('segment_id')):
self._update_routed_network_host_routes(context,
subnet['network_id'])
@registry.receives(resources.SUBNET, [events.AFTER_DELETE])
def host_routes_after_delete(self, resource, event, trigger, context,
subnet, **kwargs):
# If this is a routed network, remove any routes to this subnet on
# this network's remaining subnets.
if subnet.get('segment_id'):
self._update_routed_network_host_routes(
context, subnet['network_id'], deleted_cidr=subnet['cidr'])<|fim▁end|> | if host_routes is None:
host_routes = [] |
<|file_name|>run_CNN_SAT.py<|end_file_name|><|fim▁begin|># Copyright 2015 Tianchuan Du University of Delaware
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABILITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
import gzip
import numpy
import os
import sys
import theano
from theano.tensor.shared_randomstreams import RandomStreams
import time
from io_func.model_io import _nnet2file, _file2nnet, _cfg2file, log
from learning.sgd import train_sgd_verbose, validate_by_minibatch_verbose
from models.cnn_sat import CNN_SAT
import theano.tensor as T
from utils.network_config import NetworkConfig
from utils.utils import parse_arguments
# Implements the Speaker Adaptive Training of DNNs proposed in the following papers:
# [1] Yajie Miao, Hao Zhang, Florian Metze. "Towards Speaker Adaptive Training of Deep
# Neural Network Acoustic Models". Interspeech 2014.
# [2] Yajie Miao, Lu Jiang, Hao Zhang, Florian Metze. "Improvements to Speaker Adaptive
# Training of Deep Neural Networks". SLT 2014.
if __name__ == '__main__':
# check the arguments
arg_elements = [sys.argv[i] for i in range(1, len(sys.argv))]
arguments = parse_arguments(arg_elements)
required_arguments = ['train_data', 'valid_data', 'si_nnet_spec', 'si_conv_nnet_spec', 'wdir', 'adapt_nnet_spec', 'init_model']
for arg in required_arguments:
if arg not in arguments:
print "Error: the argument %s has to be specified" % (arg); exit(1)
# mandatory arguments
train_data_spec = arguments['train_data']; valid_data_spec = arguments['valid_data']
si_nnet_spec = arguments['si_nnet_spec']
si_conv_nnet_spec = arguments['si_conv_nnet_spec']
adapt_nnet_spec = arguments['adapt_nnet_spec'];
wdir = arguments['wdir']
init_model_file = arguments['init_model']
# parse network configuration from arguments, and initialize data reading
cfg_si = NetworkConfig(); cfg_si.model_type = 'CNN'
cfg_si.parse_config_cnn(arguments, '10:' + si_nnet_spec, si_conv_nnet_spec)
cfg_si.init_data_reading(train_data_spec, valid_data_spec)
# parse the structure of the i-vector network
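# Note (editor's): the loop below appears to drop the last component of
# adapt_nnet_spec and append ':0', so the adaptation (i-vector) network is
# built with a linear output layer instead of a softmax.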
cfg_adapt = NetworkConfig()
net_split = adapt_nnet_spec.split(':')
adapt_nnet_spec = ''
for n in xrange(len(net_split) - 1):
adapt_nnet_spec += net_split[n] + ':'
cfg_adapt.parse_config_dnn(arguments, adapt_nnet_spec + '0')
numpy_rng = numpy.random.RandomState(89677)
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
log('> ... initializing the model')
# setup up the model
dnn = CNN_SAT(numpy_rng=numpy_rng, theano_rng = theano_rng, cfg_si = cfg_si, cfg_adapt = cfg_adapt)
# read the initial DNN (the SI DNN which has been well trained)
_file2nnet(dnn.cnn_si.layers, filename = init_model_file)
# optionally warm-start both networks from locally saved parameters instead:
# _file2nnet(dnn.cnn_si.layers, filename = 'BKUP/nnet.param.si')
# _file2nnet(dnn.dnn_adapt.layers, filename = 'BKUP/nnet.param.adapt')
# get the training and validation functions for adaptation network training
dnn.params = dnn.dnn_adapt.params # only update the parameters of the i-vector nnet
dnn.delta_params = dnn.dnn_adapt.delta_params
log('> ... getting the finetuning functions for iVecNN')
train_fn, valid_fn = dnn.build_finetune_functions(
(cfg_si.train_x, cfg_si.train_y), (cfg_si.valid_x, cfg_si.valid_y),
batch_size = cfg_adapt.batch_size)
log('> ... learning the adaptation network')
cfg = cfg_adapt
while (cfg.lrate.get_rate() != 0):
# one epoch of sgd training
# train_error = train_sgd_verbose(train_fn, cfg_si.train_sets, cfg_si.train_xy,
# cfg.batch_size, cfg.lrate.get_rate(), cfg.momentum)
# log('> epoch %d, training error %f ' % (cfg.lrate.epoch, 100*numpy.mean(train_error)) + '(%)')
# validation
valid_error = validate_by_minibatch_verbose(valid_fn, cfg_si.valid_sets, cfg_si.valid_xy, cfg.batch_size)
log('> epoch %d, lrate %f, validation error %f ' % (cfg.lrate.epoch, cfg.lrate.get_rate(), 100*numpy.mean(valid_error)) + '(%)')
cfg.lrate.get_next_rate(current_error = 100 * numpy.mean(valid_error))
cfg.lrate.rate = 0
# save the model and network configuration<|fim▁hole|> input_factor = cfg_si.input_dropout_factor, factor = cfg_si.dropout_factor)
log('> ... the final PDNN model parameter is ' + cfg.param_output_file + ' (.si, .adapt)')
if cfg.cfg_output_file != '':
_cfg2file(cfg_adapt, filename=cfg.cfg_output_file + '.adapt')
_cfg2file(cfg_si, filename=cfg.cfg_output_file + '.si')
log('> ... the final PDNN model config is ' + cfg.cfg_output_file + ' (.si, .adapt)')
# output the model into Kaldi-compatible format
if cfg.kaldi_output_file != '':
dnn.cnn_si.fc_dnn.write_model_to_kaldi(cfg.kaldi_output_file + '.si')
dnn.dnn_adapt.write_model_to_kaldi(cfg.kaldi_output_file + '.adapt', with_softmax = False)
log('> ... the final Kaldi model is ' + cfg.kaldi_output_file + ' (.si, .adapt)')<|fim▁end|> | if cfg.param_output_file != '':
_nnet2file(dnn.dnn_adapt.layers, filename = cfg.param_output_file + '.adapt',
input_factor = cfg_adapt.input_dropout_factor, factor = cfg_adapt.dropout_factor)
_nnet2file(dnn.cnn_si.layers, filename = cfg.param_output_file + '.si', |
<|file_name|>string_view.py<|end_file_name|><|fim▁begin|>'''
sc_studio.string_view
Author: Ming Tsang
Copyright (c) 2014-2015 HKUST SmartCar Team
Refer to LICENSE for details
'''
import binascii
import logging
import time
import tkinter
from tkinter import Tk, Text
from sc_studio import config
from sc_studio.view import View
class StringView(View):
def __init__(self, params):
super(StringView, self).__init__(params)
self._tk = Tk()
self._text = Text(self._tk, bg = config.COL_GREY_900,
fg = config.COL_GREY_100)
self._tk.title("String view")
self._text.pack(side = tkinter.LEFT, fill = tkinter.Y)
self._tk.protocol("WM_DELETE_WINDOW", self.on_press_close)
self._file = open("string_" + str(int(time.time() * 1000)) + ".txt", "w")
def run(self):
super(StringView, self).run()
self._tk.mainloop()
def on_new_input(self):
try:
hex_str = self.get_input()
line = self._get_line(hex_str)
except Exception as e:
logging.debug(str(e))
return
string = line.decode("UTF-8")
self._text.insert(tkinter.END, string)
self._text.insert(tkinter.END, '\n')
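# Trim lines from the top until the view reaches the bottom, so the Text
# widget does not grow without bound.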
while self._text.yview()[1] != 1.0:<|fim▁hole|> self._text.delete(1.0, 2.0)
self._file.write(time.strftime("[%x %X] "))
self._file.write(string)
self._file.write('\n')
def on_dismiss(self):
self._tk.after_idle(self.on_press_close)
def on_press_close(self):
self._tk.destroy()
self.join_io_thread()
def _get_line(self, hex_str):
try:
return binascii.unhexlify(hex_str)
except TypeError as e:
logging.debug(str(e))
return<|fim▁end|> | |
<|file_name|>entropy_sample.rs<|end_file_name|><|fim▁begin|>extern crate rust_ml;
use rust_ml::junk::minamoto::entropy;
mod even_odd_class;
fn main() {
let set = vec!((2i32, even_odd_class::EvenOddClass::Even),<|fim▁hole|> (9i32, even_odd_class::EvenOddClass::Odd));
println!("2 {}", entropy::entropy(&set));
println!("gain2 odd {}", entropy::gain(&set, even_odd_class::EvenOddClass::Odd));
}<|fim▁end|> | (4i32, even_odd_class::EvenOddClass::Even),
(6i32, even_odd_class::EvenOddClass::Even),
(8i32, even_odd_class::EvenOddClass::Even), |
<|file_name|>test_stacks.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.orchestration import base
from tempest.common.utils import data_utils
from tempest.openstack.common import log as logging
from tempest.test import attr
LOG = logging.getLogger(__name__)
class StacksTestJSON(base.BaseOrchestrationTest):
_interface = 'json'
empty_template = "HeatTemplateFormatVersion: '2012-12-12'\n"
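# A minimal valid template: only the format-version header, no resources.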
@classmethod
def setUpClass(cls):
super(StacksTestJSON, cls).setUpClass()
cls.client = cls.orchestration_client
@attr(type='smoke')
def test_stack_list_responds(self):
resp, stacks = self.client.list_stacks()
self.assertEqual('200', resp['status'])
self.assertIsInstance(stacks, list)
@attr(type='smoke')
def test_stack_crud_no_resources(self):
stack_name = data_utils.rand_name('heat')
# create the stack
stack_identifier = self.create_stack(
stack_name, self.empty_template)
stack_id = stack_identifier.split('/')[1]
# wait for create complete (with no resources it should be instant)
self.client.wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE')
# check for stack in list
resp, stacks = self.client.list_stacks()
list_ids = list([stack['id'] for stack in stacks])
self.assertIn(stack_id, list_ids)
# fetch the stack
resp, stack = self.client.get_stack(stack_identifier)
self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
# fetch the stack by name<|fim▁hole|> resp, stack = self.client.get_stack(stack_id)
self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
# delete the stack
resp = self.client.delete_stack(stack_identifier)
self.assertEqual('204', resp[0]['status'])<|fim▁end|> | resp, stack = self.client.get_stack(stack_name)
self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
# fetch the stack by id |
<|file_name|>create_form_test_template.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class CreateFormTestTemplate(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "http://kf.kbtdev.org/"
self.verificationErrors = []
self.accept_next_alert = True
def test_create_form_test_template(self):
driver = self.driver
driver.get(self.base_url + "")
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms-header__title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
self.assertFalse(self.is_element_present(By.CSS_SELECTOR, ".forms__card"))
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".forms-empty__button"))
driver.find_element_by_css_selector(".forms-empty__button").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms__addform__start"): break
except: pass
time.sleep(1)
else: self.fail("time out")
# Click the form creation button using JavaScript to avoid element not visible errors.
# WARNING: The 'runScript' command doesn't export to python, so a manual edit is necessary.
# ERROR: Caught exception [ERROR: Unsupported command [runScript | $(".forms__addform__start").click(); | ]]
# Manual replacement for the unexported command (assumes jQuery is available on the page):
driver.execute_script('$(".forms__addform__start").click();')
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".form-title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".form-title").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".survey-header__title input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".survey-header__title input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".survey-header__title input").send_keys("Selenium test form title.", Keys.ENTER)
self.assertEqual("Selenium test form title.", driver.find_element_by_css_selector(".form-title").text)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".survey-editor .fa-plus"))
driver.find_element_by_css_selector(".survey-editor .fa-plus").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".row__questiontypes__form > input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".row__questiontypes__form > input").send_keys("Selenium test question label.", Keys.TAB)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".row__questiontypes__form > button"))
driver.find_element_by_css_selector(".row__questiontypes__form > button").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".questiontypelist__item[data-menu-item=\"select_one\"]"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".questiontypelist__item[data-menu-item=\"select_one\"]").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(1) span"): break
except: pass<|fim▁hole|> time.sleep(1)
else: self.fail("time out")
self.assertEqual("Selenium test question label.", driver.find_element_by_css_selector(".card__header-title").text)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) .editable-wrapper span:first-child").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(1) input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) input").send_keys("Selenium test question choice 1.", Keys.ENTER)
self.assertEqual("Selenium test question choice 1.", driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(1) span").text)
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(2) span"))
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) span").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".card--selectquestion__expansion li:nth-child(2) input"): break
except: pass
time.sleep(1)
else: self.fail("time out")
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) input").send_keys(Keys.SHIFT, Keys.END, Keys.SHIFT, Keys.DELETE)
driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) input").send_keys("Selenium test question choice 2.", Keys.ENTER)
self.assertEqual("Selenium test question choice 2.", driver.find_element_by_css_selector(".card--selectquestion__expansion li:nth-child(2) span").text)
self.assertTrue(self.is_element_present(By.ID, "save"))
driver.find_element_by_id("save").click()
for i in range(60):
try:
if self.is_element_present(By.CSS_SELECTOR, ".forms__card__title"): break
except: pass
time.sleep(1)
else: self.fail("time out")
self.assertEqual("Selenium test form title.", driver.find_element_by_css_selector(".forms__card__title").text)
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>scrape_season.py<|end_file_name|><|fim▁begin|>import scrapenhl_globals
import scrape_game
def scrape_games(season, games, force_overwrite = False, pause = 1, marker = 10):
"""
Scrapes the specified games.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
games : iterable of ints (e.g. list)
The game id. This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
force_overwrite : bool
If True, will overwrite previously scraped raw html files. If False, will not scrape if files are already found.
pause : float or int
The time to pause between requests to the NHL API. Defaults to 1 second
marker : float or int
The number of times to print progress. 10 will print every 10%; 20 every 5%.
"""
import time
import datetime
starttime = time.time()
games = sorted(list(games))
marker_i = [len(games)//marker * i for i in range(marker)]
marker_i[-1] = len(games) - 1
marker_i_set = set(marker_i)
for i in range(len(games)):
game = games[i]
newscrape = scrape_game.scrape_game(season, game, force_overwrite)
if newscrape: #only sleep if had to scrape a new game
time.sleep(pause)
if i in marker_i_set:
print('Done through', season, game, ' ~ ', round((marker_i.index(i)) * 100/marker), '% in',
str(datetime.timedelta(seconds = time.time() - starttime)))
print('Done scraping games in', season)
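# Example usage (hypothetical): scrape_games(2016, range(20001, 20011))
# scrapes the first ten regular-season games of 2016-17.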
def scrape_season(season, startgame = None, endgame = None, force_overwrite = False, pause = 1):
"""
Scrapes games for the specified season.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
startgame : int
The game id at which scraping will start. For example, midway through a season, this can be the last game
scraped.
This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
endgame : int
The game id at which scraping will stop (inclusive). Games with higher ids are skipped.
force_overwrite : bool
If True, will overwrite previously scraped raw html files. If False, will not scrape if files are already found.
pause : float or int
The time to pause between requests to the NHL API. Defaults to 1 second
"""
if season != 2012:
games = [20000 + x for x in range(1, 1231)]
else:
games = [20000 + x for x in range(1, 721)]
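# Playoff game ids follow the pattern 30RSG: round R, series S within the round, game G.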
for round in range(1, 5):
for series in range(1, 8//round + 1):
for game in range(1, 8):
games.append(int('30{0:d}{1:d}{2:d}'.format(round, series, game)))
if startgame is not None:
games = [g for g in games if g >= startgame]
if endgame is not None:
games = [g for g in games if g <= endgame]
scrape_games(season, games, force_overwrite, pause, 10)
def get_team_pbplog_filename(season, team):
return '{0:s}Team logs/{2:s}{1:d}_pbp.feather'.format(scrapenhl_globals.SAVE_FOLDER, season, team)
def get_team_toilog_filename(season, team):
return '{0:s}Team logs/{2:s}{1:d}_toi.feather'.format(scrapenhl_globals.SAVE_FOLDER, season, team)
def update_teamlogs(season, force_overwrite = False):
import os
import feather
import pandas as pd
import os.path
basic_gamelog = scrapenhl_globals.get_quick_gamelog_file()
teams = {x for x in \
basic_gamelog.query('Season == {0:d}'.format(season))['Home'].drop_duplicates()} | \
{x for x in \
basic_gamelog.query('Season == {0:d}'.format(season))['Away'].drop_duplicates()}
temp = basic_gamelog
### List files in correct format
allfiles = os.listdir(scrapenhl_globals.get_season_folder(season))
pbpfiles = {int(x[:5]): x for x in allfiles if x[-12:] == '_parsed.zlib'}
toifiles = {int(x[:5]): x for x in allfiles if x[-19:] == '_shifts_parsed.zlib'}
for team in teams:
teamgames = {int(g) for g in basic_gamelog.query('Season == {0:d} & (Home == "{1:s}" | Away == "{1:s}")'.format(
season, team))['Game'].values}
current_pbp = None
games_already_done = set()
if os.path.exists(get_team_pbplog_filename(season, team)):
current_pbp = feather.read_dataframe(get_team_pbplog_filename(season, team))
games_already_done = {x for x in current_pbp.Game}
dflist = []
if not force_overwrite and current_pbp is not None:
dflist.append(current_pbp)
teamgames = {int(g) for g in teamgames if g not in games_already_done}
### TODO do I need to flip any columns?
#if force_overwrite:
for game in teamgames:
try:
df = pd.read_hdf(scrape_game.get_parsed_save_filename(season, game))
df = df.assign(Game = game)
if df is not None:
dflist.append(df)
except FileNotFoundError:
pass
if len(dflist) > 0:
new_pbp = pd.concat(dflist)
for col in new_pbp.columns:
if new_pbp[col].dtype == 'object':
new_pbp[col] = new_pbp[col].astype(str)
feather.write_dataframe(new_pbp, get_team_pbplog_filename(season, team))
current_toi = None
games_already_done = set()
if os.path.exists(get_team_toilog_filename(season, team)):
current_toi = feather.read_dataframe(get_team_toilog_filename(season, team))
games_already_done = {x for x in current_toi.Game}
### TODO issues here
dflist = []
if not force_overwrite:
dflist.append(current_toi)
teamgames = {g for g in teamgames if g not in games_already_done}
#if force_overwrite:
for game in teamgames:
try:
df = pd.read_hdf(scrape_game.get_parsed_shifts_save_filename(season, game))
df = df.assign(Game = game)
cols_to_replace = {col for col in df.columns if str.isdigit(col[-1]) if col[:3] != team}
df.rename(columns = {col: 'Opp' + col[3:] for col in cols_to_replace}, inplace = True)
if df is not None:
dflist.append(df)
except FileNotFoundError:
pass
import pandas as pd
dflist = [df for df in dflist if df is not None]
if len(dflist) > 0:
new_toi = pd.concat(dflist)
for col in new_toi.columns:
if new_toi[col].dtype == 'object':
new_toi[col] = new_toi[col].astype(str)
feather.write_dataframe(new_toi, get_team_toilog_filename(season, team))
def get_team_toilog(season, team):
import feather
return feather.read_dataframe(get_team_toilog_filename(season, team))
def get_team_pbplog(season, team):
import feather
return feather.read_dataframe(get_team_pbplog_filename(season, team))
def get_season_schedule_url(season):
return 'https://statsapi.web.nhl.com/api/v1/schedule?startDate={0:d}-09-01&endDate={1:d}-06-25'.format(season,
season + 1)
def parse_games(season, games, force_overwrite = False, marker = 10):
"""
Parses the specified games.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
games : iterable of ints (e.g. list)
The game id. This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
force_overwrite : bool
If True, will overwrite previously parsed files. If False, will not parise if files already found.
marker : float or int
The number of times to print progress. 10 will print every 10%; 20 every 5%.
"""
import time
import datetime
starttime = time.time()
games = sorted(list(games))
marker_i = [len(games) // marker * i for i in range(marker)]
marker_i[-1] = len(games) - 1
marker_i_set = set(marker_i)
for i in range(len(games)):
game = games[i]
scrape_game.parse_game(season, game, force_overwrite)
if i in marker_i_set:
print('Done through', season, game, ' ~ ', round((marker_i.index(i)) * 100 / marker), '% in',
str(datetime.timedelta(seconds=time.time() - starttime)))
print('Done parsing games in', season)
def autoupdate(season = scrapenhl_globals.MAX_SEASON):
"""
Scrapes unscraped games for the specified season.
This is a convenience function that finds the highest completed game in a year and scrapes up to that point only.
This reduces unnecessary requests for unplayed games.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
"""
import urllib.request
url = get_season_schedule_url(season)
with urllib.request.urlopen(url) as reader:
page = reader.read().decode('latin-1')
import json
jsonpage = json.loads(page)
completed_games = set()
for gameday in jsonpage['dates']:
for game in gameday['games']:
if game['status']['abstractGameState'] == 'Final':
completed_games.add(int(str(game['gamePk'])[-5:]))
scrape_games(season, completed_games)
parse_games(season, completed_games)
def read_completed_games_from_url(season):
import urllib.request
url = get_season_schedule_url(season)
with urllib.request.urlopen(url) as reader:
page = reader.read().decode('latin-1')
import json
jsonpage = json.loads(page)
completed_games = set()
for gameday in jsonpage['dates']:
for game in gameday['games']:
if game['status']['abstractGameState'] == 'Final':
completed_games.add(int(str(game['gamePk'])[-5:]))
return completed_games
def reparse_season(season = scrapenhl_globals.MAX_SEASON):
"""
Re-parses entire season.
:param season: int
The season of the game. 2007-08 would be 2007.
:return:
"""
completed_games = read_completed_games_from_url(season)
parse_games(season, completed_games, True)
def rewrite_globals(start_from_scratch = True, seasons = None):
"""
Recreates global files: PLAYER_IDS, BASIC_GAMELOG, TEAM_IDS, CORRECTED_PLAYERNAMES
Parameters
-----------
seasons : list of int or None
The seasons of the games. 2007-08 would be 2007. Should only be provided when start_from_scratch is False.
start_from_scratch: bool
If True, will search through all files; if False, will look only at missing games in BASIC_GAMELOG.
False not yet implemented.
"""
import os.path
import zlib
import json
import pandas as pd
import time
import datetime
if seasons is None:
seasons = [i for i in range(2007, scrapenhl_globals.MAX_SEASON + 1)]
elif isinstance(seasons, int):
seasons = [seasons]
if start_from_scratch:
import os
try:
os.remove(scrapenhl_globals.PLAYER_ID_FILE)
except FileNotFoundError:
pass
try:
os.remove(scrapenhl_globals.TEAM_ID_FILE)
except FileNotFoundError:
pass
try:
os.remove(scrapenhl_globals.BASIC_GAMELOG_FILE)
except FileNotFoundError:
pass
for season in seasons:
starttime = time.time()
games = read_completed_games_from_url(season)
marker = 20
games = sorted(list(games))
marker_i = [len(games) // marker * i for i in range(marker)]
marker_i[-1] = len(games) - 1
marker_i_set = set(marker_i)
for i in range(len(games)):
game = games[i]
#print(season, game)
filename = scrape_game.get_parsed_save_filename(season, game)
if os.path.exists(scrape_game.get_json_save_filename(season, game)):
r = open(scrape_game.get_json_save_filename(season, game), 'rb')
page = r.read()
r.close()
page = zlib.decompress(page)
try:
data = json.loads(page.decode('latin-1'))
teamdata = data['liveData']['boxscore']['teams']
<|fim▁hole|> except json.JSONDecodeError:
pass
if i in marker_i_set:
print('Done through', season, game, ' ~ ', round((marker_i.index(i)) * 100 / marker), '% in ',
str(datetime.timedelta(seconds = time.time() - starttime)))
print('Done with', season)
if __name__ == "__main__":
for season in range(2015, 2017):
autoupdate(season)
update_teamlogs(season, True) #have an error at some point in 2013 #and 2014
pass<|fim▁end|> | scrape_game.update_team_ids_from_json(teamdata)
scrape_game.update_player_ids_from_json(teamdata)
scrape_game.update_quick_gamelog_from_json(data) |
<|file_name|>test_util_netstrings.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.util import netstrings
from twisted.protocols import basic
from twisted.trial import unittest
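# A netstring encodes a string as "<length>:<bytes>," e.g. "hello" -> "5:hello,".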
class NetstringParser(unittest.TestCase):
def test_valid_netstrings(self):
p = netstrings.NetstringParser()
p.feed("5:hello,5:world,")
self.assertEqual(p.strings, ['hello', 'world'])
def test_valid_netstrings_byte_by_byte(self):
# (this is really testing twisted's support, but oh well)
p = netstrings.NetstringParser()
[p.feed(c) for c in "5:hello,5:world,"]
self.assertEqual(p.strings, ['hello', 'world'])
def test_invalid_netstring(self):
p = netstrings.NetstringParser()
self.assertRaises(basic.NetstringParseError,
lambda: p.feed("5-hello!"))
def test_incomplete_netstring(self):
p = netstrings.NetstringParser()
p.feed("11:hello world,6:foob")<|fim▁hole|><|fim▁end|> | # note that the incomplete 'foobar' does not appear here
self.assertEqual(p.strings, ['hello world']) |
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use {Span, Spanned};
use lexer::{mod, Lexer, Token};
#[deriving(Clone)]
pub enum ParseError {
UnexpectedEndOfFile,
/// An unexpected token has been encountered.
UnexpectedToken(Spanned<Token>, String),
}
impl ::std::fmt::Show for ParseError {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::FormatError> {
match self {
&UnexpectedEndOfFile => {
"unexpected end of file".fmt(formatter)
},
&UnexpectedToken(ref loc, ref expect) => {
(format!("unexpected token `{}` at line {}:{}, expected {} instead",
loc.content, loc.start.line, loc.start.offset, expect)).fmt(formatter)
}
}
}
}
#[deriving(Show, Clone)]
pub struct TranslationUnit(Vec<ExternalDeclaration>);
#[deriving(Show, Clone)]
pub enum ExternalDeclaration {
/// Function definition.
ExternalDeclarationFunctionDefinition(FunctionDeclaration, Vec<Statement>),
ExternalDeclarationDeclaration(Declaration),
}
#[deriving(Show, Clone)]
pub enum Declaration {
DeclarationVariable(FullySpecifiedType, String, Option<Expression>),
DeclarationFunction(FunctionDeclaration),
/// `precision $2 $1;`
DeclarationPrecisionQualifier(Type, PrecisionQualifier),
}
#[deriving(Show, Clone)]
/// `$retvalue $name($params)`
pub struct FunctionDeclaration(FullySpecifiedType, String, Vec<()>);
#[deriving(Show, Clone)]
pub struct StructDefinition(Vec<()>);
#[deriving(Show, Clone)]
pub struct FullySpecifiedType {
pub ty: Type,
pub storage_qualifiers: Vec<StorageQualifier>,
pub parameter_qualifier: Option<ParameterQualifier>,
pub precision_qualifier: Option<PrecisionQualifier>,
}
#[deriving(Show, Clone)]
pub enum StorageQualifier {
StorageQualifierConst,
StorageQualifierInOut,
StorageQualifierIn,
StorageQualifierOut,
StorageQualifierCentroid,
StorageQualifierPatch,
StorageQualifierSample,
StorageQualifierUniform,
StorageQualifierBuffer,
StorageQualifierShared,
StorageQualifierCoherent,
StorageQualifierVolatile,
StorageQualifierRestrict,
StorageQualifierReadonly,
StorageQualifierWriteonly,
StorageQualifierSubroutine(Vec<()>), // TODO:
}
#[deriving(Show, Clone)]
pub enum PrecisionQualifier {
PrecisionQualifierHigh,
PrecisionQualifierMedium,
PrecisionQualifierLow,
}
#[deriving(Show, Clone)]
pub enum ParameterQualifier {
ParameterQualifierIn,
ParameterQualifierOut,
ParameterQualifierInOut,
}
#[deriving(Show, Clone)]
pub enum Type {
Void,
Float,
Int,
Bool,
Vec2,
Vec3,
Vec4,
BVec2,
BVec3,
BVec4,
IVec2,
IVec3,
IVec4,
Mat2,
Mat3,
Mat4,
Sampler2D,
SamplerCube,
TypeStruct(StructDefinition),
TypeIdentifier(String),
}
#[deriving(Show, Clone)]
pub enum Statement {
StatementExpression(Expression),
/// `{ $statements }`
StatementScope(Vec<Statement>),
/// if `Expression` then `Statement` else `Option<Statement>`.
StatementIf(Expression, Box<Statement>, Option<Box<Statement>>),
/// `continue;`
StatementContinue,
/// `break;`
StatementBreak,
/// `return $expression;`
StatementReturn(Expression),
/// `discard;`
StatementDiscard,
StatementDeclaration(Declaration),
}
#[deriving(Show, Clone)]
pub enum Expression {
/// `$op $2`
ExpressionUnaryOperation(UnaryOperation, Box<Expression>),
/// `$1 $op $2`
ExpressionBinaryOperation(Box<Expression>, BinaryOperation, Box<Expression>),
/// `$1 ? $2 : $3`
ExpressionCondition(Box<Expression>, Box<Expression>, Box<Expression>),
/// `$1($2)`
ExpressionFunctionCall(Box<Expression>, Vec<Expression>),
/// `$1`,
ExpressionIdentifier(String),
/// `__LINE__`
ExpressionLine,
/// `__FILE__`
ExpressionFile,
/// `__VERSION__`
ExpressionVersion,
}
#[deriving(Show, Clone)]
pub enum UnaryOperation {
/// `+`
UnaryOperationPlus,
/// `-`
UnaryOperationMinus,
/// `!`
UnaryOperationNot,
/// `++e`
UnaryOperationPreInc,
/// `--e`
UnaryOperationPreDec,
/// `e++`
UnaryOperationPostInc,
/// `e--`
UnaryOperationPostDec,
}
#[deriving(Show, Clone)]
pub enum BinaryOperation {
/// `+`
BinaryOperationAddition,
/// `-`
BinaryOperationSubtraction,
/// `*`
BinaryOperationMultiplication,
/// `/`
BinaryOperationDivision,
/// `%`
BinaryOperationMod,
/// `^`
BinaryOperationXor,
/// `&&`
BinaryOperationAnd,
/// `||`
BinaryOperationOr,
/// `&`
BinaryOperationBinAnd,
/// `|`
BinaryOperationBinOr,
}
pub fn parse<R: Reader>(data: R) -> Result<TranslationUnit, ParseError> {
let mut parser = Parser {
lexer: Lexer::new(data),
next_token: None,
};
try!(parser.read_next());
parser.parse_translation_unit()
}
struct Parser<R> {
lexer: Lexer<R>,
next_token: Option<Spanned<Token>>,
}
impl<R: Reader> Parser<R> {
fn read_next(&mut self) -> Result<(), ParseError> {
self.next_token = self.lexer.next();
Ok(())
}
fn parse_translation_unit(&mut self) -> Result<TranslationUnit, ParseError> {
let mut result = Vec::new();
loop {
match try!(self.parse_external_declaration()) {
Some(decl) => result.push(decl),
None => break
};
}
Ok(TranslationUnit(result))
}
fn parse_external_declaration(&mut self) -> Result<Option<ExternalDeclaration>, ParseError> {
self.skip_whitespaces();
let token = match self.next_token {
Some(ref token) => token.clone(),
None => return Ok(None),
};
let ty = try!(self.parse_fully_specified_type());
try!(self.expect_whitespace());
let name = try!(self.parse_identifier());
match (try!(self.peek())).content {
lexer::SemiColon => {
try!(self.read_next());
return Ok(Some(ExternalDeclarationDeclaration(
DeclarationVariable(ty, name, None)
)));
},
lexer::LeftParenthesis => {
try!(self.read_next());
// TODO: function parameters
try!(self.expect_token(lexer::RightParenthesis));
self.skip_whitespaces();
match (try!(self.peek())).content {
lexer::LeftBrace => {
try!(self.read_next());
self.skip_whitespaces();
let body = try!(self.parse_statements_list());
self.skip_whitespaces();
try!(self.expect_token(lexer::RightBrace));
return Ok(Some(ExternalDeclarationFunctionDefinition(
FunctionDeclaration(ty, name, Vec::new()),
body
)));
},
lexer::SemiColon => {
try!(self.read_next());
return Ok(Some(ExternalDeclarationDeclaration(
DeclarationFunction(FunctionDeclaration(ty, name, Vec::new()))
)));
},
_ => ()
}
}
_ => ()
};
Err(UnexpectedToken(try!(self.peek()), format!("external declaration")))
}
fn peek(&mut self) -> Result<Spanned<Token>, ParseError> {
match self.next_token {
Some(ref token) => Ok(token.clone()),
None => Err(UnexpectedEndOfFile),
}
}
fn expect_whitespace(&mut self) -> Result<(), ParseError> {
let token = try!(self.peek());
if let lexer::Whitespace(_) = token.content {
try!(self.read_next());
return Ok(());
} else {
return Err(UnexpectedToken(token.clone(), format!("whitespace")));
}
}
fn skip_whitespaces(&mut self) {
loop {
let token = match self.next_token {
Some(ref token) => token.clone(),
None => return,
};
if let lexer::Whitespace(_) = token.content {
self.read_next();
} else {
break;
}
}
}
fn parse_identifier(&mut self) -> Result<String, ParseError> {
let token = try!(self.peek());
if let lexer::Identifier(ref id) = token.content {
let val = Ok(id.clone());
try!(self.read_next());
val
} else {
Err(UnexpectedToken(token.clone(), format!("identifier")))
}
}
// TODO: incomplete
fn parse_fully_specified_type(&mut self) -> Result<FullySpecifiedType, ParseError> {
let mut token;
let mut storage_qualifiers = Vec::new();
loop {
token = try!(self.peek());
match token.content {
lexer::Uniform => {
storage_qualifiers.push(StorageQualifierUniform);
},
lexer::Attribute => {
storage_qualifiers.push(StorageQualifierIn);
},
lexer::Varying => {
//storage_qualifiers.push(StorageQualifierOut);
// TODO: same as `in` in vertex shaders and `out` in fragment shaders
},
_ => break
}
try!(self.read_next());
self.expect_whitespace();
}
token = try!(self.peek());
let ty = match token.content {
lexer::Void => Void,
lexer::Int => Int,
lexer::Mat4 => Mat4,
lexer::Vec2 => Vec2,
_ => return Err(UnexpectedToken(token, format!("type")))<|fim▁hole|> };
try!(self.read_next());
Ok(FullySpecifiedType {
ty: ty,
storage_qualifiers: storage_qualifiers,
parameter_qualifier: None,
precision_qualifier: None,
})
}
fn parse_expression(&mut self) -> Result<Expression, ParseError> {
// TODO: operators precedence
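// Note (editor's): binary operators and precedence are not handled yet; only
// unary, identifier and postfix forms are parsed below.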
let token = try!(self.peek());
let expression = match token.content {
lexer::Plus | lexer::Dash | lexer::Bang | lexer::Increment | lexer::Decrement => {
try!(self.read_next());
let op = match token.content {
lexer::Plus => UnaryOperationPlus,
lexer::Dash => UnaryOperationMinus,
lexer::Bang => UnaryOperationNot,
lexer::Increment => UnaryOperationPreInc,
lexer::Decrement => UnaryOperationPreDec,
_ => fail!()
};
let expr = try!(self.parse_expression());
ExpressionUnaryOperation(op, box expr)
},
lexer::Identifier(ref id) => {
try!(self.read_next());
ExpressionIdentifier(id.clone())
},
_ => return Err(UnexpectedToken(token.clone(), format!("expression")))
};
match self.next_token.as_ref().map(|e| e.clone()) {
Some(ref a) if a.content == lexer::Increment => {
try!(self.read_next());
Ok(ExpressionUnaryOperation(UnaryOperationPostInc, box expression))
},
Some(ref a) if a.content == lexer::Decrement => {
try!(self.read_next());
Ok(ExpressionUnaryOperation(UnaryOperationPostDec, box expression))
},
Some(ref a) if a.content == lexer::LeftParenthesis => {
try!(self.read_next());
unimplemented!()
},
_ => Ok(expression),
}
}
fn parse_statements_list(&mut self) -> Result<Vec<Statement>, ParseError> {
let mut result = Vec::new();
while self.next_token.as_ref().map(|e| e.content.clone()) != Some(lexer::RightBrace) {
result.push(try!(self.parse_statement()));
self.skip_whitespaces();
}
Ok(result)
}
fn parse_statement(&mut self) -> Result<Statement, ParseError> {
let token = try!(self.peek());
match token.content {
lexer::LeftBrace => {
try!(self.read_next());
let list = try!(self.parse_statements_list());
try!(self.expect_token(lexer::RightBrace));
Ok(StatementScope(list))
},
lexer::Continue => {
try!(self.read_next());
try!(self.expect_semicolon());
Ok(StatementContinue)
},
lexer::Break => {
try!(self.read_next());
try!(self.expect_semicolon());
Ok(StatementBreak)
},
lexer::Return => {
try!(self.read_next());
let expr = try!(self.parse_expression());
try!(self.expect_semicolon());
Ok(StatementReturn(expr))
},
lexer::Discard => {
try!(self.read_next());
try!(self.expect_semicolon());
Ok(StatementDiscard)
},
_ => {
self.parse_expression().map(|e| StatementExpression(e))
}
}
}
fn expect_semicolon(&mut self) -> Result<(), ParseError> {
self.expect_token(lexer::SemiColon)
}
fn expect_token(&mut self, token: Token) -> Result<(), ParseError> {
let val = match self.next_token {
None => Err(UnexpectedEndOfFile),
Some(ref a) if a.content == token => Ok(()),
Some(ref a) => Err(UnexpectedToken(a.clone(), token.to_string())),
};
if val.is_ok() {
try!(self.read_next());
}
val
}
}
#[cfg(test)]
mod test {
use std::io::BufReader;
use super::parse;
#[test]
fn test() {
let src = b"\
uniform mat4 uMatrix;
attribute vec2 iPosition;
attribute vec2 iTexCoords;
varying vec2 vTexCoords;
void main() {
gl_Position = uMatrix * vec4(iPosition, 0.0, 1.0);
vTexCoords = iTexCoords;
}";
let reader = BufReader::new(src);
fail!("{}", parse(reader));
}
}<|fim▁end|> | |
<|file_name|>CP.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os
import sys
import traceback
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):<|fim▁hole|> -0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
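# Element-wise tolerance check against the reference gradient values above.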
for i in range(0,len(CorrectGrad)):
AllGood=AllGood and abs(CorrectGrad[i]-GradIn[i])<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()<|fim▁end|> | return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[ |
<|file_name|>bitcoind.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "init.h"
#include "bitcoinrpc.h"
#include <boost/algorithm/string/predicate.hpp>
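// Polls ShutdownRequested() every 200 ms and interrupts the worker threads once a shutdown is requested.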
void DetectShutdownThread(boost::thread_group* threadGroup)
{
bool shutdown = ShutdownRequested();
// Tell the main threads to shutdown.
while (!shutdown)
{
MilliSleep(200);
shutdown = ShutdownRequested();
}
if (threadGroup)
threadGroup->interrupt_all();
}
//////////////////////////////////////////////////////////////////////////////
//
// Start
//
bool AppInit(int argc, char* argv[])
{
boost::thread_group threadGroup;
boost::thread* detectShutdownThread = NULL;
bool fRet = false;
try
{
//
// Parameters
//
// If Qt is used, parameters/skidoo.conf are parsed in qt/bitcoin.cpp's main()
ParseParameters(argc, argv);
if (!boost::filesystem::is_directory(GetDataDir(false)))
{
fprintf(stderr, "Error: Specified directory does not exist\n");
Shutdown();
}
ReadConfigFile(mapArgs, mapMultiArgs);
if (mapArgs.count("-?") || mapArgs.count("--help"))
{
// First part of help message is specific to skidood / RPC client
std::string strUsage = _("Skidoo version") + " " + FormatFullVersion() + "\n\n" +
_("Usage:") + "\n" +
" skidood [options] " + "\n" +
" skidood [options] <command> [params] " + _("Send command to -server or skidood") + "\n" +
" skidood [options] help " + _("List commands") + "\n" +
" skidood [options] help <command> " + _("Get help for a command") + "\n";
strUsage += "\n" + HelpMessage();
fprintf(stdout, "%s", strUsage.c_str());
return false;
}
// Command-line RPC
for (int i = 1; i < argc; i++)
if (!IsSwitchChar(argv[i][0]) && !boost::algorithm::istarts_with(argv[i], "skidoo:"))
fCommandLine = true;
if (fCommandLine)
{
if (!SelectParamsFromCommandLine()) {
fprintf(stderr, "Error: invalid combination of -regtest and -testnet.\n");
return false;
}
int ret = CommandLineRPC(argc, argv);
exit(ret);
}
#if !defined(WIN32)
fDaemon = GetBoolArg("-daemon", false);
if (fDaemon)
{
// Daemonize
pid_t pid = fork();
if (pid < 0)
{
fprintf(stderr, "Error: fork() returned %d errno %d\n", pid, errno);
return false;
}
if (pid > 0) // Parent process, pid is child process id
{
CreatePidFile(GetPidFile(), pid);
return true;
}
// Child process falls through to rest of initialization
pid_t sid = setsid();
if (sid < 0)
fprintf(stderr, "Error: setsid() returned %d errno %d\n", sid, errno);
}
#endif
detectShutdownThread = new boost::thread(boost::bind(&DetectShutdownThread, &threadGroup));
fRet = AppInit2(threadGroup);
}
catch (std::exception& e) {
PrintExceptionContinue(&e, "AppInit()");
} catch (...) {
PrintExceptionContinue(NULL, "AppInit()");
}
if (!fRet) {
if (detectShutdownThread)
detectShutdownThread->interrupt();
threadGroup.interrupt_all();
}
if (detectShutdownThread)
{
detectShutdownThread->join();
delete detectShutdownThread;
detectShutdownThread = NULL;
}
Shutdown();
return fRet;
}
extern void noui_connect();
int main(int argc, char* argv[])
{
bool fRet = false;
fHaveGUI = false;
<|fim▁hole|> // Connect skidood signal handlers
noui_connect();
fRet = AppInit(argc, argv);
if (fRet && fDaemon)
return 0;
return (fRet ? 0 : 1);
}<|fim▁end|> | |
<|file_name|>cluster.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Clustering and the EM algorithm
~~~~~~~~~~~~~~~~
Clustering
:copyright: (c) 2016 by the huaxz1986.
:license: lgpl-3.0, see LICENSE for more details.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_blobs
# from .agglomerative_clustering import test_AgglomerativeClustering,test_AgglomerativeClustering_nclusters,test_AgglomerativeClustering_linkage
# from .dbscan import test_DBSCAN,test_DBSCAN_epsilon,test_DBSCAN_min_samples
from chapters.Cluster_EM.gmm import test_GMM,test_GMM_cov_type,test_GMM_n_components
# from .kmeans import test_Kmeans,test_Kmeans_n_init,test_Kmeans_nclusters
def create_data(centers,num=100,std=0.7):
'''
    Generate a data set for clustering.
    :param centers: array of cluster centers; if the centers are two-dimensional, every generated sample is two-dimensional as well
    :param num: number of samples
    :param std: standard deviation of the samples within each cluster
    :return: the data set for clustering, a tuple whose first element is the sample set and whose second element is the true cluster labels
'''
X, labels_true = make_blobs(n_samples=num, centers=centers, cluster_std=std)
return X,labels_true
def plot_data(*data):
'''
    Plot the data set used for clustering.
    :param data: variadic argument: a tuple whose first element is the sample set and whose second element is the true cluster labels
:return: None
'''
X,labels_true=data
labels=np.unique(labels_true)<|fim▁hole|> colors='rgbyckm' # 每个簇的样本标记不同的颜色
for i,label in enumerate(labels):
position=labels_true==label
ax.scatter(X[position,0],X[position,1],label="cluster %d"%label,
color=colors[i%len(colors)])
ax.legend(loc="best",framealpha=0.5)
ax.set_xlabel("X[0]")
ax.set_ylabel("Y[1]")
ax.set_title("data")
plt.show()
if __name__=='__main__':
    centers=[[1,1],[2,2],[1,2],[10,20]] # centers used to generate the clusters
    X,labels_true=create_data(centers,1000,0.5) # generate the data set for clustering
    # plot_data(X,labels_true) # plot the clustering data set
    # test_Kmeans(X,labels_true) # call the test_Kmeans function
    # test_Kmeans_nclusters(X,labels_true) # call the test_Kmeans_nclusters function
    # test_Kmeans_n_init(X,labels_true) # call the test_Kmeans_n_init function
    # test_DBSCAN(X,labels_true) # call the test_DBSCAN function
    # test_DBSCAN_epsilon(X,labels_true) # call the test_DBSCAN_epsilon function
    # test_DBSCAN_min_samples(X,labels_true) # call the test_DBSCAN_min_samples function
    # test_AgglomerativeClustering(X,labels_true) # call the test_AgglomerativeClustering function
    # test_AgglomerativeClustering_nclusters(X,labels_true) # call the test_AgglomerativeClustering_nclusters function
    # test_AgglomerativeClustering_linkage(X,labels_true) # call the test_AgglomerativeClustering_linkage function
    # test_GMM(X,labels_true) # call the test_GMM function
    # test_GMM_n_components(X,labels_true) # call the test_GMM_n_components function
test_GMM_cov_type(X,labels_true) # 调用 test_GMM_cov_type 函数<|fim▁end|> | fig=plt.figure()
ax=fig.add_subplot(1,1,1) |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var elixir = require('laravel-elixir');
/*
|--------------------------------------------------------------------------
| Elixir Asset Management
|--------------------------------------------------------------------------
|<|fim▁hole|> |
*/
elixir(function(mix) {
mix.sass('app.scss');
});
elixir(function(mix) {
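    // Combine every script under resources/assets/js into a single bundle (Elixir's default output is public/js/all.js).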
mix.scriptsIn('resources/assets/js/');
});<|fim▁end|> | | Elixir provides a clean, fluent API for defining some basic Gulp tasks
| for your Laravel application. By default, we are compiling the Sass
| file for our application, as well as publishing vendor resources. |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>try: from setuptools import setup
except: from distutils.core import setup
setup( long_description=open("README.rst").read(),
name="""tinypath""",
license="""MIT""",
author="""Karim Bahgat""",
author_email="""[email protected]""",
py_modules=['tinypath'],
url="""http://github.com/karimbahgat/tinypath""",<|fim▁hole|> version="""0.1.1""",
keywords="""paths files folders organizing""",
classifiers=['License :: OSI Approved', 'Programming Language :: Python', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'Intended Audience :: End Users/Desktop'],
description="""Tinypath is a tiny object-oriented file path module that provides only the most crucial and commonly needed functionality, making it easy to learn and efficient to use.""",
)<|fim▁end|> | |
<|file_name|>createSampledSetFunctionObject.C<|end_file_name|><|fim▁begin|>// OF-extend Revision: $Id$
/*---------------------------------------------------------------------------*\<|fim▁hole|> \\ / A nd | Copyright held by original author
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is based on OpenFOAM.
OpenFOAM is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
\*---------------------------------------------------------------------------*/
#include "createSampledSetFunctionObject.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
{
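    // register the function object so it can be selected by name from case dictionaries at run time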
defineNamedTemplateTypeNameAndDebug(createSampledSetFunctionObject, 0);
addToRunTimeSelectionTable
(
functionObject,
createSampledSetFunctionObject,
dictionary
);
}
// ************************************************************************* //<|fim▁end|> | ========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | |
<|file_name|>eq.rs<|end_file_name|><|fim▁begin|>use malachite_base_test_util::bench::bucketers::pair_rational_sequence_max_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::unsigned_rational_sequence_pair_gen;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_rational_sequence_eq);
register_bench!(runner, benchmark_rational_sequence_eq);
}
fn demo_rational_sequence_eq(gm: GenMode, config: GenConfig, limit: usize) {
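    // Draw random pairs of rational sequences from the generator and print whether each pair compares equal.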
for (xs, ys) in unsigned_rational_sequence_pair_gen::<u8>()
.get(gm, &config)
.take(limit)
{
if xs == ys {
println!("{} = {}", xs, ys);
} else {
println!("{} ≠ {}", xs, ys);
}
}
}
#[allow(clippy::no_effect, unused_must_use)]
fn benchmark_rational_sequence_eq(gm: GenMode, config: GenConfig, limit: usize, file_name: &str) {
run_benchmark(
"RationalSequence == RationalSequence",
BenchmarkType::Single,
unsigned_rational_sequence_pair_gen::<u8>().get(gm, &config),
gm.name(),
limit,
file_name,<|fim▁hole|> &pair_rational_sequence_max_len_bucketer("xs", "ys"),
&mut [("Malachite", &mut |(xs, ys)| no_out!(xs == ys))],
);
}<|fim▁end|> | |
<|file_name|>ArrayPrimitiveMenu.java<|end_file_name|><|fim▁begin|>package jaist.css.covis.cls;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JFrame;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import jaist.css.covis.util.FramePopup;
public class ArrayPrimitiveMenu extends JPopupMenu implements FramePopup {
private static final long serialVersionUID = 8027248166373847225L;
Covis_primitive v;
Covis_Array a;
JFrame f;
public ArrayPrimitiveMenu(Covis_primitive _v, Covis_Array _a) {
this.v = _v;
this.a = _a;
JMenuItem menuItem;
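        // force heavyweight popups so the menu renders correctly on top of heavyweight components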
setLightWeightPopupEnabled(false);
menuItem = new JMenuItem("cancel");
add(menuItem);
addSeparator();
menuItem = new JMenuItem("edit value");
add(menuItem);
menuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {<|fim▁hole|> input = JOptionPane.showInputDialog(f, "Input Value", v.getValue());
if (input == null) return;
if (!v.setValue(input)){
JOptionPane.showMessageDialog(f,"Value is not accepted.","Error",JOptionPane.WARNING_MESSAGE);
return;
}
                v.buffer.putHistoryEditValueArray(v,a); // record the edit in the value history so it is reflected in the generated source code
Informer.playSound("Pop.wav");
}
});
}
public void showWithFrame(Component c, int x, int y, JFrame _f) {
f = _f;
show(c, x, y);
}
}<|fim▁end|> | String input; |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>angular.module('luhbot',[
'ui.router',
'dashboard'
])
//TODO: Persistance of socket
.factory('IO',function(Toasts){
var IO = function(){
this.namespace = null;
this.socket = {};
var self = this;
this.connect = function(namespace){
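      // Connect to the socket.io server on port 7171, optionally scoped to a "/namespace".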
if(namespace){
this.namespace = String('/').concat(namespace);
this.socket = io.connect(String("http://").concat(window.location.hostname).concat(':7171').concat(this.namespace));
return this;
}
      this.socket = io.connect(String("http://").concat(window.location.hostname).concat(':7171'));
return this;
}<|fim▁hole|> Toasts.makeToast(data.msg);
});
return this;
}
this.joinChannel = function(channel){
this.socket.emit('joinChannel', channel);
console.log('join')
return this;
}
return this;
}
return new IO;
})
.factory('Toasts',function(){
function makeToast(message){
Materialize.toast(message, 4000);
}
return {
makeToast: makeToast
}
})
.factory('BOT',function($http,Toasts){
function connect(){
$http.get('/api/irc/turn/on')
.success(function(data,status){
Toasts.makeToast(data.msg)
})
.error(function(data,status){
Toasts.makeToast(data.msg)
});
$http.get('/api/twitch/update/user')
}
function disconnect(){
$http.get('/api/irc/turn/off')
.success(function(data,status){
Toasts.makeToast(data.msg)
})
.error(function(data,status){
Toasts.makeToast(data.msg)
});
}
function ping(){
$http.get('/api/irc/ping')
.success(function(data,status){
Toasts.makeToast(data.msg)
})
.error(function(data,status){
Toasts.makeToast(data.msg)
});
}
function restart(){
$http.get('/api/irc/force')
.success(function(data,status){
Toasts.makeToast(data.msg)
})
.error(function(data,status){
Toasts.makeToast(data.msg)
});
}
return {
connect : connect,
disconnect: disconnect,
ping : ping
}
})
.factory('User',function($http,$q){
var _user = new Object();
var df = $q.defer();
function getUserApi(){
$http.get('/api/users/me')
.success(function(data,status){
df.resolve(data);
})
.error(function(data,status){
console.error(data);
df.reject(data)
});
return df.promise;
}
function retrieveUserData(property){
if(!Object.keys(_user).length){
return getUserApi().then(function(data){
_user = data;
if(property){
return _user[property];
}
return _user;
});
}
if(property){
return _user[property];
}
return _user;
}
return {
getData: retrieveUserData
}
})
.config(function($urlRouterProvider){
$urlRouterProvider.otherwise('/dashboard');
})
.run(function(IO, User){
User.getData('twitchUser').then(function(channel){
IO.connect('alerts').listenNewMessages().joinChannel(channel);
});
});<|fim▁end|> |
this.listenNewMessages = function(){
this.socket.on('newMessage',function(data){ |
<|file_name|>network_tb_gen_parameterized_credit_based.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Siavoosh Payandeh Azad
from math import ceil, log
import random
# -D [size]: sets the size of the network, it can be powers of two
# -Rand: generates random traffic patterns
import sys
if '--help' in sys.argv[1:]:
print "\t-D [network size]: makes a test bench for network of [size]X[size]. Size can be "
print "\t only multiples of two. default value is 4."
print "\t-DW [data_width]: sets the data width of the network!"
print "\t[-Rand/-BR] [PIR]: Uses [Rand]om traffic pattern generator with packet injection rate equal to PIR"
print "\t or Uses [B]it[R]eversal traffic pattern generator with packet injection rate equal to PIR"
print "\t default case is repetitive packets being sent from source to same destination"
print "\t-o: specifies the name and path of the output file. default path is current folder!"
print "\t-PS [min packet size] [max packet size]: specifies packet size. default min value is 3 and default max value is 8."
print "\t-PE: adds processing elements in each node"
print "\t-SHMU: maps shmu on one of the nodes"
print "\t-NI_Test: adds an NI to the nodes and connects a traffic generator to it"
print "\t-trace: adds trackers to network outputs"
print "\t-sim: specifies the length of simulation in clock cycles. which at this time the packet generators will stop sending packets."
print "\t-verbal: prints more details"
print "\t**Example 1: python network_tb_gen_parameterized_credit_based.py -D 2 -SHMU -NI_Test -Rand 0.01 -PS 3 3 -sim 10000 "
print "\t generates a testbench for a 2X2 network and adds NIs and NI_Testers to it which sends packets to random destinations "
print "\t with 0.01 injection rate, and packet size of 3 until 10000 ns"
print "\t**Example 2: python network_tb_gen_parameterized_credit_based.py -D 2 -Rand 0.005 -PS 3 3 -sim 10000 "
print "\t generates a testbench for a 2X2 network which uses random traffic pattern generator with PIR of 0.005 and fixed"
print "\t packet size of 3 and sends packets until 10000 ns"
sys.exit()
network_dime = 4
data_width = 32
random_dest = False
add_tracker = False
add_SHMU = False
add_node = False
add_NI_Test = False
got_finish_time = False
sim_finish_time = None
bit_reversal = False
get_packet_size = False
packet_size_min = 3
packet_size_max = 8
verbal = False
# file_path = file_name+'_'+str(network_dime)+"x"+str(network_dime)+'.vhd'
if '-D' in sys.argv[1:]:
network_dime = int(sys.argv[sys.argv.index('-D')+1])
if '-DW' in sys.argv[1:]:
data_width = int(sys.argv[sys.argv.index('-DW')+1])
if data_width % 2 != 0:
raise ValueError("wrong data width. please choose powers of 2. for example 32!")
if '-Rand' in sys.argv[1:]:
random_dest = True
PIR = float(sys.argv[sys.argv.index('-Rand')+1])
frame_size = int(ceil(1.0/PIR))
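    # with one packet per frame of ceil(1/PIR) cycles, the generators realize the requested injection rate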
if '-SHMU' in sys.argv[1:]:
add_SHMU = True
if '-NI_Test' in sys.argv[1:]:
add_NI_Test = True
if "-PE" in sys.argv[1:]:
add_node = True
if "-trace" in sys.argv[1:]:
add_tracker = True
if '-BR' in sys.argv[1:]:
bit_reversal = True
PIR = float(sys.argv[sys.argv.index('-BR')+1])
frame_size = int(ceil(1.0/PIR))
if random_dest and bit_reversal:
raise ValueError("Can not accept multiple traffic patterns at the same time...")
if '-sim' in sys.argv[1:]:
got_finish_time = True
sim_finish_time = int(sys.argv[sys.argv.index('-sim')+1])
if '-PS' in sys.argv[1:]:
get_packet_size = True
packet_size_min = int(sys.argv[sys.argv.index('-PS')+1])
packet_size_max = int(sys.argv[sys.argv.index('-PS')+2])
if '-verbal' in sys.argv[1:]:
verbal = True
file_name = 'tb_network'
if random_dest:
file_name += '_rand'
elif bit_reversal:
file_name += '_br'
if '-o' in sys.argv[1:]:
file_path = sys.argv[sys.argv.index('-o')+1]
if ".vhd" not in file_path:
raise ValueError("wrong file extention. only vhdl files are accepted!")
else:
file_path = file_name+'_'+str(network_dime)+"x"+str(network_dime)+'.vhd'
noc_file = open(file_path, 'w')
if add_NI_Test and add_node:
raise ValueError("cant have -NI_Test and -PE at the same time")
noc_file.write("--Copyright (C) 2016 Siavoosh Payandeh Azad\n")
noc_file.write("------------------------------------------------------------\n")
noc_file.write("-- This file is automatically generated Please do not change!\n")
noc_file.write("-- Here are the parameters:\n")
noc_file.write("-- \t network size x:"+str(network_dime)+"\n")
noc_file.write("-- \t network size y:"+str(network_dime)+"\n")
noc_file.write("-- \t data width:"+str(data_width))
noc_file.write("-- \t traffic pattern:"+str())
noc_file.write("------------------------------------------------------------\n\n")
noc_file.write("library ieee;\n")
noc_file.write("use ieee.std_logic_1164.all;\n")
noc_file.write("use IEEE.STD_LOGIC_ARITH.ALL;\n")
noc_file.write("use IEEE.STD_LOGIC_UNSIGNED.ALL;\n")
noc_file.write("use work.TB_Package.all;\n\n")
noc_file.write("USE ieee.numeric_std.ALL; \n")
noc_file.write("use IEEE.math_real.\"ceil\";\n")
noc_file.write("use IEEE.math_real.\"log2\";\n\n")
noc_file.write("entity tb_network_"+str(network_dime)+"x"+str(network_dime)+" is\n")
noc_file.write("end tb_network_"+str(network_dime)+"x"+str(network_dime)+"; \n")
noc_file.write("\n\n")
noc_file.write("architecture behavior of tb_network_"+str(network_dime)+"x"+str(network_dime)+" is\n\n")
noc_file.write("-- Declaring network component\n")
string_to_print = ""
noc_file.write("component network_"+str(network_dime)+"x"+str(network_dime)+" is\n")
noc_file.write(" generic (DATA_WIDTH: integer := 32; DATA_WIDTH_LV: integer := 11);\n")
noc_file.write("port (reset: in std_logic; \n")
noc_file.write("\tclk: in std_logic; \n")
if not add_SHMU:
noc_file.write("\tRxy_reconf: in std_logic_vector(7 downto 0);\n")
noc_file.write("\tReconfig : in std_logic;\n")
for i in range(network_dime**2):
noc_file.write("\t--------------\n")
noc_file.write("\tRX_L_"+str(i)+": in std_logic_vector (DATA_WIDTH-1 downto 0);\n")
noc_file.write("\tcredit_out_L_"+str(i)+", valid_out_L_"+str(i)+": out std_logic;\n")
noc_file.write("\tcredit_in_L_"+str(i)+", valid_in_L_"+str(i)+": in std_logic;\n")
if i == network_dime**2-1 and add_SHMU== False:
noc_file.write("\tTX_L_"+str(i)+": out std_logic_vector (DATA_WIDTH-1 downto 0)\n")
else:
noc_file.write("\tTX_L_"+str(i)+": out std_logic_vector (DATA_WIDTH-1 downto 0);\n")
if add_SHMU:
for i in range(0, network_dime**2):
string_to_print +="\t--------------\n"
string_to_print +=" link_faults_"+str(i) +": out std_logic_vector(4 downto 0);\n"
string_to_print +=" turn_faults_"+str(i) +": out std_logic_vector(19 downto 0);\n"
string_to_print +=" Rxy_reconf_PE_"+str(i) +": in std_logic_vector(7 downto 0);\n"
string_to_print +=" Cx_reconf_PE_"+str(i) +": in std_logic_vector(3 downto 0);\n"
string_to_print +=" Reconfig_command_"+str(i) +" : in std_logic;\n\n"
noc_file.write(string_to_print[:len(string_to_print)-3])
noc_file.write("\n ); \n")
noc_file.write("end component; \n")
if add_tracker:
noc_file.write("component flit_tracker is\n")
noc_file.write(" generic (\n")
noc_file.write(" DATA_WIDTH: integer := 32;\n")
noc_file.write(" tracker_file: string :=\"track.txt\"\n")
noc_file.write(" );\n")
noc_file.write(" port (\n")
noc_file.write(" clk: in std_logic;\n")
noc_file.write(" RX: in std_logic_vector (DATA_WIDTH-1 downto 0); \n")
noc_file.write(" valid_in : in std_logic \n")
noc_file.write(" );\n")
noc_file.write("end component;\n")
if add_node and not add_SHMU and not add_NI_Test:
noc_file.write("component NoC_Node is\n")
noc_file.write("generic( current_address : integer := 0; stim_file: string :=\"code.txt\";\n")
noc_file.write("\tlog_file : string := \"output.txt\");\n\n")
noc_file.write("port( reset : in std_logic;\n")
noc_file.write(" clk : in std_logic;\n")
noc_file.write(" \n")
noc_file.write(" credit_in : in std_logic;\n")
noc_file.write(" valid_out: out std_logic;\n")
noc_file.write(" TX: out std_logic_vector(31 downto 0);\n")
noc_file.write("\n")
noc_file.write(" credit_out : out std_logic;\n")
noc_file.write(" valid_in: in std_logic;\n")
noc_file.write(" RX: in std_logic_vector(31 downto 0)\n")
noc_file.write(" );\n")
noc_file.write("end component; --component NoC_Node\n")
elif add_node and add_SHMU and not add_NI_Test:
noc_file.write("-- Declaring Node component\n\n")
noc_file.write("component NoC_Node is\n")
noc_file.write("generic( current_address : integer := 0;\n")
noc_file.write(" stim_file: string :=\"code.txt\";\n")
noc_file.write(" log_file : string := \"output.txt\");\n")
noc_file.write("\n")
noc_file.write("port( reset : in std_logic;\n")
noc_file.write(" clk : in std_logic;\n")
noc_file.write("\n")
noc_file.write(" credit_in : in std_logic;\n")
noc_file.write(" valid_out: out std_logic;\n")
noc_file.write(" TX: out std_logic_vector(31 downto 0);\n")
noc_file.write("\n")
noc_file.write(" credit_out : out std_logic;\n")
noc_file.write(" valid_in: in std_logic;\n")
noc_file.write(" RX: in std_logic_vector(31 downto 0);\n")
noc_file.write("\n")
noc_file.write(" link_faults: in std_logic_vector(4 downto 0);\n")
noc_file.write(" turn_faults: in std_logic_vector(19 downto 0);\n")
noc_file.write("\n")
noc_file.write(" Rxy_reconf_PE: out std_logic_vector(7 downto 0);\n")
noc_file.write(" Cx_reconf_PE: out std_logic_vector(3 downto 0);\n")
noc_file.write(" Reconfig_command : out std_logic\n")
noc_file.write("\n")
noc_file.write(" );\n")
noc_file.write("end component; --component NoC_Node\n")
elif not add_node and add_SHMU and add_NI_Test:
noc_file.write("-- Declaring NI component\n\n")
noc_file.write("component NI is\n")
noc_file.write(" generic(current_address : integer := 10; -- the current node's address\n")
noc_file.write(" SHMU_address : integer := 0); \n")
# noc_file.write(" reserved_address : std_logic_vector(29 downto 0) := \"000000000000000001111111111111\";\n")
# noc_file.write(" flag_address : std_logic_vector(29 downto 0) := \"000000000000000010000000000000\"; -- reserved address for the memory mapped I/O\n")
# noc_file.write(" counter_address : std_logic_vector(29 downto 0) := \"000000000000000010000000000001\";\n")
# noc_file.write(" reconfiguration_address : std_logic_vector(29 downto 0) := \"000000000000000010000000000010\"; -- reserved address for reconfiguration register\n")
# noc_file.write(" self_diagnosis_address : std_logic_vector(29 downto 0) := \"000000000000000010000000000011\"); -- reserved address for self diagnosis register\n")
noc_file.write(" port(clk : in std_logic;\n")
noc_file.write(" reset : in std_logic;\n")
noc_file.write(" enable : in std_logic;\n")
noc_file.write(" write_byte_enable : in std_logic_vector(3 downto 0);\n")
noc_file.write(" address : in std_logic_vector(31 downto 2);\n")
noc_file.write(" data_write : in std_logic_vector(31 downto 0);\n")
noc_file.write(" data_read : out std_logic_vector(31 downto 0);\n")
noc_file.write("\n")
noc_file.write(" -- Flags used by JNIFR and JNIFW instructions\n")
noc_file.write(" --NI_read_flag : out std_logic; -- One if the N2P fifo is empty. No read should be performed if one.\n")
noc_file.write(" --NI_write_flag : out std_logic; -- One if P2N fifo is full. no write should be performed if one.\n")
noc_file.write(" -- interrupt signal: generated evertime a packet is recieved!\n")
noc_file.write(" irq_out : out std_logic;\n")
noc_file.write(" -- signals for sending packets to network\n")
noc_file.write(" credit_in : in std_logic;\n")
noc_file.write(" valid_out: out std_logic;\n")
noc_file.write(" TX: out std_logic_vector(31 downto 0); -- data sent to the NoC\n")
noc_file.write(" -- signals for reciving packets from the network\n")
noc_file.write(" credit_out : out std_logic;\n")
noc_file.write(" valid_in: in std_logic;\n")
noc_file.write(" RX: in std_logic_vector(31 downto 0); -- data recieved form the NoC\n")
noc_file.write(" -- fault information signals from the router\n")
noc_file.write(" link_faults: in std_logic_vector(4 downto 0);\n")
noc_file.write(" turn_faults: in std_logic_vector(19 downto 0);\n")
noc_file.write("\n")
noc_file.write(" Rxy_reconf_PE: out std_logic_vector(7 downto 0);\n")
noc_file.write(" Cx_reconf_PE: out std_logic_vector(3 downto 0); -- if you are not going to update Cx you should write all ones! (it will be and will the current Cx bits)\n")
noc_file.write(" Reconfig_command : out std_logic\n")
noc_file.write(" );\n")
noc_file.write("end component; --component NI\n")
noc_file.write("\n")
noc_file.write("-- generating bulk signals...\n")
for i in range(0, network_dime*network_dime):
noc_file.write("\tsignal RX_L_"+str(i)+", TX_L_"+str(i)+": std_logic_vector ("+str(data_width-1)+" downto 0);\n")
noc_file.write("\tsignal credit_counter_out_"+str(i)+": std_logic_vector (1 downto 0);\n")
noc_file.write("\tsignal credit_out_L_"+str(i)+", credit_in_L_"+str(i)+", valid_in_L_"+str(i)+", valid_out_L_"+str(i) + ": std_logic;\n")
#noc_file.write("\n\nAlias buried_sig is <<signal .NoC.valid_in_E_11 :std_logic>>;\n\n")
if add_SHMU:
for i in range(0, network_dime*network_dime):
noc_file.write("\tsignal link_faults_"+str(i)+ " : std_logic_vector(4 downto 0);\n")
noc_file.write("\tsignal turn_faults_"+str(i)+ " : std_logic_vector(19 downto 0);\n")
noc_file.write("\tsignal Rxy_reconf_PE_"+str(i)+ " : std_logic_vector(7 downto 0);\n")
noc_file.write("\tsignal Cx_reconf_PE_"+str(i)+ " : std_logic_vector(3 downto 0);\n")
noc_file.write("\tsignal Reconfig_command_"+str(i)+ " : std_logic;\n")
noc_file.write("\t-- NI testing signals\n")
if add_NI_Test:
noc_file.write("\tsignal reserved_address : std_logic_vector(29 downto 0):= \"000000000000000001111111111111\";\n")
noc_file.write("\tsignal flag_address : std_logic_vector(29 downto 0):= \"000000000000000010000000000000\" ; -- reserved address for the memory mapped I/O\n")
noc_file.write("\tsignal counter_address : std_logic_vector(29 downto 0):= \"000000000000000010000000000001\";\n")
noc_file.write("\tsignal reconfiguration_address : std_logic_vector(29 downto 0):= \"000000000000000010000000000010\"; -- reserved address for reconfiguration register\n")
noc_file.write("\tsignal self_diagnosis_address : std_logic_vector(29 downto 0):= \"000000000000000010000000000011\";\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):
string_to_print += "irq_out_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic;\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):
string_to_print += "test_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic_vector(31 downto 0);\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):<|fim▁hole|> for i in range(0, network_dime*network_dime):
string_to_print += "write_byte_enable_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic_vector(3 downto 0);\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):
string_to_print += "address_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic_vector(31 downto 2);\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):
string_to_print += "data_write_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic_vector(31 downto 0);\n")
string_to_print = ""
for i in range(0, network_dime*network_dime):
string_to_print += "data_read_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic_vector(31 downto 0);\n")
noc_file.write("\t--------------\n")
if not add_SHMU:
noc_file.write("\tsignal Rxy_reconf: std_logic_vector (7 downto 0) := \"01111101\";\n")
noc_file.write("\tsignal Reconfig: std_logic := '0';\n")
noc_file.write("\t--------------\n")
noc_file.write("\tconstant clk_period : time := 10 ns;\n")
noc_file.write("\tsignal reset, not_reset, clk: std_logic :='0';\n")
noc_file.write("\n")
noc_file.write("begin\n\n")
noc_file.write(" clk_process :process\n")
noc_file.write(" begin\n")
noc_file.write(" clk <= '0';\n")
noc_file.write(" wait for clk_period/2; \n")
noc_file.write(" clk <= '1';\n")
noc_file.write(" wait for clk_period/2; \n")
noc_file.write(" end process;\n")
noc_file.write("\n")
noc_file.write("reset <= '1' after 1 ns;\n")
noc_file.write("-- instantiating the network\n")
if add_tracker:
noc_file.write("-- instantiating the flit trackers\n")
for i in range(0, network_dime**2):
noc_file.write("F_T_"+str(i)+"_T: flit_tracker generic map (\n")
noc_file.write(" DATA_WIDTH => "+str(data_width)+", \n")
noc_file.write(" tracker_file =>\"traces/track"+str(i)+"_T.txt\"\n")
noc_file.write(" )\n")
noc_file.write(" port map (\n")
noc_file.write(" clk => clk, RX => TX_L_"+str(i)+", \n")
noc_file.write(" valid_in => valid_out_L_"+str(i)+"\n")
noc_file.write(" );\n")
string_to_print = ""
string_to_print += "NoC: network_"+str(network_dime)+"x"+str(network_dime)+" generic map (DATA_WIDTH => "+str(data_width)+", DATA_WIDTH_LV => 11)\n"
if not add_SHMU:
string_to_print += "port map (reset, clk, Rxy_reconf, Reconfig, \n"
else:
string_to_print += "port map (reset, clk, \n"
for i in range(network_dime**2):
string_to_print += "\tRX_L_"+str(i)+", credit_out_L_"+str(i)+", valid_out_L_"+str(i)+", credit_in_L_"+str(i)+", valid_in_L_"+str(i)+", TX_L_"+str(i)+", \n"
if add_SHMU:
string_to_print += "\t-- should be connected to NI\n"
for i in range(0, network_dime**2):
string_to_print += "\tlink_faults_"+str(i)+", turn_faults_"+str(i)+","
string_to_print += "\tRxy_reconf_PE_"+str(i)+", Cx_reconf_PE_"+str(i)+", Reconfig_command_"+str(i)+", \n"
noc_file.write(string_to_print[:len(string_to_print)-3])
noc_file.write("\n ); \n")
noc_file.write("not_reset <= not reset; \n")
if add_node and not add_SHMU and not add_NI_Test:
noc_file.write("\n")
noc_file.write("-- connecting the PEs\n")
for node_number in range(0, network_dime*network_dime):
noc_file.write("PE_" + str(node_number) + ": NoC_Node \n")
noc_file.write("generic map( current_address => " + str(node_number) + ",\n")
noc_file.write("\tstim_file => \"code_" + str(node_number).zfill(3) + ".txt\",\n")
noc_file.write("\tlog_file => \"output_" + str(node_number).zfill(3) + ".txt\")\n\n")
noc_file.write("port map( not_reset, clk, \n")
noc_file.write("\n")
noc_file.write(" credit_in => credit_out_L_" + str(node_number) + ", \n")
noc_file.write(" valid_out => valid_in_L_" + str(node_number) + ",\n")
noc_file.write(" TX => RX_L_" + str(node_number) + ", \n")
noc_file.write("\n")
noc_file.write(" credit_out => credit_in_L_" + str(node_number) + ", \n")
noc_file.write(" valid_in => valid_out_L_" + str(node_number) + ",\n")
noc_file.write(" RX => TX_L_" + str(node_number) + "\n")
noc_file.write(" );\n")
if add_SHMU and not add_NI_Test:
noc_file.write("\n")
noc_file.write("-- connecting the PEs\n")
for node_number in range(0, network_dime*network_dime):
noc_file.write("PE_" + str(node_number) + ": NoC_Node \n")
noc_file.write("generic map( current_address => " + str(node_number) + ",\n")
noc_file.write("\tstim_file => \"code_" + str(node_number) + ".txt\",\n")
noc_file.write("\tlog_file => \"output_" + str(node_number) + ".txt\")\n\n")
noc_file.write("port map( not_reset, clk, \n")
noc_file.write("\n")
noc_file.write(" credit_in => credit_out_L_" + str(node_number) + ", \n")
noc_file.write(" valid_out => valid_in_L_" + str(node_number) + ",\n")
noc_file.write(" TX => RX_L_" + str(node_number) + ", \n")
noc_file.write("\n")
noc_file.write(" credit_out => credit_in_L_" + str(node_number) + ", \n")
noc_file.write(" valid_in => valid_out_L_" + str(node_number) + ",\n")
noc_file.write(" RX => TX_L_" + str(node_number) + ",\n")
noc_file.write(" link_faults => link_faults_"+str(node_number)+",\n")
noc_file.write(" turn_faults => turn_faults_"+str(node_number)+",\n")
noc_file.write(" Rxy_reconf_PE => Rxy_reconf_PE_"+str(node_number)+", \n")
noc_file.write(" Cx_reconf_PE => Cx_reconf_PE_"+str(node_number)+",\n")
noc_file.write(" Reconfig_command => Reconfig_command_"+str(node_number)+"\n")
noc_file.write(" );\n")
elif add_NI_Test and add_SHMU:
noc_file.write("\n")
noc_file.write("-- connecting the NIs\n")
for node_number in range(0, network_dime*network_dime):
noc_file.write("NI_" + str(node_number) + ": NI \n")
noc_file.write(" generic map(current_address => " + str(node_number) + "\n")
noc_file.write(" ) \n")
noc_file.write(" port map(clk => clk , reset => not_reset , enable => enable_" + str(node_number) + ", \n")
noc_file.write(" write_byte_enable => write_byte_enable_" + str(node_number) + ", \n")
noc_file.write(" address => address_" + str(node_number) + ", \n")
noc_file.write(" data_write => data_write_" + str(node_number) + ", \n")
noc_file.write(" data_read => data_read_" + str(node_number) + ", \n")
noc_file.write(" -- interrupt signal: generated evertime a packet is recieved!\n")
noc_file.write(" irq_out => irq_out_" + str(node_number) + ", \n")
noc_file.write(" -- signals for sending packets to network\n")
noc_file.write(" credit_in => credit_out_L_" + str(node_number) + ", \n")
noc_file.write(" valid_out => valid_in_L_" + str(node_number) + ",\n")
noc_file.write(" TX => RX_L_" + str(node_number) + ", -- data sent to the NoC\n")
noc_file.write(" -- signals for reciving packets from the network\n")
noc_file.write(" credit_out => credit_in_L_" + str(node_number) + ", \n")
noc_file.write(" valid_in => valid_out_L_" + str(node_number) + ",\n")
noc_file.write(" RX => TX_L_" + str(node_number) + ",\n")
noc_file.write(" -- fault information signals from the router\n")
noc_file.write(" link_faults => link_faults_" + str(node_number) + ", \n")
noc_file.write(" turn_faults => turn_faults_" + str(node_number) + ",\n")
noc_file.write("\n")
noc_file.write(" Rxy_reconf_PE => Rxy_reconf_PE_" + str(node_number) + ", \n")
noc_file.write(" Cx_reconf_PE => Cx_reconf_PE_" + str(node_number) + ",\n")
noc_file.write(" Reconfig_command => Reconfig_command_" + str(node_number) + "\n")
noc_file.write(" );\n")
noc_file.write("\n\n")
noc_file.write("-- connecting the packet generators\n")
for node_number in range(0, network_dime*network_dime):
random_start = random.randint(3, 50)
if got_finish_time:
random_end = sim_finish_time
else:
random_end = random.randint(random_start, 200)
noc_file.write("NI_control("+str(network_dime)+", "+str(frame_size)+", "+str(node_number)+", "+str(random_start)+", " +str(packet_size_min)+", " +str(packet_size_max)+", "+str(random_end)+" ns, clk,\n")
noc_file.write(" -- NI configuration\n")
noc_file.write(" reserved_address, flag_address, counter_address, reconfiguration_address, self_diagnosis_address,\n")
noc_file.write(" -- NI signals\n")
noc_file.write(" enable_" + str(node_number) + ", write_byte_enable_" + str(node_number) + ", address_" + str(node_number) + ", data_write_" + str(node_number) + ", data_read_" + str(node_number) + ", test_"+str(node_number)+"); \n")
noc_file.write("\n")
else:
noc_file.write("\n")
noc_file.write("-- connecting the packet generators\n")
if random_dest or bit_reversal:
for i in range(0, network_dime*network_dime):
random_start = random.randint(3, 50)
if got_finish_time:
random_end = sim_finish_time
else:
random_end = random.randint(random_start, 200)
noc_file.write("credit_counter_control(clk, credit_out_L_"+str(i)+", valid_in_L_"+str(i)+", credit_counter_out_"+str(i)+");\n")
if random_dest:
noc_file.write("gen_random_packet("+str(network_dime)+", "+str(frame_size)+", "+str(i)+", "+str(random_start)+", " +str(packet_size_min)+", " +str(packet_size_max)+", " +
str(random_end)+" ns, clk, credit_counter_out_"+str(i)+", valid_in_L_"+str(i)+", RX_L_"+str(i)+");\n")
elif bit_reversal:
noc_file.write("gen_bit_reversed_packet("+str(network_dime)+", "+str(frame_size)+", "+str(i)+", "+str(random_start)+", " +str(packet_size_min)+", " +str(packet_size_max)+", " +
str(random_end)+" ns, clk, credit_counter_out_"+str(i)+", valid_in_L_"+str(i)+", RX_L_"+str(i)+");\n")
noc_file.write("\n")
if not add_node and not add_NI_Test:
noc_file.write("\n")
noc_file.write("-- connecting the packet receivers\n")
for i in range(0, network_dime*network_dime):
noc_file.write("get_packet("+str(data_width)+", 5, "+str(i)+", clk, credit_in_L_"+str(i)+", valid_out_L_"+str(i)+", TX_L_"+str(i)+");\n")
noc_file.write("\n\n")
noc_file.write("end;\n")<|fim▁end|> | string_to_print += "enable_"+str(i)+ ", "
noc_file.write("\tsignal "+string_to_print[:-2]+": std_logic;\n")
string_to_print = "" |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*!
* hybridify-all <https://github.com/hybridables/hybridify-all>
*
* Copyright (c) 2015 Charlike Mike Reagent, contributors.
* Released under the MIT license.
*/
'use strict'
var reduce = require('object.reduce')
var hybridify = require('hybridify')
/**
* > Hybridifies all the selected functions in an object.
*
* **Example:**
*
* ```js
* var hybridifyAll = require('hybridify-all')
* var fs = require('fs')
*
* fs = hybridifyAll(fs)
* fs.readFile(__filename, 'utf8', function(err, res) {
* //=> err, res
* })
* .then(function(res) {
* //=> res
* return fs.stat(__filename)
* })
* .then(function(stat) {
* assert.strictEqual(stat.size, fs.statSync(__filename).size)
* })
* ```
*
* @name hybridifyAll
* @param {Object|Function} `<source>` the source object for the async functions
* @param {Object|Function} `[dest]` the destination to set all the hybridified methods
* @return {Object|Function}
* @api public
*/
module.exports = function hybridifyAll (source, dest) {
if (!source) {
    throw new Error('hybridify-all: should have at least 1 argument')
}
if (typeOf(source) !== 'function' && typeOf(source) !== 'object') {
throw new TypeError('hybridify-all: expect `source` be object|function')
}
dest = dest || {}
if (typeof source === 'function') {
dest = hybridify(source)
}
return Object.keys(source).length ? reduce(source, function (dest, fn, key) {
if (typeof fn === 'function') {
dest[key] = hybridify(fn)
}
return dest
}, dest) : dest
}
/**
* Get correct type of value
*
* @param {*} `val`
* @return {String}
* @api private
*/
function typeOf (val) {
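  // arrays first, then primitives via typeof, everything else via Object.prototype.toString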
if (Array.isArray(val)) {
return 'array'
}
if (typeof val !== 'object') {
return typeof val
}
  return Object.prototype.toString.call(val).slice(8, -1).toLowerCase()<|fim▁hole|>
<|fim▁end|> | }
<|file_name|>test_tds_parameter.py<|end_file_name|><|fim▁begin|>import re
import warnings
import ctds
from .base import TestExternalDatabase
from .compat import PY3, PY36, unicode_
class TestTdsParameter(TestExternalDatabase):
def test___doc__(self):
self.assertEqual(
ctds.Parameter.__doc__,
'''\
Parameter(value, output=False)
Explicitly define a parameter for :py:meth:`.callproc`,
:py:meth:`.execute`, or :py:meth:`.executemany`. This is necessary
to indicate whether a parameter is *SQL* `OUTPUT` or `INPUT/OUTPUT`
parameter.
:param object value: The parameter's value.
:param bool output: Is the parameter an output parameter.
'''
)
def test_parameter(self):
param1 = ctds.Parameter(b'123', output=True)
self.assertEqual(param1.value, b'123')
self.assertTrue(isinstance(param1, ctds.Parameter))
param2 = ctds.Parameter(b'123')
self.assertEqual(param1.value, b'123')
self.assertEqual(type(param1), type(param2))
self.assertTrue(isinstance(param2, ctds.Parameter))
def test___repr__(self):
for parameter, expected in (
(
ctds.Parameter(b'123', output=True),
"ctds.Parameter(b'123', output=True)" if PY3 else "ctds.Parameter('123', output=True)"
),
(
ctds.Parameter(unicode_('123'), output=False),
"ctds.Parameter('123')" if PY3 else "ctds.Parameter(u'123')"
),
(
ctds.Parameter(None),
"ctds.Parameter(None)"
),
(
ctds.Parameter(ctds.SqlVarBinary(b'4321', size=10)),
"ctds.Parameter(ctds.SqlVarBinary(b'4321', size=10))"
if PY3 else
"ctds.Parameter(ctds.SqlVarBinary('4321', size=10))"
)
):
self.assertEqual(repr(parameter), expected)
def _test__cmp__(self, __cmp__, expected, oper):
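        # Each entry of `expected` pairs with the case at the same index; TypeError marks comparisons that must be unorderable.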
cases = (
(ctds.Parameter(b'1234'), ctds.Parameter(b'123')),
(ctds.Parameter(b'123'), ctds.Parameter(b'123')),
(ctds.Parameter(b'123'), ctds.Parameter(b'123', output=True)),
(ctds.Parameter(b'123'), ctds.Parameter(b'1234')),
(ctds.Parameter(b'123'), b'123'),
(ctds.Parameter(b'123'), ctds.Parameter(123)),
(ctds.Parameter(b'123'), unicode_('123')),
(ctds.Parameter(b'123'), ctds.SqlBinary(None)),
(ctds.Parameter(b'123'), 123),
(ctds.Parameter(b'123'), None),
)
for index, args in enumerate(cases):
operation = '[{0}]: {1} {2} {3}'.format(index, repr(args[0]), oper, repr(args[1]))
if expected[index] == TypeError:
try:
__cmp__(*args)
except TypeError as ex:
regex = (
r"'{0}' not supported between instances of '[^']+' and '[^']+'".format(oper)
if not PY3 or PY36
else
r'unorderable types: \S+ {0} \S+'.format(oper)
)
self.assertTrue(re.match(regex, str(ex)), ex)
else:
self.fail('{0} did not fail as expected'.format(operation)) # pragma: nocover
else:
self.assertEqual(__cmp__(*args), expected[index], operation)
def test___cmp__eq(self):
self._test__cmp__(
lambda left, right: left == right,
(
False,
True,
True,
False,
True,
False,
not PY3,
False,
False,
False,
),
'=='
)
def test___cmp__ne(self):
self._test__cmp__(
lambda left, right: left != right,
(
True,
False,
False,
True,
False,
True,
PY3,
True,
True,
True,
),
'!='
)
def test___cmp__lt(self):
self._test__cmp__(
lambda left, right: left < right,
(
False,
False,
False,
True,
False,
TypeError if PY3 else False,
TypeError if PY3 else False,
TypeError if PY3 else False,
TypeError if PY3 else False,
TypeError if PY3 else False,
),
'<'
)
def test___cmp__le(self):
self._test__cmp__(
lambda left, right: left <= right,
(
False,
True,
True,
True,
True,
TypeError if PY3 else False,
TypeError if PY3 else True,
TypeError if PY3 else False,<|fim▁hole|> '<='
)
def test___cmp__gt(self):
self._test__cmp__(
lambda left, right: left > right,
(
True,
False,
False,
False,
False,
TypeError if PY3 else True,
TypeError if PY3 else False,
TypeError if PY3 else True,
TypeError if PY3 else True,
TypeError if PY3 else True,
),
'>'
)
def test___cmp__ge(self):
self._test__cmp__(
lambda left, right: left >= right,
(
True,
True,
True,
False,
True,
TypeError if PY3 else True,
TypeError if PY3 else True,
TypeError if PY3 else True,
TypeError if PY3 else True,
TypeError if PY3 else True,
),
'>='
)
def test_typeerror(self):
for case in (None, object(), 123, 'foobar'):
self.assertRaises(TypeError, ctds.Parameter, case, b'123')
self.assertRaises(TypeError, ctds.Parameter)
self.assertRaises(TypeError, ctds.Parameter, output=False)
for case in (None, object(), 123, 'foobar'):
self.assertRaises(TypeError, ctds.Parameter, b'123', output=case)
def test_reuse(self):
with self.connect() as connection:
with connection.cursor() as cursor:
for value in (
None,
123456,
unicode_('hello world'),
b'some bytes',
):
for output in (True, False):
parameter = ctds.Parameter(value, output=output)
for _ in range(0, 2):
# Ignore warnings generated due to output parameters
# used with result sets.
with warnings.catch_warnings(record=True):
cursor.execute(
'''
SELECT :0
''',
(parameter,)
)
self.assertEqual(
[tuple(row) for row in cursor.fetchall()],
[(value,)]
)<|fim▁end|> | TypeError if PY3 else False,
TypeError if PY3 else False,
), |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
from pip.req import parse_requirements
version = "6.27.20"
requirements = parse_requirements("requirements.txt", session="")
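# parse_requirements yields pip InstallRequirement objects; setup() below converts them back to plain strings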
setup(
name='frappe',
version=version,<|fim▁hole|> author_email='[email protected]',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=[str(ir.req) for ir in requirements],
dependency_links=[str(ir._link) for ir in requirements if ir._link]
)<|fim▁end|> | description='Metadata driven, full-stack web framework',
author='Frappe Technologies', |
<|file_name|>renamer.js<|end_file_name|><|fim▁begin|>"use strict";
var _classCallCheck = require("babel-runtime/helpers/class-call-check")["default"];
var _interopRequireDefault = require("babel-runtime/helpers/interop-require-default")["default"];
var _interopRequireWildcard = require("babel-runtime/helpers/interop-require-wildcard")["default"];
exports.__esModule = true;
var _binding = require("../binding");
var _binding2 = _interopRequireDefault(_binding);
var _babelTypes = require("babel-types");
var t = _interopRequireWildcard(_babelTypes);
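// Visitor that walks the binding's scope and rewrites every reference from the old name to the new one.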
var renameVisitor = {
ReferencedIdentifier: function ReferencedIdentifier(_ref, state) {
var node = _ref.node;
if (node.name === state.oldName) {
node.name = state.newName;
}
},
Scope: function Scope(path, state) {
if (!path.scope.bindingIdentifierEquals(state.oldName, state.binding.identifier)) {
path.skip();
}
},
"AssignmentExpression|Declaration": function AssignmentExpressionDeclaration(path, state) {
var ids = path.getOuterBindingIdentifiers();
for (var _name in ids) {
if (_name === state.oldName) ids[_name].name = state.newName;
}
}
};
var Renamer = (function () {
function Renamer(binding, oldName, newName) {
_classCallCheck(this, Renamer);
this.newName = newName;
this.oldName = oldName;
this.binding = binding;
}
Renamer.prototype.maybeConvertFromExportDeclaration = function maybeConvertFromExportDeclaration(parentDeclar) {
var exportDeclar = parentDeclar.parentPath.isExportDeclaration() && parentDeclar.parentPath;
if (!exportDeclar) return;
// build specifiers that point back to this export declaration
var isDefault = exportDeclar.isExportDefaultDeclaration();
if (isDefault && (parentDeclar.isFunctionDeclaration() || parentDeclar.isClassDeclaration()) && !parentDeclar.node.id) {
// Ensure that default class and function exports have a name so they have a identifier to
// reference from the export specifier list.
parentDeclar.node.id = parentDeclar.scope.generateUidIdentifier("default");
}
var bindingIdentifiers = parentDeclar.getOuterBindingIdentifiers();
var specifiers = [];
for (var _name2 in bindingIdentifiers) {
var localName = _name2 === this.oldName ? this.newName : _name2;
var exportedName = isDefault ? "default" : _name2;
specifiers.push(t.exportSpecifier(t.identifier(localName), t.identifier(exportedName)));
}
var aliasDeclar = t.exportNamedDeclaration(null, specifiers);
// hoist to the top if it's a function
if (parentDeclar.isFunctionDeclaration()) {
aliasDeclar._blockHoist = 3;
}
exportDeclar.insertAfter(aliasDeclar);
exportDeclar.replaceWith(parentDeclar.node);
};
Renamer.prototype.maybeConvertFromClassFunctionDeclaration = function maybeConvertFromClassFunctionDeclaration(path) {
return; // TODO
// retain the `name` of a class/function declaration
if (!path.isFunctionDeclaration() && !path.isClassDeclaration()) return;
if (this.binding.kind !== "hoisted") return;
path.node.id = t.identifier(this.oldName);
path.node._blockHoist = 3;
path.replaceWith(t.variableDeclaration("let", [t.variableDeclarator(t.identifier(this.newName), t.toExpression(path.node))]));
};
Renamer.prototype.maybeConvertFromClassFunctionExpression = function maybeConvertFromClassFunctionExpression(path) {
return; // TODO
// retain the `name` of a class/function expression
if (!path.isFunctionExpression() && !path.isClassExpression()) return;
if (this.binding.kind !== "local") return;
path.node.id = t.identifier(this.oldName);
this.binding.scope.parent.push({
id: t.identifier(this.newName)
});
path.replaceWith(t.assignmentExpression("=", t.identifier(this.newName), path.node));
};
Renamer.prototype.rename = function rename(block) {
var binding = this.binding;<|fim▁hole|> var newName = this.newName;
var scope = binding.scope;
var path = binding.path;
var parentDeclar = path.find(function (path) {
return path.isDeclaration() || path.isFunctionExpression();
});
if (parentDeclar) {
this.maybeConvertFromExportDeclaration(parentDeclar);
}
scope.traverse(block || scope.block, renameVisitor, this);
if (!block) {
scope.removeOwnBinding(oldName);
scope.bindings[newName] = binding;
this.binding.identifier.name = newName;
}
if (binding.type === "hoisted") {
// https://github.com/babel/babel/issues/2435
// todo: hoist and convert function to a let
}
if (parentDeclar) {
this.maybeConvertFromClassFunctionDeclaration(parentDeclar);
this.maybeConvertFromClassFunctionExpression(parentDeclar);
}
};
return Renamer;
})();
exports["default"] = Renamer;
module.exports = exports["default"];<|fim▁end|> | var oldName = this.oldName; |
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
title = 'Free Eclipse icons';<|fim▁hole|><|fim▁end|> | } |
<|file_name|>RandomColoringDiv2.java<|end_file_name|><|fim▁begin|>public class RandomColoringDiv2 {
public int getCount(int maxR, int maxG, int maxB, int startR, int startG, int startB, int d1,
int d2) {
int colors = 0;
int minR = Math.max(0, startR - d2);
int minG = Math.max(0, startG - d2);
int minB = Math.max(0, startB - d2);
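        // Enumerate the box of colors within per-channel distance d2 of the start color and count those at distance >= d1 in at least one channel.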
for (int r = minR; r<maxR; r++) {
int difR = Math.abs(r - startR);
if (difR > d2)
break;
for (int g = minG; g<maxG; g++) {
int difG = Math.abs(g - startG);
if (difG > d2)
break;
for (int b = minB; b<maxB; b++) {
int difB = Math.abs(b - startB);
if (difB > d2)
break;
if (difR >= d1 || difG >= d1 || difB >=d1)
colors++;
}
}
}
return colors;
}
public static void main(String[] args) {
long time;
int answer;
boolean errors = false;
int desiredAnswer;
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(5, 1, 1, 2, 0, 0, 0, 1);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 3;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);
if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(4, 2, 2, 0, 0, 0, 3, 3);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 4;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);
if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(4, 2, 2, 0, 0, 0, 5, 5);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 0;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);
if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(6, 9, 10, 1, 2, 3, 0, 10);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 540;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);
if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(6, 9, 10, 1, 2, 3, 4, 10);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 330;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);<|fim▁hole|> if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
time = System.currentTimeMillis();
answer = new RandomColoringDiv2().getCount(49, 59, 53, 12, 23, 13, 11, 22);
System.out.println("Time: " + (System.currentTimeMillis() - time) / 1000.0 + " seconds");
desiredAnswer = 47439;
System.out.println("Your answer:");
System.out.println("\t" + answer);
System.out.println("Desired answer:");
System.out.println("\t" + desiredAnswer);
if (answer != desiredAnswer) {
errors = true;
System.out.println("DOESN'T MATCH!!!!");
} else
System.out.println("Match :-)");
System.out.println();
if (errors)
System.out.println("Some of the test cases had errors :-(");
else
System.out.println("You're a stud (at least on the test data)! :-D ");
}
}
// Powered by [KawigiEdit] 2.0!<|fim▁end|> | |
<|file_name|>0007_auto_20170423_1937.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-04-23 19:37
''' Migration '''
from __future__ import unicode_literals
<|fim▁hole|>
class Migration(migrations.Migration):
''' Добавляем опциональные поля '''
dependencies = [
('lawe', '0006_remove_account_unit'),
]
operations = [
migrations.AlterField(
model_name='account',
name='group',
field=models.CharField(blank=True, max_length=200, verbose_name='Основная группа'),
),
migrations.AlterField(
model_name='account',
name='name',
field=models.CharField(blank=True, max_length=200, verbose_name='Название'),
),
migrations.AlterField(
model_name='account',
name='subgroup',
field=models.CharField(blank=True, max_length=200, verbose_name='Подгруппа'),
),
migrations.AlterField(
model_name='transaction',
name='description',
field=models.CharField(blank=True, max_length=200, verbose_name='Описание'),
),
]<|fim▁end|> | from django.db import migrations, models
|
<|file_name|>CharSequences.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.util;
import java.util.Arrays;
import java.nio.CharBuffer;
import org.opengis.metadata.citation.Citation; // For javadoc
import org.opengis.referencing.IdentifiedObject; // For javadoc
import static java.lang.Character.*;
/**
* Static methods working with {@link CharSequence} instances. Some methods defined in this
* class duplicate the functionalities already provided in the standard {@link String} class,
 * but work on a generic {@code CharSequence} instance instead of {@code String}.
*
* <h2>Unicode support</h2>
 * Every method defined in this class works on <cite>code points</cite> instead of characters
* when appropriate. Consequently those methods should behave correctly with characters outside
* the <cite>Basic Multilingual Plane</cite> (BMP).
*
* <h2>Policy on space characters</h2>
* Java defines two methods for testing if a character is a white space:
* {@link Character#isWhitespace(int)} and {@link Character#isSpaceChar(int)}.
* Those two methods differ in the way they handle {@linkplain Characters#NO_BREAK_SPACE
* no-break spaces}, tabulations and line feeds. The general policy in the SIS library is:
*
* <ul>
* <li>Use {@code isWhitespace(…)} when separating entities (words, numbers, tokens, <i>etc.</i>)
* in a list. Using that method, characters separated by a no-break space are considered as
* part of the same entity.</li>
* <li>Use {@code isSpaceChar(…)} when parsing a single entity, for example a single word.
* Using this method, no-break spaces are considered as part of the entity while line
* feeds or tabulations are entity boundaries.</li>
* </ul>
*
* <div class="note"><b>Example:</b>
* Numbers formatted in the French locale use no-break spaces as group separators. When parsing a list of numbers,
* ordinary spaces around the numbers may need to be ignored, but no-break spaces shall be considered as part of the
* numbers. Consequently {@code isWhitespace(…)} is appropriate for skipping spaces <em>between</em> the numbers.
 * But if there are spaces to skip <em>inside</em> a single number, then {@code isSpaceChar(…)} is a good choice
* for accepting no-break spaces and for stopping the parse operation at tabulations or line feed character.
* A tabulation or line feed between two characters is very likely to separate two distinct values.</div>
*
* In practice, the {@link java.text.Format} implementations in the SIS library typically use
* {@code isSpaceChar(…)} while most of the rest of the SIS library, including this
* {@code CharSequences} class, consistently uses {@code isWhitespace(…)}.
*
* <p>Note that the {@link String#trim()} method doesn't follow any of those policies and should
* generally be avoided. That {@code trim()} method removes every ISO control characters without
* distinction about whether the characters are space or not, and ignore all Unicode spaces.
* The {@link #trimWhitespaces(String)} method defined in this class can be used as an alternative.</p>
*
* <h2>Handling of null values</h2>
* Most methods in this class accept a {@code null} {@code CharSequence} argument. In such cases
* the method return value is either a {@code null} {@code CharSequence}, an empty array, or a
* {@code 0} or {@code false} primitive type calculated as if the input was an empty string.
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.1
*
* @see StringBuilders
*
* @since 0.3
* @module
*/
public final class CharSequences extends Static {
/**
     * An array of zero length. This constant plays a role equivalent to
* {@link java.util.Collections#EMPTY_LIST}.
*/
public static final String[] EMPTY_ARRAY = new String[0];
/**
* An array of strings containing only white spaces. String lengths are equal to their
     * index + 1 in the {@code SPACES} array. For example, {@code SPACES[4]} contains a string
     * of length 5. Strings are constructed only when first needed.
*/
private static final String[] SPACES = new String[10];
/**
* Do not allow instantiation of this class.
*/
private CharSequences() {
}
/**
* Returns the code point after the given index. This method completes
* {@link Character#codePointBefore(CharSequence, int)} but is rarely used because slightly
* inefficient (in most cases, the code point at {@code index} is known together with the
* corresponding {@code charCount(int)} value, so the method calls should be unnecessary).
*/
private static int codePointAfter(final CharSequence text, final int index) {
return codePointAt(text, index + charCount(codePointAt(text, index)));
}
/**
* Returns a character sequence of the specified length filled with white spaces.
*
* <h4>Use case</h4>
* This method is typically invoked for performing right-alignment of text on the
* {@linkplain java.io.Console console} or other devices using a monospaced font.
* Callers compute a value for the {@code length} argument by (<var>desired width</var> - <var>used width</var>).
* Since the <var>used width</var> value may be greater than expected, this method handles negative
* {@code length} values as if the value was zero.
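*
* <div class="note"><b>Example:</b>
* an illustrative usage for right-alignment (the field width of 10 is arbitrary):
*
* {@preformat java
*     String value = "42";
*     String aligned = spaces(10 - value.length()) + value;    // Produces "        42".
* }
* </div>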
*
* @param length the string length. Negative values are clamped to 0.
* @return a string of length {@code length} filled with white spaces.
*/
public static CharSequence spaces(final int length) {
/*
* No need to synchronize. In the unlikely event of two threads calling this method
* at the same time and the two calls creating a new string, the String.intern() call
* will take care of canonicalizing the strings.
*/
if (length <= 0) {
return "";
}
if (length < SPACES.length) {
String s = SPACES[length - 1];
if (s == null) {
final char[] spaces = new char[length];
Arrays.fill(spaces, ' ');
s = new String(spaces).intern();
SPACES[length - 1] = s;
}
return s;
}
return new CharSequence() {
@Override public int length() {
return length;
}
@Override public char charAt(int index) {
ArgumentChecks.ensureValidIndex(length, index);
return ' ';
}
@Override public CharSequence subSequence(final int start, final int end) {
ArgumentChecks.ensureValidIndexRange(length, start, end);
final int n = end - start;
return (n == length) ? this : spaces(n);
}
@Override public String toString() {
final char[] array = new char[length];
Arrays.fill(array, ' ');
return new String(array);
}
};
}
/**
* Returns the {@linkplain CharSequence#length() length} of the given characters sequence,
* or 0 if {@code null}.
*
* @param text the character sequence from which to get the length, or {@code null}.
* @return the length of the character sequence, or 0 if the argument is {@code null}.
*/
public static int length(final CharSequence text) {
return (text != null) ? text.length() : 0;
}
/**
* Returns the number of Unicode code points in the given characters sequence,
* or 0 if {@code null}. Unpaired surrogates within the text count as one code
* point each.
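*
* <div class="note"><b>Example:</b>
* an illustrative contrast with {@code length(…)}, using the supplementary character U+1F600
* (which is encoded as a surrogate pair):
*
* {@preformat java
*     String text = "a\uD83D\uDE00";    // "a" followed by U+1F600.
*     length(text);                     // Returns 3 (number of char units).
*     codePointCount(text);             // Returns 2 (number of Unicode code points).
* }
* </div>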
*
* @param text the character sequence from which to get the count, or {@code null}.
* @return the number of Unicode code points, or 0 if the argument is {@code null}.
*
* @see #codePointCount(CharSequence, int, int)
*/
public static int codePointCount(final CharSequence text) {
return (text != null) ? codePointCount(text, 0, text.length()) : 0;
}
/**
* Returns the number of Unicode code points in the given characters sub-sequence,
* or 0 if {@code null}. Unpaired surrogates within the text count as one code
* point each.
*
* <p>This method performs the same work as the standard
* {@link Character#codePointCount(CharSequence, int, int)} method, except that it tries
* to delegate to the optimized methods of the {@link String}, {@link StringBuilder},
* {@link StringBuffer} or {@link CharBuffer} classes if possible.</p>
*
* @param text the character sequence from which to get the count, or {@code null}.
* @param fromIndex the index from which to start the computation.
* @param toIndex the index after the last character to take into account.
* @return the number of Unicode code points, or 0 if the argument is {@code null}.
*
* @see Character#codePointCount(CharSequence, int, int)
* @see String#codePointCount(int, int)
* @see StringBuilder#codePointCount(int, int)
*/
public static int codePointCount(final CharSequence text, final int fromIndex, final int toIndex) {
if (text == null) return 0;
if (text instanceof String) return ((String) text).codePointCount(fromIndex, toIndex);
if (text instanceof StringBuilder) return ((StringBuilder) text).codePointCount(fromIndex, toIndex);
if (text instanceof StringBuffer) return ((StringBuffer) text).codePointCount(fromIndex, toIndex);
if (text instanceof CharBuffer) {
final CharBuffer buffer = (CharBuffer) text;
if (buffer.hasArray() && !buffer.isReadOnly()) {
final int position = buffer.position();
return Character.codePointCount(buffer.array(), position + fromIndex, position + toIndex);
}
}
return Character.codePointCount(text, fromIndex, toIndex);
}
/**
* Returns the number of occurrences of the {@code toSearch} string in the given {@code text}.
* The search is case-sensitive.
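*
* <div class="note"><b>Example:</b>
* illustrative values (note that matches are counted without overlapping):
*
* {@preformat java
*     count("a,b,,c", ",");    // Returns 3.
*     count("aaaa", "aa");     // Returns 2, not 3, because matches do not overlap.
* }
* </div>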
*
* @param text the character sequence in which to count occurrences, or {@code null}.
* @param toSearch the string to search in the given {@code text}.
* It shall contain at least one character.
* @return the number of occurrences of {@code toSearch} in {@code text},
* or 0 if {@code text} was null or empty.
* @throws NullArgumentException if the {@code toSearch} argument is null.
* @throws IllegalArgumentException if the {@code toSearch} argument is empty.
*/
public static int count(final CharSequence text, final String toSearch) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
final int length = toSearch.length();
if (length == 1) {
// Implementation working on a single character is faster.
return count(text, toSearch.charAt(0));
}
int n = 0;
if (text != null) {
int i = 0;
while ((i = indexOf(text, toSearch, i, text.length())) >= 0) {
n++;
i += length;
}
}
return n;
}
/**
* Counts the number of occurrences of the given character in the given character sequence.
*
* @param text the character sequence in which to count occurrences, or {@code null}.
* @param toSearch the character to count.
* @return the number of occurrences of the given character, or 0 if the {@code text} is null.
*/
public static int count(final CharSequence text, final char toSearch) {
int n = 0;
if (text != null) {
if (text instanceof String) {
final String s = (String) text;
// indexOf(…) returns -1 when there is no more occurrence, in which case ++i == 0 stops the loop.
for (int i=s.indexOf(toSearch); ++i != 0; i=s.indexOf(toSearch, i)) {
n++;
}
} else {
// No need to use the code point API here, since we are looking for exact matches.
for (int i=text.length(); --i>=0;) {
if (text.charAt(i) == toSearch) {
n++;
}
}
}
}
return n;
}
/**
* Returns the index within the given strings of the first occurrence of the specified part,
* starting at the specified index. This method is equivalent to the following method call,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only, and that the upper limit can be specified:
*
* {@preformat java
* return text.indexOf(toSearch.toString(), fromIndex);
* }
*
* There is no restriction on the value of {@code fromIndex}. If negative or greater
* than {@code toIndex}, then the behavior of this method is as if the search started
* from 0 or {@code toIndex} respectively. This is consistent with the
* {@link String#indexOf(String, int)} behavior.
*
* @param text the string in which to perform the search.
* @param toSearch the substring for which to search.
* @param fromIndex the index from which to start the search.
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the first occurrence of the specified part, starting at the specified index,
* or -1 if no occurrence has been found or if the {@code text} argument is null.
* @throws NullArgumentException if the {@code toSearch} argument is null.
* @throws IllegalArgumentException if the {@code toSearch} argument is empty.
*
* @see String#indexOf(String, int)
* @see StringBuilder#indexOf(String, int)
* @see StringBuffer#indexOf(String, int)
*/
public static int indexOf(final CharSequence text, final CharSequence toSearch, int fromIndex, int toIndex) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
if (text != null) {
int length = text.length();
if (toIndex > length) {
toIndex = length;
}
if (toSearch instanceof String && toIndex == length) {
if (text instanceof String) {
return ((String) text).indexOf((String) toSearch, fromIndex);
}
if (text instanceof StringBuilder) {
return ((StringBuilder) text).indexOf((String) toSearch, fromIndex);
}
if (text instanceof StringBuffer) {
return ((StringBuffer) text).indexOf((String) toSearch, fromIndex);
}
}
if (fromIndex < 0) {
fromIndex = 0;
}
length = toSearch.length();
toIndex -= length;
search: for (; fromIndex <= toIndex; fromIndex++) {
for (int i=0; i<length; i++) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (text.charAt(fromIndex + i) != toSearch.charAt(i)) {
continue search;
}
}
return fromIndex;
}
}
return -1;
}
/**
* Returns the index within the given character sequence of the first occurrence of the
* specified character, starting the search at the specified index. If the character is
* not found, then this method returns -1.
*
* <p>There is no restriction on the value of {@code fromIndex}. If negative or greater
* than {@code toIndex}, then the behavior of this method is as if the search started
* from 0 or {@code toIndex} respectively. This is consistent with the behavior documented
* in {@link String#indexOf(int, int)}.</p>
*
* @param text the character sequence in which to perform the search, or {@code null}.
* @param toSearch the Unicode code point of the character to search.
* @param fromIndex the index to start the search from.
* @param toIndex the index after the last character where to perform the search.
* @return the index of the first occurrence of the given character in the specified sub-sequence,
* or -1 if no occurrence has been found or if the {@code text} argument is null.
*
* @see String#indexOf(int, int)
*/
public static int indexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
if (text != null) {
final int length = text.length();
if (toIndex >= length) {
if (text instanceof String) {
// String provides a faster implementation.
return ((String) text).indexOf(toSearch, fromIndex);
}
toIndex = length;
}
if (fromIndex < 0) {
fromIndex = 0;
}
char head = (char) toSearch;
char tail = (char) 0;
if (head != toSearch) { // Outside BMP plane?
head = highSurrogate(toSearch);
tail = lowSurrogate (toSearch);
toIndex--;
}
while (fromIndex < toIndex) {
if (text.charAt(fromIndex) == head) {
if (tail == 0 || text.charAt(fromIndex+1) == tail) {
return fromIndex;
}
}
fromIndex++;
}
}
return -1;
}
/**
* Returns the index within the given character sequence of the last occurrence of the
* specified character, searching backward in the given index range.
* If the character is not found, then this method returns -1.
*
* <p>There is no restriction on the value of {@code toIndex}. If greater than the text length
* or less than {@code fromIndex}, then the behavior of this method is as if the search started
* from {@code length} or {@code fromIndex} respectively. This is consistent with the behavior
* documented in {@link String#lastIndexOf(int, int)}.</p>
*
* @param text the character sequence in which to perform the search, or {@code null}.
* @param toSearch the Unicode code point of the character to search.
* @param fromIndex the index of the first character in the range where to perform the search.
* @param toIndex the index after the last character in the range where to perform the search.
* @return the index of the last occurrence of the given character in the specified sub-sequence,
* or -1 if no occurrence has been found or if the {@code text} argument is null.
*
* @see String#lastIndexOf(int, int)
*/
public static int lastIndexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
if (text != null) {
if (fromIndex <= 0) {
if (text instanceof String) {
// String provides a faster implementation.
return ((String) text).lastIndexOf(toSearch, toIndex - 1);
}
fromIndex = 0;
}
final int length = text.length();
if (toIndex > length) {
toIndex = length;
}
char tail = (char) toSearch;
char head = (char) 0;
if (tail != toSearch) { // Outside BMP plane?
tail = lowSurrogate (toSearch);
head = highSurrogate(toSearch);
fromIndex++;
}
while (toIndex > fromIndex) {
if (text.charAt(--toIndex) == tail) {
if (head == 0 || text.charAt(--toIndex) == head) {
return toIndex;
}
}
}
}
return -1;
}
/**
* Returns the index of the first character after the given number of lines.
* This method counts the number of occurrences of {@code '\n'}, {@code '\r'}
* or {@code "\r\n"} starting from the given position. When {@code numLines}
* occurrences have been found, the index of the first character after the last
* occurrence is returned.
*
* <p>If the {@code numLines} argument is positive, this method searches forward.
* If negative, this method searches backward. If 0, this method returns the
* beginning of the current line.</p>
*
* <p>If this method reaches the end of {@code text} while searching forward, then
* {@code text.length()} is returned. If this method reaches the beginning of
* {@code text} while searching backward, then 0 is returned.</p>
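*
* <div class="note"><b>Example:</b>
* an illustrative sketch (the text content is arbitrary):
*
* {@preformat java
*     String text = "first\r\nsecond\nthird";
*     indexOfLineStart(text, 1, 0);     // Returns 7, the index of "second".
*     indexOfLineStart(text, 0, 10);    // Returns 7, the beginning of the line containing index 10.
* }
* </div>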
*
* @param text the string in which to skip the given number of lines.
* @param numLines the number of lines to skip. Can be positive, zero or negative.
* @param fromIndex index at which to start the search, from 0 to {@code text.length()} inclusive.
* @return index of the first character after the last skipped line.
* @throws NullPointerException if the {@code text} argument is null.
* @throws IndexOutOfBoundsException if {@code fromIndex} is out of bounds.
*/
public static int indexOfLineStart(final CharSequence text, int numLines, int fromIndex) {
final int length = text.length();
/*
* Go backward if the number of lines is negative.
* No need to use the codePoint API because we are
* looking only for characters in the BMP plane.
*/
if (numLines <= 0) {
do {
char c;
do {
if (fromIndex == 0) {
return fromIndex;
}
c = text.charAt(--fromIndex);
if (c == '\n') {
if (fromIndex != 0 && text.charAt(fromIndex - 1) == '\r') {
--fromIndex;
}
break;
}
} while (c != '\r');
} while (++numLines != 1);
// Execute the forward code below for skipping the "end of line" characters.
}
/*
* Skips forward the given amount of lines.
*/
while (--numLines >= 0) {
char c;
do {
if (fromIndex == length) {
return fromIndex;
}
c = text.charAt(fromIndex++);
if (c == '\r') {
if (fromIndex != length && text.charAt(fromIndex) == '\n') {
fromIndex++;
}
break;
}
} while (c != '\n');
}
return fromIndex;
}
/**
* Returns the index of the first non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character after the given range, which is always equal to or greater than {@code toIndex}.
* Note that this character may not exist if {@code toIndex} is equal to the text length.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is greater than {@code toIndex},
* then this method unconditionally returns {@code fromIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code toIndex-1}
* is the high surrogate of a valid supplementary code point, then this method returns
* {@code toIndex+1}, which is the index of the next code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
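*
* <div class="note"><b>Example:</b>
* an illustrative call on a string with two leading spaces:
*
* {@preformat java
*     skipLeadingWhitespaces("  text  ", 0, 8);    // Returns 2, the index of 't'.
* }
* </div>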
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the first occurrence of a non-space character, starting
* at the specified index, or a value equal to or greater than {@code toIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripLeading()
*/
public static int skipLeadingWhitespaces(final CharSequence text, int fromIndex, final int toIndex) {
while (fromIndex < toIndex) {
final int c = codePointAt(text, fromIndex);
if (!isWhitespace(c)) break;
fromIndex += charCount(c);
}
return fromIndex;
}
/**
* Returns the index <em>after</em> the last non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character in the given range, which is always equal to or lower than {@code fromIndex}.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is greater than {@code toIndex},
* then this method unconditionally returns {@code toIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code fromIndex}
* is the low surrogate of a valid supplementary code point, then this method returns
* {@code fromIndex-1}, which is the index of the code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the last occurrence of a non-space character, starting
* at the specified index, or a value equal to or lower than {@code fromIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripTrailing()
*/
public static int skipTrailingWhitespaces(final CharSequence text, final int fromIndex, int toIndex) {
while (toIndex > fromIndex) {
final int c = codePointBefore(text, toIndex);
if (!isWhitespace(c)) break;
toIndex -= charCount(c);
}
return toIndex;
}
/**
* Allocates the array to be returned by the {@code split(…)} methods. If the given {@code text} argument is
* an instance of {@link String}, {@link StringBuilder} or {@link StringBuffer}, then this method returns a
* {@code String[]} array instead of {@code CharSequence[]}. This is possible because the specification of
* their {@link CharSequence#subSequence(int, int)} method guarantees to return {@code String} instances.
* Some Apache SIS code will cast the {@code split(…)} return value based on this knowledge.
*
* <p>Note that this is an undocumented SIS feature. There is currently no commitment that this
* implementation detail will not change in a future version.</p>
*
* @param text the text to be split.
* @return an array where to store the result of splitting the given {@code text}.
*/
private static CharSequence[] createSplitArray(final CharSequence text) {
return (text instanceof String ||
text instanceof StringBuilder ||
text instanceof StringBuffer) ? new String[8] : new CharSequence[8];
}
/**
* Splits a text around the given character. The array returned by this method contains all
* subsequences of the given text that are terminated by the given character or by the end
* of the text. The subsequences in the array are in the order in which they occur in the
* given text. If the character is not found in the input, then the resulting array has
* just one element, which is the whole given text.
*
* <p>This method is similar to the standard {@link String#split(String)} method except for the
* following:</p>
*
* <ul>
* <li>It accepts generic character sequences.</li>
* <li>It accepts {@code null} argument, in which case an empty array is returned.</li>
* <li>The separator is a simple character instead of a regular expression.</li>
* <li>If the {@code separator} argument is {@code '\n'} or {@code '\r'}, then this method
* splits around any of the {@code "\r"}, {@code "\n"} or {@code "\r\n"} character sequences.</li>
* <li>The leading and trailing spaces of each subsequence are trimmed.</li>
* </ul>
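*
* <div class="note"><b>Example:</b>
* an illustrative call (the text content is arbitrary):
*
* {@preformat java
*     split("a, b ,  c", ',');    // Returns {"a", "b", "c"}: items are trimmed.
*     split(null, ',');           // Returns an empty array.
* }
* </div>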
*
* @param text the text to split, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @return the array of subsequences computed by splitting the given text around the given
* character, or an empty array if {@code text} was null.
*
* @see String#split(String)
*/
@SuppressWarnings("ReturnOfCollectionOrArrayField")
public static CharSequence[] split(final CharSequence text, final char separator) {
if (text == null) {
return EMPTY_ARRAY;
}
if (separator == '\n' || separator == '\r') {
final CharSequence[] splitted = splitOnEOL(text);
for (int i=0; i < splitted.length; i++) {
// For consistency with the rest of this method.
splitted[i] = trimWhitespaces(splitted[i]);
}
return splitted;
}
// 'excludeEmpty' must use the same criterion as trimWhitespaces(…).
final boolean excludeEmpty = isWhitespace(separator);
CharSequence[] splitted = createSplitArray(text);
final int length = text.length();
int count = 0, last = 0, i = 0;
while ((i = indexOf(text, separator, i, length)) >= 0) {
final CharSequence item = trimWhitespaces(text, last, i);
if (!excludeEmpty || item.length() != 0) {
if (count == splitted.length) {
splitted = Arrays.copyOf(splitted, count << 1);
}
splitted[count++] = item;
}
last = ++i;
}
// Add the last element.
final CharSequence item = trimWhitespaces(text, last, length);
if (!excludeEmpty || item.length() != 0) {
if (count == splitted.length) {
splitted = Arrays.copyOf(splitted, count + 1);
}
splitted[count++] = item;
}
return ArraysExt.resize(splitted, count);
}
/**
* Splits a text around the <cite>End Of Line</cite> (EOL) characters.
* EOL characters can be any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} sequences.
* Each element in the returned array will be a single line. If the given text is already
* a single line, then this method returns an array containing only the given text.
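*
* <div class="note"><b>Example:</b>
* an illustrative call mixing the three EOL sequences:
*
* {@preformat java
*     splitOnEOL("first\r\nsecond\nthird");    // Returns {"first", "second", "third"}.
* }
* </div>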
*
* <p>Notes:</p>
* <ul>
* <li>Unlike <code>{@linkplain #split split}(toSplit, '\n')</code>,
* this method does not remove whitespaces.</li>
* <li>This method does not check for Unicode
* {@linkplain Characters#LINE_SEPARATOR line separator} and
* {@linkplain Characters#PARAGRAPH_SEPARATOR paragraph separator}.</li>
* </ul>
*
* <div class="note"><b>Performance note:</b>
* Prior to JDK8, this method was usually cheap because all string instances created by
* {@link String#substring(int,int)} shared the same {@code char[]} internal array.
* However, since JDK8 the new {@code String} implementation copies the data in new arrays.
* Consequently it is better to work with indices rather than using this method for splitting large {@code String}s.
* Nevertheless this method is still useful for other {@link CharSequence} implementations providing an efficient
* {@code subSequence(int,int)} method.</div>
*
* @param text the multi-line text from which to get the individual lines, or {@code null}.
* @return the lines in the text, or an empty array if the given text was null.
*
* @see #indexOfLineStart(CharSequence, int, int)
*/
@SuppressWarnings("ReturnOfCollectionOrArrayField")
public static CharSequence[] splitOnEOL(final CharSequence text) {
if (text == null) {
return EMPTY_ARRAY;
}
/*
* This method is implemented on top of String.indexOf(int,int),
* assuming that it will be faster for String and StringBuilder.
*/
final int length = text.length();
int lf = indexOf(text, '\n', 0, length);
int cr = indexOf(text, '\r', 0, length);
if (lf < 0 && cr < 0) {
return new CharSequence[] {
text
};
}
int count = 0;
CharSequence[] splitted = createSplitArray(text);
int last = 0;
boolean hasMore;
do {
int skip = 1;
final int splitAt;
if (cr < 0) {
// There is no "\r" character in the whole text, only "\n".
splitAt = lf;
hasMore = (lf = indexOf(text, '\n', lf+1, length)) >= 0;
} else if (lf < 0) {
// There is no "\n" character in the whole text, only "\r".
splitAt = cr;
hasMore = (cr = indexOf(text, '\r', cr+1, length)) >= 0;
} else if (lf < cr) {
// There is both "\n" and "\r" characters with "\n" first.
splitAt = lf;
hasMore = true;
lf = indexOf(text, '\n', lf+1, length);
} else {
// There is both "\r" and "\n" characters with "\r" first.
// We need special care for the "\r\n" sequence.
splitAt = cr;
if (lf == ++cr) {
cr = indexOf(text, '\r', cr+1, length);
lf = indexOf(text, '\n', lf+1, length);
hasMore = (cr >= 0 || lf >= 0);
skip = 2;
} else {
cr = indexOf(text, '\r', cr+1, length);
hasMore = true; // Because there is lf.
}
}
if (count >= splitted.length) {
splitted = Arrays.copyOf(splitted, count*2);
}
splitted[count++] = text.subSequence(last, splitAt);
last = splitAt + skip;
} while (hasMore);
/*
* Add the remaining string and we are done.
*/
if (count >= splitted.length) {
splitted = Arrays.copyOf(splitted, count+1);
}
splitted[count++] = text.subSequence(last, text.length());
return ArraysExt.resize(splitted, count);
}
/**
* Returns {@code true} if {@link #split(CharSequence, char)} parsed an empty string.
*/
private static boolean isEmpty(final CharSequence[] tokens) {
switch (tokens.length) {
case 0: return true;
case 1: return tokens[0].length() == 0;
default: return false;
}
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Double#parseDouble(String) parses} each item as a {@code double}.
* Empty sub-sequences are parsed as {@link Double#NaN}.
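*
* <div class="note"><b>Example:</b>
* an illustrative call where the third item is empty:
*
* {@preformat java
*     parseDoubles("5, 6, , 8", ',');    // Returns {5.0, 6.0, NaN, 8.0}.
* }
* </div>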
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static double[] parseDoubles(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_DOUBLE;
final double[] parsed = new double[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Double.NaN : Double.parseDouble(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Float#parseFloat(String) parses} each item as a {@code float}.
* Empty sub-sequences are parsed as {@link Float#NaN}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static float[] parseFloats(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_FLOAT;
final float[] parsed = new float[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Float.NaN : Float.parseFloat(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Long#parseLong(String) parses} each item as a {@code long}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static long[] parseLongs(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_LONG;
final long[] parsed = new long[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Long.parseLong(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Integer#parseInt(String) parses} each item as an {@code int}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static int[] parseInts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_INT;
final int[] parsed = new int[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Integer.parseInt(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Short#parseShort(String) parses} each item as a {@code short}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static short[] parseShorts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_SHORT;
final short[] parsed = new short[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Short.parseShort(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Byte#parseByte(String) parses} each item as a {@code byte}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the comma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static byte[] parseBytes(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_BYTE;
final byte[] parsed = new byte[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Byte.parseByte(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* Replaces some Unicode characters by ASCII characters on a "best effort basis".
* For example the “ é ” character is replaced by “ e ” (without accent),
* the “ ″ ” symbol for minutes of angle is replaced by straight double quotes “ " ”,
* and combined characters like ㎏, ㎎, ㎝, ㎞, ㎢, ㎦, ㎖, ㎧, ㎩, ㎐, <i>etc.</i> are replaced
* by the corresponding sequences of characters.
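*
* <div class="note"><b>Example:</b>
* a possible result, following the substitutions described above (“é” replaced by “e”):
*
* {@preformat java
*     toASCII("Réunion");    // Returns "Reunion".
* }
* </div>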
*
* <div class="note"><b>Note:</b>
* the replacement of Greek letters is a more complex task than what this method can do,
* since it depends on the context. For example if the Greek letters are abbreviations
* for coordinate system axes like φ and λ, then the replacements depend on the enclosing
* coordinate system. See {@link org.apache.sis.io.wkt.Transliterator} for more information.</div>
*
* @param text the text to scan for Unicode characters to replace by ASCII characters, or {@code null}.
* @return the given text with substitutions applied, or {@code text} if no replacement
* has been applied, or {@code null} if the given text was null.
*
* @see StringBuilders#toASCII(StringBuilder)
* @see org.apache.sis.io.wkt.Transliterator#filter(String)
* @see java.text.Normalizer
*/
public static CharSequence toASCII(final CharSequence text) {
return StringBuilders.toASCII(text, null);
}
/**
* Returns a string with leading and trailing whitespace characters omitted.
* This method is similar in purpose to {@link String#trim()}, except that the latter considers
* every {@linkplain Character#isISOControl(int) ISO control code} below 32 to be a whitespace.
* That {@code String.trim()} behavior has the side effect of removing the heading of ANSI escape
* sequences (a.k.a. X3.64), and of ignoring Unicode spaces. This {@code trimWhitespaces(…)} method
* is built on the more accurate {@link Character#isWhitespace(int)} method instead.
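*
* <div class="note"><b>Example:</b>
* an illustrative contrast with {@code String.trim()}, using EM SPACE (U+2003) which is a
* Unicode whitespace above code point 32:
*
* {@preformat java
*     String text = "\u2003value\u2003";
*     text.trim();              // Returns the text unchanged: EM SPACE is not removed.
*     trimWhitespaces(text);    // Returns "value".
* }
* </div>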
*
* <p>This method performs the same work as {@link #trimWhitespaces(CharSequence)},
* but is overloaded for the {@code String} type because of its frequent use.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a string with leading and trailing whitespaces removed, or {@code null} if the given
* text was null.
*
* @todo To be replaced by {@link String#strip()} in JDK 11.
*/
public static String trimWhitespaces(String text) {
if (text != null) {
final int length = text.length();
final int lower = skipLeadingWhitespaces(text, 0, length);
text = text.substring(lower, skipTrailingWhitespaces(text, lower, length));
}
return text;
}
/**
* Returns a text with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>This method is the generic version of {@link #trimWhitespaces(String)}.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a characters sequence with leading and trailing whitespaces removed,
* or {@code null} if the given text was null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see String#strip()
*/
public static CharSequence trimWhitespaces(CharSequence text) {
if (text != null) {
text = trimWhitespaces(text, 0, text.length());
}
return text;
}
/**
* Returns a sub-sequence with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>Invoking this method is functionally equivalent to the following code snippet,
* except that the {@link CharSequence#subSequence(int, int) subSequence} method is
* invoked only once instead of twice:</p>
*
* {@preformat java
* text = trimWhitespaces(text.subSequence(lower, upper));
* }
*
* @param text the text from which to remove leading and trailing white spaces.
* @param lower index of the first character to consider for inclusion in the sub-sequence.
* @param upper index after the last character to consider for inclusion in the sub-sequence.
* @return a characters sequence with leading and trailing white spaces removed, or {@code null}
* if the {@code text} argument is null.
* @throws IndexOutOfBoundsException if {@code lower} or {@code upper} is out of bounds.
*/
public static CharSequence trimWhitespaces(CharSequence text, int lower, int upper) {
final int length = length(text);
ArgumentChecks.ensureValidIndexRange(length, lower, upper);
if (text != null) {
lower = skipLeadingWhitespaces (text, lower, upper);
upper = skipTrailingWhitespaces(text, lower, upper);
if (lower != 0 || upper != length) { // Safety in case subSequence doesn't make the check.
text = text.subSequence(lower, upper);
}
}
return text;
}
/**
* Trims the fractional part of the given formatted number, provided that it doesn't change
* the value. This method assumes that the number is formatted in the US locale, typically
* by the {@link Double#toString(double)} method.
*
* <p>More specifically if the given value ends with a {@code '.'} character followed by a
* sequence of {@code '0'} characters, then those characters are omitted. Otherwise this
* method returns the text unchanged. This is an <cite>all or nothing</cite> method:
* either the fractional part is completely removed, or it is left unchanged.</p>
*
* <h4>Examples</h4>
* This method returns {@code "4"} if the given value is {@code "4."}, {@code "4.0"} or
* {@code "4.00"}, but returns {@code "4.10"} unchanged (including the trailing {@code '0'}
* character) if the input is {@code "4.10"}.
*
* <h4>Use case</h4>
* This method is useful before {@linkplain Integer#parseInt(String) parsing a number}
* if that number should preferably be parsed as an integer before attempting to parse
* it as a floating point number.
*
* @param value the value to trim if possible, or {@code null}.
* @return the value without the trailing {@code ".0"} part (if any),
* or {@code null} if the given text was null.
*
* @see StringBuilders#trimFractionalPart(StringBuilder)
*/
public static CharSequence trimFractionalPart(final CharSequence value) {
if (value != null) {
for (int i=value.length(); i>0;) {
final int c = codePointBefore(value, i);
i -= charCount(c);
switch (c) {
case '0': continue;
case '.': return value.subSequence(0, i);
default : return value;
}
}
}
return value;
}
/**
* Makes sure that the {@code text} string is not longer than {@code maxLength} characters.
* If {@code text} is not longer, then it is returned unchanged. Otherwise this method returns
* a copy of {@code text} with some characters substituted by the {@code "(…)"} string.
*
* <p>If the text needs to be shortened, then this method tries to apply the above-cited
* substitution between two words. For example, the following text:</p>
*
* <blockquote>
* "This sentence given as an example is way too long to be included in a short name."
* </blockquote>
*
* May be shortened to something like this:
*
* <blockquote>
* "This sentence given (…) in a short name."
* </blockquote>
*
* @param text the sentence to reduce if it is too long, or {@code null}.
* @param maxLength the maximum length allowed for {@code text}.
* @return a sentence not longer than {@code maxLength}, or {@code null} if the given text was null.
*/
public static CharSequence shortSentence(CharSequence text, final int maxLength) {
ArgumentChecks.ensureStrictlyPositive("maxLength", maxLength);
if (text != null) {
final int length = text.length();
int toRemove = length - maxLength;
if (toRemove > 0) {
toRemove += 5; // Space needed for the " (…) " string.
/*
* We will remove characters from 'lower' to 'upper' both exclusive. We try to
* adjust 'lower' and 'upper' in such a way that the first and last characters
* to be removed will be spaces or punctuation characters.
*/
int lower = length >>> 1;
if (lower != 0 && isLowSurrogate(text.charAt(lower))) {
lower--;
}
int upper = lower;
boolean forward = false;
do { // To be run as long as we need to remove more characters.
int nc=0, type=UNASSIGNED;
forward = !forward;
searchWordBreak: while (true) {
final int c;
if (forward) {
if ((upper += nc) == length) break;
c = codePointAt(text, upper);
} else {
if ((lower -= nc) == 0) break;
c = codePointBefore(text, lower);
}
nc = charCount(c);
if (isWhitespace(c)) {
if (type != UNASSIGNED) {
type = SPACE_SEPARATOR;
}
} else switch (type) {
// After we skipped white, then non-white, then white characters, stop.
case SPACE_SEPARATOR: {
break searchWordBreak;
}
// For the first non-white character, just remember its type.
// Arbitrarily use UPPERCASE_LETTER for any kind of identifier
// part (which include UPPERCASE_LETTER anyway).
case UNASSIGNED: {
type = isUnicodeIdentifierPart(c) ? UPPERCASE_LETTER : getType(c);
break;
}
// If we expected an identifier, stop at the first other char.
case UPPERCASE_LETTER: {
if (!isUnicodeIdentifierPart(c)) {
break searchWordBreak;
}
break;
}
// For all other kind of character, break when the type change.
default: {
if (getType(c) != type) {
break searchWordBreak;
}
break;
}
}
toRemove -= nc;
}
} while (toRemove > 0);
text = new StringBuilder(lower + (length-upper) + 5) // 5 is the length of " (…) "
.append(text, 0, lower).append(" (…) ").append(text, upper, length);
}
}
return text;
}
/**
* Given a string in upper cases (typically a Java constant), returns a string formatted
* like an English sentence. This heuristic method performs the following steps:
*
* <ol>
* <li>Replace all occurrences of {@code '_'} by spaces.</li>
* <li>Converts all letters except the first one to lower case letters using
* {@link Character#toLowerCase(int)}. Note that this method does not use
* the {@link String#toLowerCase()} method. Consequently the system locale
* is ignored. This method behaves as if the conversion were done in the
* {@linkplain java.util.Locale#ROOT root} locale.</li>
* </ol>
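*
* <div class="note"><b>Example:</b>
* an illustrative call (the constant name is arbitrary):
*
* {@preformat java
*     upperCaseToSentence("HALF_DOWN");    // Returns "Half down".
* }
* </div>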
*
* <p>Note that those heuristic rules may be modified in future SIS versions,
* depending on the practical experience gained.</p>
*
* @param identifier the name of a Java constant, or {@code null}.
* @return the identifier like an English sentence, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence upperCaseToSentence(final CharSequence identifier) {
if (identifier == null) {
return null;
}
final StringBuilder buffer = new StringBuilder(identifier.length());
final int length = identifier.length();
for (int i=0; i<length;) {
int c = codePointAt(identifier, i);
if (i != 0) {
if (c == '_') {
c = ' ';
} else {
c = toLowerCase(c);
}
}
buffer.appendCodePoint(c);
i += charCount(c);
}
return buffer;
}
/**
* Given a string in camel cases (typically an identifier), returns a string formatted
* like an English sentence. This heuristic method performs the following steps:
*
* <ol>
* <li>Invoke {@link #camelCaseToWords(CharSequence, boolean)}, which separates the words
* on the basis of character case. For example {@code "transferFunctionType"} becomes
* <cite>"transfer function type"</cite>. This works fine for ISO 19115 identifiers.</li>
*
* <li>Next, replace all occurrences of {@code '_'} by spaces in order to take into account
* another common naming convention, which uses {@code '_'} as a word separator. This
* convention is used by netCDF attributes like {@code "project_name"}.</li>
*
* <li>Finally ensure that the first character is upper-case.</li>
* </ol>
*
* <h4>Exception to the above rules</h4>
* If the given identifier contains only upper-case letters, digits and the {@code '_'} character,
* then the identifier is returned "as is" except for the {@code '_'} characters, which are replaced by {@code '-'}.
* This works well for identifiers like {@code "UTF-8"} or {@code "ISO-LATIN-1"} for instance.
*
* <p>Note that those heuristic rules may be modified in future SIS versions,
* depending on the practical experience gained.</p>
*
* @param identifier an identifier with no space, words begin with an upper-case character, or {@code null}.
* @return the identifier with spaces inserted after what looks like words, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence camelCaseToSentence(final CharSequence identifier) {
if (identifier == null) {
return null;
}
final StringBuilder buffer;
if (isCode(identifier)) {
if (identifier instanceof String) {
return ((String) identifier).replace('_', '-');
}
buffer = new StringBuilder(identifier);
StringBuilders.replace(buffer, '_', '-');
} else {
buffer = (StringBuilder) camelCaseToWords(identifier, true);
final int length = buffer.length();
if (length != 0) {
StringBuilders.replace(buffer, '_', ' ');
final int c = buffer.codePointAt(0);
final int up = toUpperCase(c);
if (c != up) {
StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
}
}
}
return buffer;
}
/**
* Given a string in camel cases, returns a string with the same words separated by spaces.
* A word begins with an upper-case character following a lower-case character. For example
* if the given string is {@code "PixelInterleavedSampleModel"}, then this method returns
* <cite>"Pixel Interleaved Sample Model"</cite> or <cite>"Pixel interleaved sample model"</cite>
* depending on the value of the {@code toLowerCase} argument.
*
* <p>If {@code toLowerCase} is {@code false}, then this method inserts spaces but does not change
* the case of characters. If {@code toLowerCase} is {@code true}, then this method changes
* {@linkplain Character#toLowerCase(int) to lower case} the first character after each space
* inserted by this method (note that this intentionally excludes the very first character in
* the given string), except if the second character {@linkplain Character#isUpperCase(int)
* is upper case}, in which case the word is assumed to be an acronym.</p>
*
* <p>The given string is usually a programmatic identifier like a class name or a method name.</p>
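*
* <div class="note"><b>Example:</b>
* the identifier cited in the first paragraph:
*
* {@preformat java
*     camelCaseToWords("PixelInterleavedSampleModel", true);    // Returns "Pixel interleaved sample model".
* }
* </div>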
*
* @param identifier an identifier with no space, words begin with an upper-case character.
* @param toLowerCase {@code true} for changing the first character of words to lower case,
* except for the first word and acronyms.
* @return the identifier with spaces inserted after what looks like words, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence camelCaseToWords(final CharSequence identifier, final boolean toLowerCase) {
if (identifier == null) {
return null;
}
/*
* Implementation note: the 'camelCaseToSentence' method needs
* this method to unconditionally return a new StringBuilder.
*/
final int length = identifier.length();
final StringBuilder buffer = new StringBuilder(length + 8);
final int lastIndex = (length != 0) ? length - charCount(codePointBefore(identifier, length)) : 0;
int last = 0;
for (int i=1; i<=length;) {
final int cp;
final boolean doAppend;
if (i == length) {
cp = 0;
doAppend = true;
} else {
cp = codePointAt(identifier, i);
doAppend = Character.isUpperCase(cp) && isLowerCase(codePointBefore(identifier, i));
}
if (doAppend) {
final int pos = buffer.length();
buffer.append(identifier, last, i).append(' ');
if (toLowerCase && pos!=0 && last<lastIndex && isLowerCase(codePointAfter(identifier, last))) {
final int c = buffer.codePointAt(pos);
final int low = toLowerCase(c);
if (c != low) {
StringBuilders.replace(buffer, pos, pos + charCount(c), toChars(low));
}
}
last = i;
}
i += charCount(cp);
}
/*
* Removes the trailing space, if any.
*/
final int lg = buffer.length();
if (lg != 0) {
final int cp = buffer.codePointBefore(lg);
if (isWhitespace(cp)) {
buffer.setLength(lg - charCount(cp));
}
}
return buffer;
}
/**
* Creates an acronym from the given text. This method returns a string containing the first character of each word,
* where the words are separated by the camel case convention, the {@code '_'} character, or any character which is
* not a {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part} (including spaces).
*
* <p>An exception to the above rule happens if the given text is a Unicode identifier without the {@code '_'}
* character, and every character is upper case. In such case the text is returned unchanged on the assumption
* that it is already an acronym.</p>
*
* <p><b>Examples:</b> given {@code "northEast"}, this method returns {@code "NE"}.
* Given {@code "Open Geospatial Consortium"}, this method returns {@code "OGC"}.</p>
*
* @param text the text for which to create an acronym, or {@code null}.
* @return the acronym, or {@code null} if the given text was null.
*/
public static CharSequence camelCaseToAcronym(CharSequence text) {
text = trimWhitespaces(text);
if (text != null && !isAcronym(text)) {
final int length = text.length();
final StringBuilder buffer = new StringBuilder(8); // Acronyms are usually short.
boolean wantChar = true;
for (int i=0; i<length;) {
final int c = codePointAt(text, i);
if (wantChar) {
if (isUnicodeIdentifierStart(c)) {
buffer.appendCodePoint(c);
wantChar = false;
}
} else if (!isUnicodeIdentifierPart(c) || c == '_') {
wantChar = true;
} else if (Character.isUpperCase(c)) {
// Test for mixed-case (e.g. "northEast").
// Note that i is guaranteed to be greater than 0 here.
if (!Character.isUpperCase(codePointBefore(text, i))) {
buffer.appendCodePoint(c);
}
}
i += charCount(c);
}
final int acrlg = buffer.length();
if (acrlg != 0) {
/*
* If every character except the first one is upper-case, ensure that the
* first one is upper-case as well. This is for handling the identifiers which
* are compliant with the Java-Beans convention (e.g. "northEast").
*/
if (isUpperCase(buffer, 1, acrlg, true)) {
final int c = buffer.codePointAt(0);
final int up = toUpperCase(c);
if (c != up) {
StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
}
}
if (!equals(text, buffer)) {
text = buffer;
}
}
}
return text;
}
/**
* Returns {@code true} if the first string is likely to be an acronym of the second string.
* An acronym is a sequence of {@linkplain Character#isLetterOrDigit(int) letters or digits}
* built from at least one character of each word in the {@code words} string. More than
* one character from the same word may appear in the acronym, but they must always
* be the first consecutive characters. The comparison is case-insensitive.
*
* <div class="note"><b>Example:</b>
* Given the {@code "Open Geospatial Consortium"} words, the following strings are recognized as acronyms:
* {@code "OGC"}, {@code "ogc"}, {@code "O.G.C."}, {@code "OpGeoCon"}.</div>
*
* If any of the given arguments is {@code null}, this method returns {@code false}.
*
* @param acronym a possible acronym of the sequence of words, or {@code null}.
* @param words the sequence of words, or {@code null}.
* @return {@code true} if the first string is an acronym of the second one.
*/
public static boolean isAcronymForWords(final CharSequence acronym, final CharSequence words) {
final int lga = length(acronym);
int ia=0, ca;
do {
if (ia >= lga) return false;
ca = codePointAt(acronym, ia);
ia += charCount(ca);
} while (!isLetterOrDigit(ca));
final int lgc = length(words);
int ic=0, cc;
do {
if (ic >= lgc) return false;
cc = codePointAt(words, ic);
ic += charCount(cc);
}
while (!isLetterOrDigit(cc));
if (toUpperCase(ca) != toUpperCase(cc)) {
// The first letter must match.
return false;
}
cmp: while (ia < lga) {
if (ic >= lgc) {
// There are more letters in the acronym than in the complete name.
return false;
}
ca = codePointAt(acronym, ia); ia += charCount(ca);
cc = codePointAt(words, ic); ic += charCount(cc);
if (isLetterOrDigit(ca)) {
if (toUpperCase(ca) == toUpperCase(cc)) {
// Acronym letter matches the letter from the complete name.
// Continue the comparison with next letter of both strings.
continue;
}
// Will search for the next word after the 'else' block.
} else do {
if (ia >= lga) break cmp;
ca = codePointAt(acronym, ia);
ia += charCount(ca);
} while (!isLetterOrDigit(ca));
/*
* At this point, 'ca' is the next acronym letter to compare and we
* need to search for the next word in the complete name. We first
* skip remaining letters, then we skip non-letter characters.
*/
boolean skipLetters = true;
do while (isLetterOrDigit(cc) == skipLetters) {
if (ic >= lgc) {
return false;
}
cc = codePointAt(words, ic);
ic += charCount(cc);
} while ((skipLetters = !skipLetters) == false);
// Now that we are aligned on a new word, the first letter must match.
if (toUpperCase(ca) != toUpperCase(cc)) {
return false;
}
}
/*
* Now that we have processed all acronym letters, the complete name can not have
* any additional word. We can only finish the current word and skip trailing non-
* letter characters.
*/
boolean skipLetters = true;
do {
do {
if (ic >= lgc) return true;
cc = codePointAt(words, ic);
ic += charCount(cc);
} while (isLetterOrDigit(cc) == skipLetters);
} while ((skipLetters = !skipLetters) == false);
return false;
}
/**
* Returns {@code true} if the given string contains only upper case letters or digits.
* A few punctuation characters like {@code '_'} and {@code '.'} are also accepted.
*
* <p>This method is used for identifying character strings that are likely to be code
* like {@code "UTF-8"} or {@code "ISO-LATIN-1"}.</p>
*
* @see #isUnicodeIdentifier(CharSequence)
*/
private static boolean isCode(final CharSequence identifier) {
for (int i=identifier.length(); --i>=0;) {
final char c = identifier.charAt(i);
// No need to use the code point API here, since the conditions
// below are requiring the characters to be in the basic plane.
if (!((c >= 'A' && c <= 'Z') || (c >= '-' && c <= ':') || c == '_')) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is presumed to be an acronym. Acronyms are presumed
* to be valid Unicode identifiers in all upper-case letters and without the {@code '_'} character.
*
* @see #camelCaseToAcronym(CharSequence)
*/
private static boolean isAcronym(final CharSequence text) {
return isUpperCase(text) && indexOf(text, '_', 0, text.length()) < 0 && isUnicodeIdentifier(text);
}
/**
* Returns {@code true} if the given identifier is a legal Unicode identifier.
* This method returns {@code true} if the identifier length is greater than zero,
* the first character is a {@linkplain Character#isUnicodeIdentifierStart(int)
* Unicode identifier start} and all remaining characters (if any) are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier parts}.
*
* <h4>Relationship with legal XML identifiers</h4>
* Most legal Unicode identifiers are also legal XML identifiers, but the converse is not true.
* The most noticeable differences are the ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ characters,
* which are legal in XML identifiers but not in Unicode.
*
* <table class="sis">
* <caption>Characters legal in one set but not in the other</caption>
* <tr><th colspan="2">Not legal in Unicode</th> <th class="sep" colspan="2">Not legal in XML</th></tr>
* <tr><td>{@code :}</td><td>(colon)</td> <td class="sep">{@code µ}</td><td>(micro sign)</td></tr>
* <tr><td>{@code -}</td><td>(hyphen or minus)</td> <td class="sep">{@code ª}</td><td>(feminine ordinal indicator)</td></tr>
* <tr><td>{@code .}</td><td>(dot)</td> <td class="sep">{@code º}</td><td>(masculine ordinal indicator)</td></tr>
* <tr><td>{@code ·}</td><td>(middle dot)</td> <td class="sep">{@code ⁔}</td><td>(inverted undertie)</td></tr>
* <tr>
* <td colspan="2">Many punctuation, symbols, <i>etc</i>.</td>
* <td colspan="2" class="sep">{@linkplain Character#isIdentifierIgnorable(int) Identifier ignorable} characters.</td>
* </tr>
* </table>
*
* Note that the ‘{@code _}’ (underscore) character is legal according both Unicode and XML, while spaces,
* ‘{@code !}’, ‘{@code #}’, ‘{@code *}’, ‘{@code /}’, ‘{@code ?}’ and most other punctuation characters are not.
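*
* <div class="note"><b>Example:</b>
* illustrative values:
*
* {@preformat java
*     isUnicodeIdentifier("WGS84");     // true
*     isUnicodeIdentifier("WGS 84");    // false: the space is not an identifier part.
*     isUnicodeIdentifier("84WGS");     // false: a digit can not start an identifier.
* }
* </div>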
*
* <h4>Usage in Apache SIS</h4>
* In its handling of {@linkplain org.apache.sis.referencing.ImmutableIdentifier identifiers}, Apache SIS favors
* Unicode identifiers without {@linkplain Character#isIdentifierIgnorable(int) ignorable} characters since those
* identifiers are legal XML identifiers except for the above-cited rarely used characters. As a side effect,
* this policy excludes ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ which would normally be legal XML identifiers.
* But since those characters could easily be confused with
* {@linkplain org.apache.sis.util.iso.DefaultNameSpace#DEFAULT_SEPARATOR namespace separators},
* this exclusion is considered desirable.
*
* @param identifier the character sequence to test, or {@code null}.
* @return {@code true} if the given character sequence is a legal Unicode identifier.
*
* @see org.apache.sis.referencing.ImmutableIdentifier
* @see org.apache.sis.metadata.iso.citation.Citations#toCodeSpace(Citation)
* @see org.apache.sis.referencing.IdentifiedObjects#getSimpleNameOrIdentifier(IdentifiedObject)
*/
public static boolean isUnicodeIdentifier(final CharSequence identifier) {
final int length = length(identifier);
if (length == 0) {
return false;
}
int c = codePointAt(identifier, 0);
if (!isUnicodeIdentifierStart(c)) {
return false;
}
for (int i=0; (i += charCount(c)) < length;) {
c = codePointAt(identifier, i);
if (!isUnicodeIdentifierPart(c)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test (may be {@code null}).
* @return {@code true} if non-null, contains at least one upper-case character and no lower-case character.
*
* @see String#toUpperCase()
*
* @since 0.7
*/
public static boolean isUpperCase(final CharSequence text) {
return isUpperCase(text, 0, length(text), false);
}
/**
* Returns {@code true} if the given sub-sequence is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test.
* @param lower index of the first character to check, inclusive.
* @param upper index of the last character to check, exclusive.
* @param hasUpperCase {@code true} if this method should behave as if the given text already had
* at least one upper-case character (not necessarily in the portion given by the indices).
* @return {@code true} if contains at least one upper-case character and no lower-case character.
*/
private static boolean isUpperCase(final CharSequence text, int lower, final int upper, boolean hasUpperCase) {
while (lower < upper) {
final int c = codePointAt(text, lower);
if (Character.isLowerCase(c)) {
return false;
}
if (!hasUpperCase) {
hasUpperCase = Character.isUpperCase(c);
}
lower += charCount(c);
}
return hasUpperCase;
}
/**
* Returns {@code true} if the given texts are equal, optionally ignoring case and filtered-out characters.
* This method is sometimes used for comparing identifiers in a lenient way.
*
* <p><b>Example:</b> the following call compares the two strings ignoring case and any
* characters which are not {@linkplain Character#isLetterOrDigit(int) letter or digit}.
* In particular, spaces and punctuation characters like {@code '_'} and {@code '-'} are
* ignored:</p>
*
* {@preformat java
* assert equalsFiltered("WGS84", "WGS_84", Characters.Filter.LETTERS_AND_DIGITS, true) == true;
* }
*
* @param s1 the first characters sequence to compare, or {@code null}.
* @param s2 the second characters sequence to compare, or {@code null}.
* @param filter the subset of characters to compare, or {@code null} for comparing all characters.
* @param ignoreCase {@code true} for ignoring cases, or {@code false} for requiring exact match.
* @return {@code true} if both arguments are {@code null} or if the two given texts are equal,
* optionally ignoring case and filtered-out characters.
*/
public static boolean equalsFiltered(final CharSequence s1, final CharSequence s2,
final Characters.Filter filter, final boolean ignoreCase)
{
if (s1 == s2) {
return true;
}
if (s1 == null || s2 == null) {
return false;
}
if (filter == null) {
return ignoreCase ? equalsIgnoreCase(s1, s2) : equals(s1, s2);
}
final int lg1 = s1.length();
final int lg2 = s2.length();
int i1 = 0, i2 = 0;
while (i1 < lg1) {
int c1 = codePointAt(s1, i1);
final int n = charCount(c1);
if (filter.contains(c1)) {
int c2; // Fetch the next significant character from the second string.
do {
if (i2 >= lg2) {
return false; // The first string has more significant characters than expected.
}
c2 = codePointAt(s2, i2);
i2 += charCount(c2);
} while (!filter.contains(c2));
// Compare the characters in the same way as String.equalsIgnoreCase(String).
if (c1 != c2 && !(ignoreCase && equalsIgnoreCase(c1, c2))) {
return false;
}
}
i1 += n;
}
while (i2 < lg2) {
final int s = codePointAt(s2, i2);
if (filter.contains(s)) {
return false; // The first string has fewer significant characters than expected.
}
i2 += charCount(s);
}
return true;
}
/**
* Returns {@code true} if the given code points are equal, ignoring case.
* This method implements the same comparison algorithm as {@link String#equalsIgnoreCase(String)}.
*
* <p>This method does not verify if {@code c1 == c2}. This check should have been done
* by the caller, since the caller code is a more optimal place for this check.</p>
*/
private static boolean equalsIgnoreCase(int c1, int c2) {
c1 = toUpperCase(c1);
c2 = toUpperCase(c2);
if (c1 == c2) {
return true;
}
// Need this check for Georgian alphabet.
return toLowerCase(c1) == toLowerCase(c2);
}
/**
* Returns {@code true} if the two given texts are equal, ignoring case.
* This method is similar to {@link String#equalsIgnoreCase(String)}, except
* it works on arbitrary character sequences and compares <cite>code points</cite>
* instead of characters.
*
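* <div class="note"><b>Example:</b>
* {@preformat java
*     equalsIgnoreCase("WGS84", "wgs84");     // true
*     equalsIgnoreCase("WGS84", "WGS 84");    // false (no character is ignored by this method)
* }
* </div>
*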
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, ignoring case,
* or if both arguments are {@code null}.
*
* @see String#equalsIgnoreCase(String)
*/
public static boolean equalsIgnoreCase(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 == null || s2 == null) {
return false;
}
// Do not check for String cases. We do not want to delegate to String.equalsIgnoreCase
// because we compare code points while String.equalsIgnoreCase compares characters.
final int lg1 = s1.length();
final int lg2 = s2.length();
int i1 = 0, i2 = 0;
while (i1<lg1 && i2<lg2) {
final int c1 = codePointAt(s1, i1);
final int c2 = codePointAt(s2, i2);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
i1 += charCount(c1);
i2 += charCount(c2);
}
return (i1 == lg1) && (i2 == lg2); // Equal only if both sequences have been fully consumed.
}
/**
* Returns {@code true} if the two given texts are equal. This method delegates to
* {@link String#contentEquals(CharSequence)} if possible. This method never invokes
* {@link CharSequence#toString()} in order to avoid a potentially large copy of data.
*
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, or if both arguments are {@code null}.
*
* @see String#contentEquals(CharSequence)
*/
public static boolean equals(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 != null && s2 != null) {
if (s1 instanceof String) return ((String) s1).contentEquals(s2);
if (s2 instanceof String) return ((String) s2).contentEquals(s1);
final int length = s1.length();
if (s2.length() == length) {
for (int i=0; i<length; i++) {
if (s1.charAt(i) != s2.charAt(i)) {
return false;
}
}
return true;
}
}
return false;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* in a case-sensitive comparison. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(offset, part, 0, part.length());
* }
*
* This method does not throw {@code IndexOutOfBoundsException}. Instead, if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence in which to test for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(int, String, int, int)
*/
public static boolean regionMatches(final CharSequence text, final int fromIndex, final CharSequence part) {
if (text instanceof String && part instanceof String) {
// It is okay to delegate to String implementation since we do not ignore cases.
return ((String) text).startsWith((String) part, fromIndex);
}
final int length;
if (fromIndex < 0 || fromIndex + (length = part.length()) > text.length()) {
return false;
}
for (int i=0; i<length; i++) {
// No need to use the code point API here, since we are looking for exact matches.
if (text.charAt(fromIndex + i) != part.charAt(i)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* optionally in a case-insensitive way. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(ignoreCase, offset, part, 0, part.length());
* }
*
* This method does not throw {@code IndexOutOfBoundsException}. Instead, if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence in which to test for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(boolean, int, String, int, int)
*
* @since 0.4
*/
public static boolean regionMatches(final CharSequence text, int fromIndex, final CharSequence part, final boolean ignoreCase) {
if (!ignoreCase) {
return regionMatches(text, fromIndex, part);
}
// Do not check for String cases. We do not want to delegate to String.regionMatches
// because we compare code points while String.regionMatches(…) compares characters.
final int limit = text.length();
final int length = part.length();
if (fromIndex < 0) { // Not checked before because we want NullPointerException if an argument is null.
return false;
}
for (int i=0; i<length;) {
if (fromIndex >= limit) {
return false;
}
final int c1 = codePointAt(part, i);
final int c2 = codePointAt(text, fromIndex);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
fromIndex += charCount(c2);
i += charCount(c1);
}
return true;
}
/**
* Returns {@code true} if the given character sequence starts with the given prefix.
*
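* <div class="note"><b>Example:</b>
* {@preformat java
*     startsWith("WGS84", "wgs", true);     // true
*     startsWith("WGS84", "wgs", false);    // false (case-sensitive comparison)
* }
* </div>
*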
* @param text the characters sequence to test.
* @param prefix the expected prefix.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if the given sequence starts with the given prefix.
* @throws NullPointerException if any of the arguments is null.
*/
public static boolean startsWith(final CharSequence text, final CharSequence prefix, final boolean ignoreCase) {
return regionMatches(text, 0, prefix, ignoreCase);
}
/**
* Returns {@code true} if the given character sequence ends with the given suffix.
*
* @param text the characters sequence to test.
* @param suffix the expected suffix.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if the given sequence ends with the given suffix.
* @throws NullPointerException if any of the arguments is null.
*/
public static boolean endsWith(final CharSequence text, final CharSequence suffix, final boolean ignoreCase) {
int is = text.length();
int ip = suffix.length();
while (ip > 0) {
if (is <= 0) {
return false;
}
final int cs = codePointBefore(text, is);
final int cp = codePointBefore(suffix, ip);
if (cs != cp && (!ignoreCase || !equalsIgnoreCase(cs, cp))) {
return false;
}
is -= charCount(cs);
ip -= charCount(cp);
}
return true;
}
/**
* Returns the longest sequence of characters which is found at the beginning of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common prefix, then this method returns an empty string.
*
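* <div class="note"><b>Example:</b>
* {@preformat java
*     commonPrefix("baroclinic_eastward_velocity", "baroclinic_northward_velocity");    // "baroclinic_"
* }
* </div>
*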
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common prefix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonPrefix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (i < length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(i) != s2.charAt(i)) {
break;
}
i++;
}
return shortest.subSequence(0, i);
}
/**
* Returns the longest sequence of characters which is found at the end of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common suffix, then this method returns an empty string.
*
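* <div class="note"><b>Example:</b>
* {@preformat java
*     commonSuffix("eastward_velocity", "northward_velocity");    // "ward_velocity"
* }
* </div>
*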
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common suffix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonSuffix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (++i <= length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(lg1 - i) != s2.charAt(lg2 - i)) {
break;
}
}
i--;
return shortest.subSequence(length - i, shortest.length());
}
/**
* Returns the words found at the beginning and end of both texts.
* The returned string is the concatenation of the {@linkplain #commonPrefix common prefix}
* with the {@linkplain #commonSuffix common suffix}, with the prefix and suffix possibly made
* shorter in order to avoid cutting a word in the middle.
*
* <p>The purpose of this method is to create a global identifier from a list of component identifiers.
* The latter are often eastward and northward components of a vector, in which case this method provides
* an identifier for the vector as a whole.</p>
*
* <div class="note"><b>Example:</b>
* given the following inputs:
* <ul>
* <li>{@code "baroclinic_eastward_velocity"}</li>
* <li>{@code "baroclinic_northward_velocity"}</li>
* </ul>
* This method returns {@code "baroclinic_velocity"}. Note that the {@code "ward"} characters
* are a common suffix of both texts, but are nevertheless omitted because they would cut a word.</div>
*
* <p>If one of those texts is {@code null}, then the other text is returned.<|fim▁hole|> * that may appear in the middle of the strings. A character is considered the beginning of a word if it is
* {@linkplain Character#isLetterOrDigit(int) a letter or digit} which is not preceded by another letter or
* digit (as leading "s" and "c" in "snake_case"), or if it is an {@linkplain Character#isUpperCase(int)
* upper case} letter preceded by a {@linkplain Character#isLowerCase(int) lower case} letter or no letter
* (as both "C" in "CamelCase").
*
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the words found at the beginning and end of both texts (may be empty), or {@code null} if both texts are null.
*
* @since 1.1
*/
public static CharSequence commonWords(final CharSequence s1, final CharSequence s2) {
final int lg1 = length(s1);
final int lg2 = length(s2);
final int shortestLength = Math.min(lg1, lg2); // 0 if s1 or s2 is null, in which case prefix and suffix will have the other value.
final CharSequence prefix = commonPrefix(s1, s2);
int prefixLength = length(prefix);
if (prefixLength >= shortestLength) return prefix;
final CharSequence suffix = commonSuffix(s1, s2);
int suffixLength = length(suffix);
if (suffixLength >= shortestLength) return suffix;
final int length = prefixLength + suffixLength;
if (length >= lg1) return s1; // Check if one of the strings is already equal to prefix + suffix.
if (length >= lg2) return s2;
/*
* At this point `s1` and `s2` contain at least one character between the prefix and the suffix.
* If the prefix or the suffix seems to stop in the middle of a word, skip the remaining of that word.
* For example if `s1` and `s2` are "eastward_velocity" and "northward_velocity", the common suffix is
* "ward_velocity" but we want to retain only "velocity".
*
* The first condition below (before the loop) checks the character after the common prefix (for example "e"
* in "baroclinic_eastward_velocity" if the prefix is "baroclinic_"). The intent is to handle the case where
* the word separator is not the same (e.g. "baroclinic_eastward_velocity" and "baroclinic northward velocity",
* in which case the '_' or ' ' character would not appear in the prefix).
*/
if (!isWordBoundary(s1, prefixLength, codePointAt(s1, prefixLength)) &&
!isWordBoundary(s2, prefixLength, codePointAt(s2, prefixLength)))
{
while (prefixLength > 0) {
final int c = codePointBefore(prefix, prefixLength);
final int n = charCount(c);
prefixLength -= n;
if (isWordBoundary(prefix, prefixLength, c)) {
if (!isLetterOrDigit(c)) prefixLength += n; // Keep separator character.
break;
}
}
}
/*
* Same process as for the prefix above. The condition before the loop checks the character before the suffix
* for the same reason as above, but using only `isLetterOrDigit` and ignoring camel-case. The reason is that
* if the character before was a word separator according to the camel-case convention (i.e. an upper-case letter),
* we would need to include it in the common suffix.
*/
int suffixStart = 0;
if (isLetterOrDigit(codePointBefore(s1, lg1 - suffixLength)) &&
isLetterOrDigit(codePointBefore(s2, lg2 - suffixLength)))
{
while (suffixStart < suffixLength) {
final int c = codePointAt(suffix, suffixStart);
if (isWordBoundary(suffix, suffixStart, c)) break;
suffixStart += charCount(c);
}
}
/*
* At this point we got the final prefix and suffix to use. If the prefix or suffix is empty,
* trim whitespaces or '_' character. For example if the suffix is "_velocity" and no prefix,
* return "velocity" without leading "_" character.
*/
if (prefixLength == 0) {
while (suffixStart < suffixLength) {
final int c = codePointAt(suffix, suffixStart);
if (isLetterOrDigit(c)) {
return suffix.subSequence(suffixStart, suffixLength); // Skip leading ignorable characters in suffix.
}
suffixStart += charCount(c);
}
return "";
}
if (suffixStart >= suffixLength) {
while (prefixLength > 0) {
final int c = codePointBefore(prefix, prefixLength);
if (isLetterOrDigit(c)) {
return prefix.subSequence(0, prefixLength); // Skip trailing ignorable characters in prefix.
}
prefixLength -= charCount(c);
}
return "";
}
/*
* All special cases have been examined. Return the concatenation of (possibly shortened)
* common prefix and suffix.
*/
final StringBuilder buffer = new StringBuilder(prefixLength + suffixLength).append(prefix);
final int c1 = codePointBefore(prefix, prefixLength);
final int c2 = codePointAt(suffix, suffixStart);
if (isLetterOrDigit(c1) && isLetterOrDigit(c2)) {
if (!Character.isUpperCase(c2) || !isLowerCase(c1)) {
buffer.append(' '); // Keep a separator between two words (except if CamelCase is used).
}
} else if (c1 == c2) {
suffixStart += charCount(c2); // Avoid repeating '_' in e.g. "baroclinic_<removed>_velocity".
}
return buffer.append(suffix, suffixStart, suffixLength).toString();
}
/**
* Returns {@code true} if the character {@code c} is the beginning of a word or a non-word character.
* For example this method returns {@code true} if {@code c} is {@code '_'} in {@code "snake_case"} or
* {@code "C"} in {@code "CamelCase"}.
*
* @param s the character sequence from which the {@code c} character has been obtained.
* @param i the index in {@code s} where the {@code c} character has been obtained.
* @param c the code point in {@code s} at index {@code i}.
* @return whether the given character is the beginning of a word or a non-word character.
*/
private static boolean isWordBoundary(final CharSequence s, final int i, final int c) {
if (!isLetterOrDigit(c)) return true;
if (!Character.isUpperCase(c)) return false;
return (i <= 0 || isLowerCase(codePointBefore(s, i)));
}
/**
* Returns the token starting at the given offset in the given text. For the purpose of this
* method, a "token" is any sequence of consecutive characters of the same type, as defined
* below.
*
* <p>Let <var>c</var> be the first non-blank character located at an index equal to or
* greater than the given offset. Then the characters that are considered of the same type
* are:</p>
*
* <ul>
* <li>If <var>c</var> is a
* {@linkplain Character#isUnicodeIdentifierStart(int) Unicode identifier start},
* then any following characters that are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part}.</li>
* <li>Otherwise any character for which {@link Character#getType(int)} returns
* the same value as for <var>c</var>.</li>
* </ul>
*
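* <div class="note"><b>Example:</b>
* {@preformat java
*     token("WGS84 geographic", 0);    // "WGS84"
*     token("WGS84 geographic", 3);    // "84" (digits cannot start an identifier, so only the digits are taken)
* }
* </div>
*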
* @param text the text for which to get the token.
* @param fromIndex index of the first character to consider in the given text.
* @return a sub-sequence of {@code text} starting at the given offset, or an empty string
* if there is no non-blank character at or after the given offset.
* @throws NullPointerException if the {@code text} argument is null.
*/
public static CharSequence token(final CharSequence text, int fromIndex) {
final int length = text.length();
int upper = fromIndex;
/*
* Skip whitespaces. At the end of this loop,
* 'c' will be the first non-blank character.
*/
int c;
do {
if (upper >= length) return "";
c = codePointAt(text, upper);
fromIndex = upper;
upper += charCount(c);
}
while (isWhitespace(c));
/*
* Advance over all characters "of the same type".
*/
if (isUnicodeIdentifierStart(c)) {
while (upper<length && isUnicodeIdentifierPart(c = codePointAt(text, upper))) {
upper += charCount(c);
}
} else {
final int type = getType(codePointAt(text, fromIndex));
while (upper<length && getType(c = codePointAt(text, upper)) == type) {
upper += charCount(c);
}
}
return text.subSequence(fromIndex, upper);
}
/**
* Replaces all occurrences of a given string in the given character sequence. If no occurrence of
* {@code toSearch} is found in the given text or if {@code toSearch} is equal to {@code replaceBy},
* then this method returns the {@code text} unchanged.
* Otherwise this method returns a new character sequence with all occurrences replaced by {@code replaceBy}.
*
* <p>This method is similar to {@link String#replace(CharSequence, CharSequence)} except that it accepts
* arbitrary {@code CharSequence} objects. As of Java 10, another difference is that this method does not
* create a new {@code String} if {@code toSearch} is equal to {@code replaceBy}.</p>
*
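* <div class="note"><b>Example:</b>
* {@preformat java
*     replace("WGS84 / WGS84", "84", "72");    // "WGS72 / WGS72"
* }
* </div>
*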
* @param text the character sequence in which to perform the replacements, or {@code null}.
* @param toSearch the string to replace.
* @param replaceBy the replacement for the searched string.
* @return the given text with replacements applied, or {@code text} if no replacement has been applied,
* or {@code null} if the given text was null.
*
* @see String#replace(char, char)
* @see StringBuilders#replace(StringBuilder, String, String)
* @see String#replace(CharSequence, CharSequence)
*
* @since 0.4
*/
public static CharSequence replace(final CharSequence text, final CharSequence toSearch, final CharSequence replaceBy) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
ArgumentChecks.ensureNonNull ("replaceBy", replaceBy);
if (text != null && !toSearch.equals(replaceBy)) {
if (text instanceof String) {
return ((String) text).replace(toSearch, replaceBy);
}
final int length = text.length();
int i = indexOf(text, toSearch, 0, length);
if (i >= 0) {
int p = 0;
final int sl = toSearch.length();
final StringBuilder buffer = new StringBuilder(length + (replaceBy.length() - sl));
do {
buffer.append(text, p, i).append(replaceBy);
i = indexOf(text, toSearch, p = i + sl, length);
} while (i >= 0);
return buffer.append(text, p, length);
}
}
return text;
}
/**
* Copies a sequence of characters in the given {@code char[]} array.
*
* @param src the characters sequence from which to copy characters.
* @param srcOffset index of the first character from {@code src} to copy.
* @param dst the array where to copy the characters.
* @param dstOffset index where to write the first character in {@code dst}.
* @param length number of characters to copy.
*
* @see String#getChars(int, int, char[], int)
* @see StringBuilder#getChars(int, int, char[], int)
* @see StringBuffer#getChars(int, int, char[], int)
* @see CharBuffer#get(char[], int, int)
* @see javax.swing.text.Segment#array
*/
public static void copyChars(final CharSequence src, int srcOffset,
final char[] dst, int dstOffset, int length)
{
ArgumentChecks.ensurePositive("length", length);
if (src instanceof String) {
((String) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuilder) {
((StringBuilder) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuffer) {
((StringBuffer) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof CharBuffer) {
((CharBuffer) src).subSequence(srcOffset, srcOffset + length).get(dst, dstOffset, length);
} else {
/*
* Another candidate could be `javax.swing.text.Segment`, but it
* is probably not worth introducing a Swing dependency for it.
*/
while (length != 0) {
dst[dstOffset++] = src.charAt(srcOffset++);
length--;
}
}
}
}<|fim▁end|> * If there are no common words, then this method returns an empty string.</p>
*
* <h4>Possible future evolution</h4>
* Current implementation searches only for a common prefix and a common suffix, ignoring any common words |
<|file_name|>results_model.py<|end_file_name|><|fim▁begin|># Created By: Virgil Dupras
# Created On: 2009-04-23
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from PyQt5.QtCore import Qt, pyqtSignal, QModelIndex
from PyQt5.QtGui import QBrush, QFont, QFontMetrics, QColor
from PyQt5.QtWidgets import QTableView
from qtlib.table import Table
class ResultsModel(Table):
def __init__(self, app, view, **kwargs):
model = app.model.result_table
super().__init__(model, view, **kwargs)
view.horizontalHeader().setSortIndicator(1, Qt.AscendingOrder)
font = view.font()
font.setPointSize(app.prefs.tableFontSize)
self.view.setFont(font)<|fim▁hole|> view.verticalHeader().setDefaultSectionSize(fm.height() + 2)
app.willSavePrefs.connect(self.appWillSavePrefs)
self.prefs = app.prefs
def _getData(self, row, column, role):
if column.name == "marked":
if role == Qt.CheckStateRole and row.markable:
return Qt.Checked if row.marked else Qt.Unchecked
return None
if role == Qt.DisplayRole:
data = row.data_delta if self.model.delta_values else row.data
return data[column.name]
elif role == Qt.ForegroundRole:
if row.isref:
return QBrush(Qt.blue)
elif row.is_cell_delta(column.name):
return QBrush(QColor(255, 142, 40)) # orange
elif role == Qt.FontRole:
font = QFont(self.view.font())
if self.prefs.reference_bold_font:
font.setBold(row.isref)
return font
elif role == Qt.EditRole:
if column.name == "name":
return row.data[column.name]
return None
def _getFlags(self, row, column):
flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable
if column.name == "marked":
if row.markable:
flags |= Qt.ItemIsUserCheckable
elif column.name == "name":
flags |= Qt.ItemIsEditable
return flags
def _setData(self, row, column, value, role):
if role == Qt.CheckStateRole:
if column.name == "marked":
row.marked = bool(value)
return True
elif role == Qt.EditRole:
if column.name == "name":
return self.model.rename_selected(value)
return False
def sort(self, column, order):
column = self.model.COLUMNS[column]
self.model.sort(column.name, order == Qt.AscendingOrder)
# --- Properties
@property
def power_marker(self):
return self.model.power_marker
@power_marker.setter
def power_marker(self, value):
self.model.power_marker = value
@property
def delta_values(self):
return self.model.delta_values
@delta_values.setter
def delta_values(self, value):
self.model.delta_values = value
# --- Events
def appWillSavePrefs(self):
self.model.columns.save_columns()
# --- model --> view
def invalidate_markings(self):
# redraw view
# HACK. this is the only way I found to update the widget without reseting everything
self.view.scroll(0, 1)
self.view.scroll(0, -1)
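# Usage sketch (illustrative only; `app` is assumed to expose model.result_table,
# prefs and a willSavePrefs signal, as used in __init__ above):
#
#     view = ResultsView()
#     model = ResultsModel(app, view)
#     view.setModel(model)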
class ResultsView(QTableView):
# --- Override
def keyPressEvent(self, event):
if event.text() == " ":
self.spacePressed.emit()
return
super().keyPressEvent(event)
def mouseDoubleClickEvent(self, event):
self.doubleClicked.emit(QModelIndex())
# We don't call the superclass' method because the default behavior is to rename the cell.
# --- Signals
spacePressed = pyqtSignal()<|fim▁end|> | fm = QFontMetrics(font) |
<|file_name|>TheorEval.cc<|end_file_name|><|fim▁begin|>/*!
@file TheorEval.cc
@date Tue Aug 12 2013
@author Andrey Sapronov <[email protected]>
Contains TheorEval class member function implementations.
*/
#include <fstream>
#include <list>
#include <sstream>
#include <stack>
#include <float.h>
#include <valarray>
#include "TheorEval.h"
#include "CommonGrid.h"
#include "xfitter_cpp.h"
using namespace std;
// extern struct ord_scales {
// double datasetmur[150];
// double datasetmuf[150];
// int datasetiorder[150];
// } cscales_;
TheorEval::TheorEval(const int dsId, const int nTerms, const std::vector<string> stn, const std::vector<string> stt,
const std::vector<string> sti, const std::vector<string> sts, const string& expr) : _dsId(dsId), _nTerms(nTerms)
{
// _iOrd = cscales_.datasetiorder[_dsId-1];
// _xmur = cscales_.datasetmur[_dsId-1];
// _xmuf = cscales_.datasetmuf[_dsId-1];
for (int it= 0 ; it<nTerms; it++ ){
_termNames.push_back(stn[it]);
_termTypes.push_back(stt[it]);
_termInfos.push_back(sti[it]);
_termSources.push_back(sts[it]);
}
_expr.assign(expr);
_ppbar = false;
}
TheorEval::~TheorEval()
{
map<CommonGrid*, valarray<double>* >::iterator itm = _mapGridToken.begin();
for (; itm!= _mapGridToken.end(); itm++){
delete itm->first;
}
vector<tToken>::iterator it = _exprRPN.begin();
for (; it!=_exprRPN.end(); it++){
if ( it->val ) { delete it->val; it->val = NULL; } // delete only non-null values
}
}
int
TheorEval::initTheory()
{
list<tToken> sl;
this->assignTokens(sl);
this->convertToRPN(sl);
return 0;
}
int
TheorEval::assignTokens(list<tToken> &sl)
{
stringstream strexpr(_expr);
int it = 0;
const int nb = this->getNbins();
char c;
string term;
tToken t;
while (1){
strexpr.get(c);
if ( strexpr.eof() ) break;
if ( isspace(c) ) continue; // skip whitespaces.
// Oh noes! doesn't work after fortran reading expression with spaces :(.
if ( isdigit(c) ) { // process numbers
term.assign(1,c);
do {
strexpr.get(c);
if ( strexpr.eof() ) break;
if ( isdigit(c) || c=='.' ) {
term.append(1,c);
} else if ( c=='E' || c=='e' ) { // read exponent including sign in scientific notation
term.append(1,c);
strexpr.get(c);
if ( strexpr.eof() ) break;
if ( isdigit(c) || c == '-' ){
term.append(1,c);
} else {
cout << "Theory expression syntax error: " << _expr << endl;
return -1;
}
} else {
strexpr.putback(c);<|fim▁hole|>
t.opr = 0;
t.name = term;
t.val = new valarray<double>(dterm, nb);
sl.push_back(t);
continue;
} else if ( isalpha(c) ) { // process literal terms
term.assign(1,c);
while (strexpr.get(c) ) {
if ( isalnum(c) ) term.append(1,c);
else {
strexpr.putback(c);
break;
}
}
if ( term == string("sum") ) { // special case for sum() function
t.opr = 4;
t.name = "sum";
t.val = new valarray<double>(0., nb);
sl.push_back(t);
continue;
}
/*
if ( term == string("avg") ) { // special case for avg() function
t.opr = 4;
t.name = "avg";
t.val = new valarray<double>(0., nb);
sl.push_back(t);
continue;
}
*/
vector<string>::iterator found_term = find(_termNames.begin(), _termNames.end(), term);
if ( found_term == _termNames.end() ) {
cout << "Undeclared term " << term << " in expression " << _expr << endl;
return -1;
} else {
t.opr = 0;
t.name = term;
if ( _mapInitdTerms.find(term) != _mapInitdTerms.end()){
t.val = _mapInitdTerms[term];
} else {
t.val = new valarray<double>(0.,nb);
this->initTerm(int(found_term-_termNames.begin()), t.val);
_mapInitdTerms[term] = t.val;
}
sl.push_back(t);
}
term.clear();
continue;
} else {
switch(c){
case '(': t.opr = -1; break;
case ')': t.opr = -2; break;
case '+': t.opr = 1; break;
case '-': t.opr = 1; break;
case '*': t.opr = 3; break;
case '/': t.opr = 3; break;
default: cout << "Unknown operator "<< c << " in expression " << _expr << endl;
}
t.name.assign(1,c);
t.val = new valarray<double>(0., nb);
sl.push_back(t);
}
}
return 0;
}
int
TheorEval::convertToRPN(list<tToken> &sl)
{
stack<tToken> tknstk;
// convert to RPN
while ( 0!=sl.size()){
tToken t = sl.front();
sl.pop_front();
if ( 0 == t.opr ) {
_exprRPN.push_back(t);
}
//if ( 4 == t.opr ){ // push functions
// tknstk.push(t);
//}
if ( t.opr >0 ) {
while ( tknstk.size() > 0 && t.opr <= tknstk.top().opr ) {
_exprRPN.push_back(tknstk.top());
tknstk.pop();
}
tknstk.push(t);
}
if ( t.opr == -1 ){ tknstk.push(t); delete t.val;} // left parenthesis
if ( t.opr == -2 ){ // right parenthesis
while ( tknstk.top().opr != -1 ) {
if ( tknstk.size() == 0 ) cout << "ERROR: Wrong syntax in theoretical expression: "<< _expr << endl;
_exprRPN.push_back(tknstk.top());
tknstk.pop();
}
delete t.val;
tknstk.pop();
}
}
while ( tknstk.size() != 0 ){
if (tknstk.top().opr == -1 ) cout << "ERROR: Wrong syntax in theoretical expression: "<< _expr << endl;
_exprRPN.push_back(tknstk.top());
tknstk.pop();
}
/*
vector<tToken>::iterator it= _exprRPN.begin();
for (;it!=_exprRPN.end(); it++){
cout << it->name << " " ;
}
cout << endl;
*/
return 0;
}
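// Illustrative sketch (not part of the original code): for a hypothetical expression
// "k * (reac + bkg)", assignTokens() produces the token list
//   [k, *, (, reac, +, bkg, )]
// and convertToRPN() reorders it into the reverse Polish sequence
//   [k, reac, bkg, +, *]
// which Evaluate() later reduces with a stack of valarrays.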
int
TheorEval::initTerm(int iterm, valarray<double> *val)
{
string term_type = _termTypes.at(iterm);
if ( term_type.find("grid") != string::npos || term_type.find("ast") != string::npos ){ //appl'grid' or f'ast'NLO
this->initGridTerm(iterm, val);
} else if ( term_type == string("kfactor")) {
this->initKfTerm(iterm, val);
} else {
int id = 15102301;
char text[] = "S: Unknown term type in expression for term";
std::cout << "Unknown term type in expression for term " << _termNames[iterm] << std::endl;
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
return -1;
}
return 0;
}
int
TheorEval::initGridTerm(int iterm, valarray<double> *val)
{
string term_source = _termSources.at(iterm);
string term_type = _termTypes.at(iterm);
string term_info = _termInfos.at(iterm);
CommonGrid *g = new CommonGrid(term_type, term_source);
if ( term_type.find("grid") != string::npos ) {
// set the collision for the grid term
string collision ("pp"); // default is pp
// this is to have backward-compatibility with Tevatron datasets
if ( _ppbar ) collision.assign(string("ppbar"));
// otherwise we check beams in the TermInfo lines
else {
size_t beams_pos = term_info.find(string("beams"));
if ( beams_pos != string::npos ){
size_t semicol_pos = term_info.find(';', beams_pos);
size_t eq_pos = term_info.find('=', beams_pos);
collision.assign(term_info.substr(eq_pos+1, semicol_pos - eq_pos-1));
}
}
// strip blanks
collision.erase(std::remove(collision.begin(), collision.end(), ' '), collision.end());
// and set the collision
g->SetCollisions(collision);
g->SetDynamicScale( _dynamicscale );
// check the binning with the grids, will be ignored for normalisation grids
g->checkBins(_binFlags, _dsBins);
}
else if ( term_type.find("ast") != string::npos ){
bool PublicationUnits = true; // todo: take from new steering flag 'TermNorm'
//FastNLOReader* fnlo = g->getHBins().back().f;
FastNLOxFitter* fnlo = g->getHBins().back().f;
if(PublicationUnits)
fnlo->SetUnits(fastNLO::kPublicationUnits);
else
fnlo->SetUnits(fastNLO::kAbsoluteUnits);
// --- set scales
if(_MurDef>=0)
fnlo->SetMuRFunctionalForm((fastNLO::EScaleFunctionalForm) ((int) (_MurDef)));
if(_MufDef>=0)
fnlo->SetMuFFunctionalForm((fastNLO::EScaleFunctionalForm) ((int) (_MufDef)));
if ( _xmur!=1 || _xmuf!=1 )
fnlo->SetScaleFactorsMuRMuF(_xmur, _xmuf);
// --- set order
if ( _iOrd == 1 ) {
fnlo->SetContributionON(fastNLO::kFixedOrder,1,false); // switch 'off' NLO
}
else if (_iOrd==2) {
// that's fastNLO default
}
else if (_iOrd==3) {
fnlo->SetContributionON(fastNLO::kFixedOrder,2,true); // switch 'on' NNLO
}
else {
printf("fastNLO pert. order is not defined, ordercalc = %d:\n",_iOrd);
exit(1);
}
}
/*
appl::grid *g = new appl::grid(term_source);
if (_dynamicscale != 0)
{
#ifdef APPLGRID_DYNSCALE
g->setDynamicScale( _dynamicscale );
#else
int id = 2204201401;
char text[] = "S: Cannot use dynamic scale emulation in Applgrid, use v1.4.43 or higher";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
#endif
}
g->trim();
*/
// associate grid and valarray pointers in token
_mapGridToken[g] = val;
return 0;
}
int
TheorEval::initKfTerm(int iterm, valarray<double> *val)
{
string term_source(_termSources.at(iterm));
// read k-Factor table and compare it's binning to the data
cout << "reading k-factor table from " << term_source << endl;
vector<double> tv;
vector<vector<double> > bkf(_dsBins.size(),tv);
vector<double> vkf;
ifstream kff(term_source.c_str());
string line;
if (kff.is_open()){
while (1) {
getline(kff,line);
if (true == kff.eof()) break;
if (line.empty() || line.at(0) == '#') continue; // ignore empty lines and comments
line.erase(line.find_last_not_of(" \n\r\t")+1); // trim trailing whitespaces
stringstream sl(line);
// first count words
int nw(0);
while (sl.good()) {
string ts;
sl >> ts;
nw++;
}
// check that we have even number of bins (low and high columns)
if (0!=(nw-1)%2) {
int id = 14040340;
char text[] = "S: Bad number of bins in k-factor file. Each bin must have low and high value.";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
}
// check that the number of bins is equal to data binning dimension
if ((nw-1) != _dsBins.size()) {
int id = 14040341;
char text[] = "S: Bad number of bins in k-factor file. Must be equal to data binning dimension.";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
}
// now read bins
sl.clear();
sl.seekg(0);
sl.str(line);
double tb(0);
for (int iw=0; iw<nw-1; iw++) {
sl >> tb;
bkf.at(iw).push_back(tb);
}
// and k-factor
sl>>tb;
vkf.push_back(tb);
}
kff.close();
} else {
int id = 14040339;
char text[] = "S: Error reading k-factor file.";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
}
// check that k-factor file binning is compatible with data
for (int iv = 0; iv<_dsBins.size(); iv++){
for (int ib = 0; ib<_dsBins.at(iv).size(); ib++){
if ( _binFlags.at(ib) == 0 ) continue;
if ( 0 == (_binFlags.at(ib) & 2) ) {
if (fabs(bkf[iv][ib] - _dsBins[iv][ib]) > 100*DBL_MIN) {
int id = 14040338;
char text[] = "S: Data and grid bins don't match.";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
return -1;
}
}
}
}
// write k-factor array to the token valarray
*val = valarray<double>(vkf.data(), vkf.size());
return 0;
}
int
TheorEval::setBins(int nBinDim, int nPoints, int *binFlags, double *allBins)
{
for(int ip = 0; ip<nPoints; ip++){
_binFlags.push_back(binFlags[ip]);
}
for(int ibd = 0; ibd < nBinDim; ibd++){
vector<double> bins;
bins.clear();
for(int ip = 0; ip<nPoints; ip++){
bins.push_back(allBins[ip*10 + ibd]);
}
_dsBins.push_back(bins);
}
return _dsBins.size();
}
int
TheorEval::setCKM(const vector<double> &v_ckm)
{
#ifdef APPLGRID_CKM
map<CommonGrid*, valarray<double>* >::iterator itm = _mapGridToken.begin();
for(; itm != _mapGridToken.end(); itm++){
itm->first->setCKM(v_ckm);
}
#else
int id = 611201320;
char text[] = "S: Cannot set CKM in Applgrid, use v1.4.33 or higher";
int textlen = strlen(text);
hf_errlog_(id, text, textlen);
#endif
return 0;
}
int
TheorEval::Evaluate(valarray<double> &vte )
{
// get values from grids
this->getGridValues();
// calculate expression result
stack<valarray<double> > stk;
vector<tToken>::iterator it = _exprRPN.begin();
while(it!= _exprRPN.end()){
if ( it->opr < 0 ){
cout << "ERROR: Expression RPN is wrong" << endl;
return -1;
}
if ( it->opr == 0 ){
stk.push(*(it->val));
} else if ( it->name == string("sum") ){
double sum = stk.top().sum();
stk.top() = sum;
/* } else if ( it->name == string("avg") ){
if (0 == stk.top().size()) {
cout << "ERROR: avg() argument dimension is 0." << endl;
}
double avg = stk.top().sum()/stk.top().size();
stk.top() = avg;*/
} else if ( it->name == string("+") ){
valarray<double> a(stk.top());
stk.pop();
stk.top() += a;
} else if ( it->name == string("-") ){
valarray<double> a(stk.top());
stk.pop();
stk.top() -= a;
} else if ( it->name == string("*") ){
valarray<double> a(stk.top());
stk.pop();
stk.top() *= a;
} else if ( it->name == string("/") ){
valarray<double> a(stk.top());
stk.pop();
stk.top() /= a;
}
it++;
}
if (stk.size() != 1 ) {
cout << "ERROR: Expression RPN calculation error." << endl;
return -1;
} else {
vte = stk.top();
//Normalised cross section
if (_normalised)
{
double integral = 0;
for (int bin = 0; bin < _binFlags.size(); bin++)
if (!(vte[bin] != vte[bin])) //protection against nan
integral += (_dsBins.at(1).at(bin) - _dsBins.at(0).at(bin)) * vte[bin];
if (integral != 0)
for (int bin = 0; bin < _binFlags.size(); bin++)
vte[bin] /= integral;
}
//vte /= _units;
}
return 0;
}
int
TheorEval::getGridValues()
{
map<CommonGrid*, valarray<double>*>::iterator itm;
for(itm = _mapGridToken.begin(); itm != _mapGridToken.end(); itm++){
CommonGrid* g = itm->first;
vector<double> xs;
std::vector< std::vector<double> > result = g->vconvolute(_iOrd, _xmur, _xmuf);
for(int i = 0; i < result.size(); i++)
for(int j = 0; j < result[i].size(); j++)
xs.push_back(result[i][j]);
(itm->second)->resize(xs.size());
*(itm->second) = valarray<double>(xs.data(), xs.size());
/*
for (int i = 0; i<xs.size(); i++){
cout << xs[i] << endl;
}
*/
}
return 0;
}
int
TheorEval::getNbins()
{
return _dsBins[0].size();
}
void TheorEval::ChangeTheorySource(string term, string source)
{
vector<string>::iterator found_term = find(_termNames.begin(), _termNames.end(), term);
if ( found_term == _termNames.end())
{
string msg = (string) "S: Undeclared term " + term;
hf_errlog_(14020603, msg.c_str(), msg.size());
}
int iterm = int(found_term-_termNames.begin());
// cout << "switch " << _termSources[iterm] << " to " << source << endl;
_termSources[iterm] = source;
//delete old applgrid
map<CommonGrid*, valarray<double>* >::iterator itm = _mapGridToken.begin();
for (; itm!= _mapGridToken.end(); itm++)
{
if (itm->second == _mapInitdTerms[term])
{
delete itm->first;
_mapGridToken.erase(itm);
break;
}
}
initTerm(int(found_term-_termNames.begin()), _mapInitdTerms[term]);
}
string TheorEval::GetTheorySource(string term)
{
vector<string>::iterator found_term = find(_termNames.begin(), _termNames.end(), term);
if ( found_term == _termNames.end())
{
string msg = (string) "S: Undeclared term " + term;
hf_errlog_(14020603, msg.c_str(), msg.size());
}
int iterm = int(found_term-_termNames.begin());
return _termSources[iterm];
}<|fim▁end|> | break;
}
} while (1);
double dterm = atof(term.c_str()); |
<|file_name|>boss_felmyst.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Boss_Felmyst
SD%Complete: 0
SDComment:
EndScriptData */
#include "ScriptMgr.h"
#include "CellImpl.h"
#include "GridNotifiersImpl.h"
#include "InstanceScript.h"
#include "MotionMaster.h"
#include "ObjectAccessor.h"
#include "ScriptedCreature.h"
#include "sunwell_plateau.h"
#include "TemporarySummon.h"
enum Yells
{
YELL_BIRTH = 0,
YELL_KILL = 1,
YELL_BREATH = 2,
YELL_TAKEOFF = 3,
YELL_BERSERK = 4,
YELL_DEATH = 5,
//YELL_KALECGOS = 6, Not used. Kalecgos spawns after Felmyst's death and says this
};
enum Spells
{
//Aura
AURA_SUNWELL_RADIANCE = 45769,
AURA_NOXIOUS_FUMES = 47002,
//Land phase
SPELL_CLEAVE = 19983,
SPELL_CORROSION = 45866,
SPELL_GAS_NOVA = 45855,
SPELL_ENCAPSULATE_CHANNEL = 45661,
// SPELL_ENCAPSULATE_EFFECT = 45665,
// SPELL_ENCAPSULATE_AOE = 45662,
//Flight phase
SPELL_VAPOR_SELECT = 45391, // fel to player, force cast 45392, 50000y select target
SPELL_VAPOR_SUMMON = 45392, // player summon vapor, radius around caster, 5y,
SPELL_VAPOR_FORCE = 45388, // vapor to fel, force cast 45389
SPELL_VAPOR_CHANNEL = 45389, // fel to vapor, green beam channel
SPELL_VAPOR_TRIGGER = 45411, // linked to 45389, vapor to self, trigger 45410 and 46931
SPELL_VAPOR_DAMAGE = 46931, // vapor damage, 4000
SPELL_TRAIL_SUMMON = 45410, // vapor summon trail
SPELL_TRAIL_TRIGGER = 45399, // trail to self, trigger 45402
SPELL_TRAIL_DAMAGE = 45402, // trail damage, 2000 + 2000 dot
SPELL_DEAD_SUMMON = 45400, // summon blazing dead, 5min
SPELL_DEAD_PASSIVE = 45415,
SPELL_FOG_BREATH = 45495, // fel to self, speed burst
SPELL_FOG_TRIGGER = 45582, // fog to self, trigger 45782
SPELL_FOG_FORCE = 45782, // fog to player, force cast 45714
SPELL_FOG_INFORM = 45714, // player let fel cast 45717, script effect
SPELL_FOG_CHARM = 45717, // fel to player
SPELL_FOG_CHARM2 = 45726, // link to 45717
SPELL_TRANSFORM_TRIGGER = 44885, // madrigosa to self, trigger 46350
SPELL_TRANSFORM_VISUAL = 46350, // 46411stun?
SPELL_TRANSFORM_FELMYST = 45068, // become fel
SPELL_FELMYST_SUMMON = 45069,
//Other
SPELL_BERSERK = 45078,
SPELL_CLOUD_VISUAL = 45212,
SPELL_CLOUD_SUMMON = 45884
};
enum PhaseFelmyst
{
PHASE_NONE,
PHASE_GROUND,
PHASE_FLIGHT
};
enum EventFelmyst
{
EVENT_NONE,
EVENT_BERSERK,
EVENT_CLEAVE,
EVENT_CORROSION,
EVENT_GAS_NOVA,
EVENT_ENCAPSULATE,
EVENT_FLIGHT,
EVENT_FLIGHT_SEQUENCE,
EVENT_SUMMON_DEAD,
EVENT_SUMMON_FOG
};
class boss_felmyst : public CreatureScript
{
public:
boss_felmyst() : CreatureScript("boss_felmyst") { }
struct boss_felmystAI : public ScriptedAI
{
boss_felmystAI(Creature* creature) : ScriptedAI(creature)
{
Initialize();
instance = creature->GetInstanceScript();
uiBreathCount = 0;
breathX = 0.f;
breathY = 0.f;
}
void Initialize()
{
phase = PHASE_NONE;
uiFlightCount = 0;
}
InstanceScript* instance;
PhaseFelmyst phase;
EventMap events;
uint32 uiFlightCount;
uint32 uiBreathCount;
float breathX, breathY;
void Reset() override
{
Initialize();
events.Reset();
me->SetDisableGravity(true);
me->SetFloatValue(UNIT_FIELD_BOUNDINGRADIUS, 10);
me->SetFloatValue(UNIT_FIELD_COMBATREACH, 10);
DespawnSummons(NPC_VAPOR_TRAIL);
me->setActive(false);
instance->SetBossState(DATA_FELMYST, NOT_STARTED);
}
void EnterCombat(Unit* /*who*/) override
{
events.ScheduleEvent(EVENT_BERSERK, 600000);
me->setActive(true);
DoZoneInCombat();
DoCast(me, AURA_SUNWELL_RADIANCE, true);
DoCast(me, AURA_NOXIOUS_FUMES, true);
EnterPhase(PHASE_GROUND);
instance->SetBossState(DATA_FELMYST, IN_PROGRESS);
}
void AttackStart(Unit* who) override
{
if (phase != PHASE_FLIGHT)
ScriptedAI::AttackStart(who);
}
void MoveInLineOfSight(Unit* who) override
{
if (phase != PHASE_FLIGHT)
ScriptedAI::MoveInLineOfSight(who);
}
void KilledUnit(Unit* /*victim*/) override
{
Talk(YELL_KILL);
}
void JustRespawned() override
{
Talk(YELL_BIRTH);
}
void JustDied(Unit* /*killer*/) override
{
Talk(YELL_DEATH);
instance->SetBossState(DATA_FELMYST, DONE);
}
void SpellHit(Unit* caster, SpellInfo const* spell) override
{
// workaround for linked aura
/*if (spell->Id == SPELL_VAPOR_FORCE)
{
caster->CastSpell(caster, SPELL_VAPOR_TRIGGER, true);
}*/
// workaround for mind control
if (spell->Id == SPELL_FOG_INFORM)
{
float x, y, z;
caster->GetPosition(x, y, z);
if (Unit* summon = me->SummonCreature(NPC_DEAD, x, y, z, 0, TEMPSUMMON_TIMED_DESPAWN_OUT_OF_COMBAT, 5000))
{
summon->SetMaxHealth(caster->GetMaxHealth());
summon->SetHealth(caster->GetMaxHealth());
summon->CastSpell(summon, SPELL_FOG_CHARM, true);
summon->CastSpell(summon, SPELL_FOG_CHARM2, true);
}
me->DealDamage(caster, caster->GetHealth(), nullptr, DIRECT_DAMAGE, SPELL_SCHOOL_MASK_NORMAL, nullptr, false);
}
}
void JustSummoned(Creature* summon) override
{
if (summon->GetEntry() == NPC_DEAD)
{
summon->AI()->AttackStart(SelectTarget(SELECT_TARGET_RANDOM));
DoZoneInCombat(summon);
summon->CastSpell(summon, SPELL_DEAD_PASSIVE, true);
}
}
void MovementInform(uint32, uint32) override
{
if (phase == PHASE_FLIGHT)
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 1);
}
void DamageTaken(Unit*, uint32 &damage) override
{
if (phase != PHASE_GROUND && damage >= me->GetHealth())
damage = 0;
}
void EnterPhase(PhaseFelmyst NextPhase)
{
switch (NextPhase)
{
case PHASE_GROUND:
me->CastStop(SPELL_FOG_BREATH);
me->RemoveAurasDueToSpell(SPELL_FOG_BREATH);
me->StopMoving();
me->SetSpeedRate(MOVE_RUN, 2.0f);
events.ScheduleEvent(EVENT_CLEAVE, urand(5000, 10000));
events.ScheduleEvent(EVENT_CORROSION, urand(10000, 20000));
events.ScheduleEvent(EVENT_GAS_NOVA, urand(15000, 20000));
events.ScheduleEvent(EVENT_ENCAPSULATE, urand(20000, 25000));
events.ScheduleEvent(EVENT_FLIGHT, 60000);
break;
case PHASE_FLIGHT:
me->SetDisableGravity(true);
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 1000);
uiFlightCount = 0;
uiBreathCount = 0;
break;
default:
break;
}
phase = NextPhase;
}
void HandleFlightSequence()
{
switch (uiFlightCount)
{
case 0:
//me->AttackStop();
me->GetMotionMaster()->Clear(false);
me->HandleEmoteCommand(EMOTE_ONESHOT_LIFTOFF);
me->StopMoving();
Talk(YELL_TAKEOFF);
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 2000);
break;
case 1:
me->GetMotionMaster()->MovePoint(0, me->GetPositionX()+1, me->GetPositionY(), me->GetPositionZ()+10);
break;
case 2:
{
Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 150, true);
if (!target)
target = ObjectAccessor::GetUnit(*me, instance->GetGuidData(DATA_PLAYER_GUID));
if (!target)
{
EnterEvadeMode();
return;
}
if (Creature* Vapor = me->SummonCreature(NPC_VAPOR, target->GetPositionX() - 5 + rand32() % 10, target->GetPositionY() - 5 + rand32() % 10, target->GetPositionZ(), 0, TEMPSUMMON_TIMED_DESPAWN, 9000))
{
Vapor->AI()->AttackStart(target);
me->InterruptNonMeleeSpells(false);
DoCast(Vapor, SPELL_VAPOR_CHANNEL, false); // core bug
Vapor->CastSpell(Vapor, SPELL_VAPOR_TRIGGER, true);
}
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 10000);
break;
}
case 3:
{
DespawnSummons(NPC_VAPOR_TRAIL);
//DoCast(me, SPELL_VAPOR_SELECT); need core support
Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 150, true);
if (!target)
target = ObjectAccessor::GetUnit(*me, instance->GetGuidData(DATA_PLAYER_GUID));
if (!target)
{
EnterEvadeMode();
return;
}
//target->CastSpell(target, SPELL_VAPOR_SUMMON, true); need core support
if (Creature* pVapor = me->SummonCreature(NPC_VAPOR, target->GetPositionX() - 5 + rand32() % 10, target->GetPositionY() - 5 + rand32() % 10, target->GetPositionZ(), 0, TEMPSUMMON_TIMED_DESPAWN, 9000))
{
if (pVapor->AI())
pVapor->AI()->AttackStart(target);
me->InterruptNonMeleeSpells(false);
DoCast(pVapor, SPELL_VAPOR_CHANNEL, false); // core bug
pVapor->CastSpell(pVapor, SPELL_VAPOR_TRIGGER, true);
}
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 10000);
break;
}
case 4:
DespawnSummons(NPC_VAPOR_TRAIL);
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 1);
break;
case 5:
{
Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 150, true);
if (!target)
target = ObjectAccessor::GetUnit(*me, instance->GetGuidData(DATA_PLAYER_GUID));
if (!target)
{
EnterEvadeMode();
return;
}
breathX = target->GetPositionX();
breathY = target->GetPositionY();
float x, y, z;
target->GetContactPoint(me, x, y, z, 70);
me->GetMotionMaster()->MovePoint(0, x, y, z+10);
break;
}
case 6:
me->SetFacingTo(me->GetAngle(breathX, breathY));
//DoTextEmote("takes a deep breath.", nullptr);
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 10000);
break;
case 7:
{
DoCast(me, SPELL_FOG_BREATH, true);
float x, y, z;
me->GetPosition(x, y, z);
x = 2 * breathX - x;
y = 2 * breathY - y;
me->GetMotionMaster()->MovePoint(0, x, y, z);
events.ScheduleEvent(EVENT_SUMMON_FOG, 1);
break;
}
case 8:
me->CastStop(SPELL_FOG_BREATH);
me->RemoveAurasDueToSpell(SPELL_FOG_BREATH);
++uiBreathCount;
events.ScheduleEvent(EVENT_FLIGHT_SEQUENCE, 1);
if (uiBreathCount < 3)
uiFlightCount = 4;
break;
case 9:
if (Unit* target = SelectTarget(SELECT_TARGET_MAXTHREAT))
DoStartMovement(target);
else
{
EnterEvadeMode();
return;
}
break;
case 10:
me->SetDisableGravity(false);
me->HandleEmoteCommand(EMOTE_ONESHOT_LAND);
EnterPhase(PHASE_GROUND);
AttackStart(SelectTarget(SELECT_TARGET_MAXTHREAT));
break;
}
++uiFlightCount;
}
void UpdateAI(uint32 diff) override
{
if (!UpdateVictim())
{
if (phase == PHASE_FLIGHT && !me->IsInEvadeMode())
EnterEvadeMode();
return;
}
events.Update(diff);
if (me->IsNonMeleeSpellCast(false))
return;
if (phase == PHASE_GROUND)
{
switch (events.ExecuteEvent())
{
case EVENT_BERSERK:
Talk(YELL_BERSERK);
DoCast(me, SPELL_BERSERK, true);
events.ScheduleEvent(EVENT_BERSERK, 10000);
break;
case EVENT_CLEAVE:
DoCastVictim(SPELL_CLEAVE, false);
events.ScheduleEvent(EVENT_CLEAVE, urand(5000, 10000));
break;
case EVENT_CORROSION:
DoCastVictim(SPELL_CORROSION, false);
events.ScheduleEvent(EVENT_CORROSION, urand(20000, 30000));
break;
case EVENT_GAS_NOVA:
DoCast(me, SPELL_GAS_NOVA, false);
events.ScheduleEvent(EVENT_GAS_NOVA, urand(20000, 25000));
break;
case EVENT_ENCAPSULATE:
if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 150, true))
DoCast(target, SPELL_ENCAPSULATE_CHANNEL, false);
events.ScheduleEvent(EVENT_ENCAPSULATE, urand(25000, 30000));
break;
case EVENT_FLIGHT:
EnterPhase(PHASE_FLIGHT);
break;
default:
DoMeleeAttackIfReady();
break;
}
}
if (phase == PHASE_FLIGHT)
{
switch (events.ExecuteEvent())
{
case EVENT_BERSERK:
Talk(YELL_BERSERK);
DoCast(me, SPELL_BERSERK, true);
break;
case EVENT_FLIGHT_SEQUENCE:
HandleFlightSequence();
break;
case EVENT_SUMMON_FOG:
{
float x, y, z;
me->GetPosition(x, y, z);
me->UpdateGroundPositionZ(x, y, z);
if (Creature* Fog = me->SummonCreature(NPC_VAPOR_TRAIL, x, y, z, 0, TEMPSUMMON_TIMED_DESPAWN, 10000))
{
Fog->RemoveAurasDueToSpell(SPELL_TRAIL_TRIGGER);
Fog->CastSpell(Fog, SPELL_FOG_TRIGGER, true);
me->CastSpell(Fog, SPELL_FOG_FORCE, true);
}
}
events.ScheduleEvent(EVENT_SUMMON_FOG, 1000);
break;
}
}
}
void DespawnSummons(uint32 entry)
{
std::list<Creature*> templist;
float x, y, z;
me->GetPosition(x, y, z);
Trinity::AllCreaturesOfEntryInRange check(me, entry, 100);
Trinity::CreatureListSearcher<Trinity::AllCreaturesOfEntryInRange> searcher(me, templist, check);
Cell::VisitGridObjects(me, searcher, me->GetGridActivationRange());
for (std::list<Creature*>::const_iterator i = templist.begin(); i != templist.end(); ++i)
{
if (entry == NPC_VAPOR_TRAIL && phase == PHASE_FLIGHT)
{
(*i)->GetPosition(x, y, z);
me->SummonCreature(NPC_DEAD, x, y, z, 0, TEMPSUMMON_TIMED_DESPAWN_OUT_OF_COMBAT, 5000);
}
(*i)->SetVisible(false);
(*i)->DespawnOrUnsummon();
}
}
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetSunwellPlateauAI<boss_felmystAI>(creature);
}
};
class npc_felmyst_vapor : public CreatureScript
{
public:
npc_felmyst_vapor() : CreatureScript("npc_felmyst_vapor") { }
CreatureAI* GetAI(Creature* creature) const override
{
return GetSunwellPlateauAI<npc_felmyst_vaporAI>(creature);
}
struct npc_felmyst_vaporAI : public ScriptedAI
{
npc_felmyst_vaporAI(Creature* creature) : ScriptedAI(creature)
{
me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
me->SetSpeedRate(MOVE_RUN, 0.8f);
}
void Reset() override { }
void EnterCombat(Unit* /*who*/) override
{
DoZoneInCombat();
//DoCast(me, SPELL_VAPOR_FORCE, true); core bug<|fim▁hole|> if (!me->GetVictim())
if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 100, true))
AttackStart(target);
}
};
};
class npc_felmyst_trail : public CreatureScript
{
public:
npc_felmyst_trail() : CreatureScript("npc_felmyst_trail") { }
CreatureAI* GetAI(Creature* creature) const override
{
return GetSunwellPlateauAI<npc_felmyst_trailAI>(creature);
}
struct npc_felmyst_trailAI : public ScriptedAI
{
npc_felmyst_trailAI(Creature* creature) : ScriptedAI(creature)
{
me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
DoCast(me, SPELL_TRAIL_TRIGGER, true);
me->SetTarget(me->GetGUID());
me->SetFloatValue(UNIT_FIELD_BOUNDINGRADIUS, 0.01f); // core bug
}
void Reset() override { }
void EnterCombat(Unit* /*who*/) override { }
void AttackStart(Unit* /*who*/) override { }
void MoveInLineOfSight(Unit* /*who*/) override { }
void UpdateAI(uint32 /*diff*/) override { }
};
};
void AddSC_boss_felmyst()
{
new boss_felmyst();
new npc_felmyst_vapor();
new npc_felmyst_trail();
}<|fim▁end|> | }
void UpdateAI(uint32 /*diff*/) override
{ |
<|file_name|>connect.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use std::collections::BTreeSet;
use std::io;
use std::time::Duration;
use std::net::SocketAddr;
use futures::{Future, Poll, Async};
use tokio_core::reactor::Handle;
use tokio_core::net::{TcpStream, TcpStreamNew};
use key_server_cluster::{Error, NodeId, NodeKeyPair};
use key_server_cluster::io::{handshake, Handshake, Deadline, deadline};
use key_server_cluster::net::Connection;
/// Create future for connecting to other node.
pub fn connect(address: &SocketAddr, handle: &Handle, self_key_pair: Arc<NodeKeyPair>, trusted_nodes: BTreeSet<NodeId>) -> Deadline<Connect> {
let connect = Connect {
state: ConnectState::TcpConnect(TcpStream::connect(address, handle)),
address: address.clone(),
self_key_pair: self_key_pair,
trusted_nodes: trusted_nodes,
};
deadline(Duration::new(5, 0), handle, connect).expect("Failed to create timeout")
}
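/// Connection states: first the raw TCP connect, then the key-exchange handshake, then done.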
enum ConnectState {
TcpConnect(TcpStreamNew),
Handshake(Handshake<TcpStream>),
Connected,
}
/// Future for connecting to other node.
pub struct Connect {
state: ConnectState,
address: SocketAddr,
self_key_pair: Arc<NodeKeyPair>,
trusted_nodes: BTreeSet<NodeId>,
}
impl Future for Connect {
type Item = Result<Connection, Error>;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
let (next, result) = match self.state {
ConnectState::TcpConnect(ref mut future) => {
let stream = try_ready!(future.poll());
let handshake = handshake(stream, self.self_key_pair.clone(), self.trusted_nodes.clone());
(ConnectState::Handshake(handshake), Async::NotReady)
},
ConnectState::Handshake(ref mut future) => {
let (stream, result) = try_ready!(future.poll());
let result = match result {
Ok(result) => result,
Err(err) => return Ok(Async::Ready(Err(err))),
};
let connection = Connection {
stream: stream.into(),
address: self.address,
node_id: result.node_id,
key: result.shared_key,
};
(ConnectState::Connected, Async::Ready(Ok(connection)))
},
ConnectState::Connected => panic!("poll Connect after it's done"),
};
self.state = next;
match result {
// by polling again, we register new future
Async::NotReady => self.poll(),<|fim▁hole|> }
}
}<|fim▁end|> | result => Ok(result) |
<|file_name|>example.py<|end_file_name|><|fim▁begin|>from Robinhood import Robinhood
#Setup
my_trader = Robinhood(username="YOUR_USERNAME", password="YOUR_PASSWORD")
#Get stock information
#Note: Sometimes more than one instrument may be returned for a given stock symbol
stock_instrument = my_trader.instruments("GEVO")[0]
#Get a stock's quote
my_trader.print_quote("AAPL")
#Prompt for a symbol
my_trader.print_quote()
#Print multiple symbols
my_trader.print_quotes(stocks=["BBRY", "FB", "MSFT"])
#View all data for a given stock, i.e. ask price and size, bid price and size, previous close, adjusted previous close, etc.
quote_info = my_trader.quote_data("GEVO")<|fim▁hole|>
#Place a sell order
sell_order = my_trader.place_sell_order(stock_instrument, 1)<|fim▁end|> | print(quote_info)
#Place a buy order (uses market bid price)
buy_order = my_trader.place_buy_order(stock_instrument, 1) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
use std::fs;
use std::path::PathBuf;
mod directory_files;
use directory_files::*;
mod file_comparable;
mod directory_comparable;
use directory_comparable::*;
/// The Docopt usage string
const USAGE: &'static str = "
Usage: subset [-q | -v] [-t | -n] [-b] <dir1> <dir2>
subset --help
subset lets you compare two directory structures.
We are going to check whether the files in dir1 are a subset of the files in dir2, regardless of directory structure.
In other words, every file under dir1 must be present somewhere under dir2, no matter where it sits in either directory tree.
There are multiple definitions of file equality that you can specify using flags, but the default is an MD5 hash of the contents of the file. It is conceivable that you can define a custom equality strategy that relies on other parameters, such as file name, subdirectory location, metadata, EXIF data, etc. The possibilities are endless.
Common options:
-h, --help Show this usage message.
-q, --quiet Do not print all mappings.
-v, --verbose Print all mappings.
-t, --trivial Will swap out the MD5 comparison for a trivial comparison (everything is equal). (This is to test extensibility.)
-n, --name Will swap out the MD5 comparison for a filename comparison.
-b, --bidirectional Also check whether dir2 is also a subset of dir1 (essentially, set equality) and print out missing lists for both directories.
";
// We should think about moving away from DocOpt soon since it uses RustcDecodable,
// which is deprecated in favor of serde?
/// Parsing command line arguments here
#[derive(Debug, Deserialize)]
struct Args {
arg_dir1: String,
arg_dir2: String,
flag_quiet: bool,
flag_verbose: bool,
flag_trivial: bool,
flag_name: bool,
flag_bidirectional: bool,
}
/// This should be the UI layer as much as possible-- it parses the command line arguments,
/// hands it off to our business logic, and then collects the answers back and print them.
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("Comparing {} with {}", args.arg_dir1, args.arg_dir2);
// Make sure both of our inputs are valid directories
fs::read_dir(&args.arg_dir1).expect("Directory cannot be read!");
fs::read_dir(&args.arg_dir2).expect("Directory cannot be read!");
// Main logic: using dynamic dispatch
// (I don't feel too bad about boxing here because this is essentially a singleton.)
let mut program: Box<DirectoryComparable> = if args.flag_trivial {
Box::new(TrivialDirectoryComparable {})
} else if args.flag_name {
let filename_comparator = file_comparable::FileNameComparable::new();
Box::new(DirectoryComparableWithFileComparable::new(
filename_comparator,
))
} else {
let md5_comparator = file_comparable::Md5Comparable::new();
Box::new(DirectoryComparableWithFileComparable::new(md5_comparator))
};
let superset_dirpath = PathBuf::from(&args.arg_dir2);
// eww... why do we have to coerce these Box types again?
// (again, only two of these Box types in existence so not so bad...)
let mut superset_iter: Box<Iterator<Item = PathBuf>> =
Box::new(DirectoryFiles::new(&superset_dirpath));
let subset_dirpath = PathBuf::from(&args.arg_dir1);
let mut subset_iter: Box<Iterator<Item = PathBuf>> =
Box::new(DirectoryFiles::new(&subset_dirpath)); // mut needed for .by_ref
if args.flag_bidirectional {
// Run program
let (subset_missing_result, superset_missing_result) =
program.report_missing_bidirectional(&mut subset_iter, &mut superset_iter);
// View layer (printing)
for missing_file in subset_missing_result.iter() {
println!(
"Could not find {} in {}",
missing_file.display(),
superset_dirpath.display()
);
}
println!(
"\nWe are missing {} files in {}\n",
subset_missing_result.len(),
superset_dirpath.display()
);
for missing_file in superset_missing_result.iter() {<|fim▁hole|> subset_dirpath.display()
);
}
println!(
"\nWe are missing {} files in {}",
superset_missing_result.len(),
subset_dirpath.display()
);
} else {
// Run program
let result = program.report_missing(&mut subset_iter, &mut superset_iter);
// View layer (printing)
for missing_file in result.iter() {
println!(
"Could not find {} in {}",
missing_file.display(),
superset_dirpath.display()
);
}
println!(
"\nWe are missing {} files in {}",
result.len(),
superset_dirpath.display()
);
}
}<|fim▁end|> | println!(
"Could not find {} in {}",
missing_file.display(), |
<|file_name|>font_context.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::{Font, FontDescriptor, FontGroup, FontHandleMethods, FontStyle,
SelectorPlatformIdentifier};
use font::{SpecifiedFontStyle, UsedFontStyle};
use font_list::FontList;
use servo_util::cache::{Cache, LRUCache};
use servo_util::time::ProfilerChan;
<|fim▁hole|>
use azure::azure_hl::BackendType;
use std::hashmap::HashMap;
// TODO(Rust #3934): creating lots of new dummy styles is a workaround
// for not being able to store symbolic enums in top-level constants.
pub fn dummy_style() -> FontStyle {
use font::FontWeight300;
return FontStyle {
pt_size: 20.0,
weight: FontWeight300,
italic: false,
oblique: false,
families: ~"serif, sans-serif",
}
}
pub trait FontContextHandleMethods {
fn clone(&self) -> FontContextHandle;
fn create_font_from_identifier(&self, ~str, UsedFontStyle) -> Result<FontHandle, ()>;
}
pub struct FontContext {
instance_cache: LRUCache<FontDescriptor, @mut Font>,
font_list: Option<FontList>, // only needed by layout
group_cache: LRUCache<SpecifiedFontStyle, @FontGroup>,
handle: FontContextHandle,
backend: BackendType,
generic_fonts: HashMap<~str,~str>,
profiler_chan: ProfilerChan,
}
impl<'self> FontContext {
pub fn new(backend: BackendType,
needs_font_list: bool,
profiler_chan: ProfilerChan)
-> FontContext {
let handle = FontContextHandle::new();
let font_list = if needs_font_list {
Some(FontList::new(&handle, profiler_chan.clone())) }
else { None };
// TODO: Allow users to specify these.
let mut generic_fonts = HashMap::with_capacity(5);
generic_fonts.insert(~"serif", ~"Times New Roman");
generic_fonts.insert(~"sans-serif", ~"Arial");
generic_fonts.insert(~"cursive", ~"Apple Chancery");
generic_fonts.insert(~"fantasy", ~"Papyrus");
generic_fonts.insert(~"monospace", ~"Menlo");
FontContext {
instance_cache: LRUCache::new(10),
font_list: font_list,
group_cache: LRUCache::new(10),
handle: handle,
backend: backend,
generic_fonts: generic_fonts,
profiler_chan: profiler_chan,
}
}
fn get_font_list(&'self self) -> &'self FontList {
self.font_list.get_ref()
}
pub fn get_resolved_font_for_style(&mut self, style: &SpecifiedFontStyle) -> @FontGroup {
match self.group_cache.find(style) {
Some(fg) => {
debug!("font group cache hit");
fg
},
None => {
debug!("font group cache miss");
let fg = self.create_font_group(style);
self.group_cache.insert(style.clone(), fg);
fg
}
}
}
pub fn get_font_by_descriptor(&mut self, desc: &FontDescriptor) -> Result<@mut Font, ()> {
match self.instance_cache.find(desc) {
Some(f) => {
debug!("font cache hit");
Ok(f)
},
None => {
debug!("font cache miss");
let result = self.create_font_instance(desc);
match result {
Ok(font) => {
self.instance_cache.insert(desc.clone(), font);
}, _ => {}
};
result
}
}
}
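// Maps a generic CSS family (serif, sans-serif, ...) to a concrete platform font; unknown families pass through unchanged.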
fn transform_family(&self, family: &str) -> ~str {
// FIXME: Need a find_like() in HashMap.
let family = family.to_str();
debug!("(transform family) searching for `%s`", family);
match self.generic_fonts.find(&family) {
None => family,
Some(mapped_family) => (*mapped_family).clone()
}
}
fn create_font_group(&mut self, style: &SpecifiedFontStyle) -> @FontGroup {
let mut fonts = ~[];
debug!("(create font group) --- starting ---");
// TODO(Issue #193): make iteration over 'font-family' more robust.
for family in style.families.split_iter(',') {
let family_name = family.trim();
let transformed_family_name = self.transform_family(family_name);
debug!("(create font group) transformed family is `%s`", transformed_family_name);
let result = do self.font_list.and_then_ref |fl| {
fl.find_font_in_family(transformed_family_name, style)
};
let mut found = false;
for font_entry in result.iter() {
found = true;
let font_id =
SelectorPlatformIdentifier(font_entry.handle.face_identifier());
let font_desc = FontDescriptor::new((*style).clone(), font_id);
let instance = self.get_font_by_descriptor(&font_desc);
for font in instance.iter() { fonts.push(*font); }
};
if !found {
debug!("(create font group) didn't find `%s`", transformed_family_name);
}
}
let last_resort = FontList::get_last_resort_font_families();
for family in last_resort.iter() {
let result = do self.font_list.and_then_ref |fl| {
fl.find_font_in_family(*family, style)
};
for font_entry in result.iter() {
let font_id =
SelectorPlatformIdentifier(font_entry.handle.face_identifier());
let font_desc = FontDescriptor::new((*style).clone(), font_id);
let instance = self.get_font_by_descriptor(&font_desc);
for font in instance.iter() {
fonts.push(*font);
}
}
}
assert!(fonts.len() > 0);
// TODO(Issue #179): Split FontStyle into specified and used styles
let used_style = (*style).clone();
debug!("(create font group) --- finished ---");
@FontGroup::new(style.families.to_managed(), &used_style, fonts)
}
fn create_font_instance(&self, desc: &FontDescriptor) -> Result<@mut Font, ()> {
return match &desc.selector {
// TODO(Issue #174): implement by-platform-name font selectors.
&SelectorPlatformIdentifier(ref identifier) => {
let result_handle = self.handle.create_font_from_identifier((*identifier).clone(),
desc.style.clone());
do result_handle.and_then |handle| {
Ok(Font::new_from_adopted_handle(self,
handle,
&desc.style,
self.backend,
self.profiler_chan.clone()))
}
}
};
}
}<|fim▁end|> | use platform::font::FontHandle;
use platform::font_context::FontContextHandle; |
<|file_name|>test_metric_service_client_v3.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pytest
from google.api import metric_pb2 as api_metric_pb2
from google.api import monitored_resource_pb2
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import enums
from google.cloud.monitoring_v3.proto import common_pb2
from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2
from google.cloud.monitoring_v3.proto import metric_service_pb2
from google.protobuf import empty_pb2
class MultiCallableStub(object):
"""Stub for the grpc.UnaryUnaryMultiCallable interface."""
def __init__(self, method, channel_stub):
self.method = method
self.channel_stub = channel_stub
def __call__(self, request, timeout=None, metadata=None, credentials=None):
self.channel_stub.requests.append((self.method, request))
response = None
if self.channel_stub.responses:
response = self.channel_stub.responses.pop()
if isinstance(response, Exception):
raise response
if response:
return response
class ChannelStub(object):
"""Stub for the grpc.Channel interface."""
def __init__(self, responses=[]):
self.responses = responses
self.requests = []
def unary_unary(self, method, request_serializer=None, response_deserializer=None):
return MultiCallableStub(method, self)
class CustomException(Exception):
pass
class TestMetricServiceClient(object):
def test_list_monitored_resource_descriptors(self):
# Setup Expected Response
next_page_token = ""
resource_descriptors_element = {}
resource_descriptors = [resource_descriptors_element]
expected_response = {
"next_page_token": next_page_token,
"resource_descriptors": resource_descriptors,
}
expected_response = metric_service_pb2.ListMonitoredResourceDescriptorsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.project_path("[PROJECT]")
paged_list_response = client.list_monitored_resource_descriptors(name)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.resource_descriptors[0] == resources[0]
assert len(channel.requests) == 1
expected_request = metric_service_pb2.ListMonitoredResourceDescriptorsRequest(
name=name
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_monitored_resource_descriptors_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.project_path("[PROJECT]")
paged_list_response = client.list_monitored_resource_descriptors(name)
with pytest.raises(CustomException):
list(paged_list_response)
def test_get_monitored_resource_descriptor(self):
# Setup Expected Response
name_2 = "name2-1052831874"
type_ = "type3575610"
display_name = "displayName1615086568"
description = "description-1724546052"
expected_response = {
"name": name_2,
"type": type_,
"display_name": display_name,
"description": description,
}
expected_response = monitored_resource_pb2.MonitoredResourceDescriptor(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.monitored_resource_descriptor_path(
"[PROJECT]", "[MONITORED_RESOURCE_DESCRIPTOR]"
)
response = client.get_monitored_resource_descriptor(name)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = metric_service_pb2.GetMonitoredResourceDescriptorRequest(
name=name
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_monitored_resource_descriptor_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.monitored_resource_descriptor_path(
"[PROJECT]", "[MONITORED_RESOURCE_DESCRIPTOR]"
)
with pytest.raises(CustomException):
client.get_monitored_resource_descriptor(name)
def test_list_metric_descriptors(self):
# Setup Expected Response
next_page_token = ""
metric_descriptors_element = {}
metric_descriptors = [metric_descriptors_element]
expected_response = {
"next_page_token": next_page_token,
"metric_descriptors": metric_descriptors,
}
expected_response = metric_service_pb2.ListMetricDescriptorsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel<|fim▁hole|> # Setup Request
name = client.project_path("[PROJECT]")
paged_list_response = client.list_metric_descriptors(name)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.metric_descriptors[0] == resources[0]
assert len(channel.requests) == 1
expected_request = metric_service_pb2.ListMetricDescriptorsRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_metric_descriptors_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.project_path("[PROJECT]")
paged_list_response = client.list_metric_descriptors(name)
with pytest.raises(CustomException):
list(paged_list_response)
def test_get_metric_descriptor(self):
# Setup Expected Response
name_2 = "name2-1052831874"
type_ = "type3575610"
unit = "unit3594628"
description = "description-1724546052"
display_name = "displayName1615086568"
expected_response = {
"name": name_2,
"type": type_,
"unit": unit,
"description": description,
"display_name": display_name,
}
expected_response = api_metric_pb2.MetricDescriptor(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.metric_descriptor_path("[PROJECT]", "[METRIC_DESCRIPTOR]")
response = client.get_metric_descriptor(name)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = metric_service_pb2.GetMetricDescriptorRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_metric_descriptor_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.metric_descriptor_path("[PROJECT]", "[METRIC_DESCRIPTOR]")
with pytest.raises(CustomException):
client.get_metric_descriptor(name)
def test_create_metric_descriptor(self):
# Setup Expected Response
name_2 = "name2-1052831874"
type_ = "type3575610"
unit = "unit3594628"
description = "description-1724546052"
display_name = "displayName1615086568"
expected_response = {
"name": name_2,
"type": type_,
"unit": unit,
"description": description,
"display_name": display_name,
}
expected_response = api_metric_pb2.MetricDescriptor(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.project_path("[PROJECT]")
metric_descriptor = {}
response = client.create_metric_descriptor(name, metric_descriptor)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = metric_service_pb2.CreateMetricDescriptorRequest(
name=name, metric_descriptor=metric_descriptor
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_metric_descriptor_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.project_path("[PROJECT]")
metric_descriptor = {}
with pytest.raises(CustomException):
client.create_metric_descriptor(name, metric_descriptor)
def test_delete_metric_descriptor(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.metric_descriptor_path("[PROJECT]", "[METRIC_DESCRIPTOR]")
client.delete_metric_descriptor(name)
assert len(channel.requests) == 1
expected_request = metric_service_pb2.DeleteMetricDescriptorRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_metric_descriptor_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.metric_descriptor_path("[PROJECT]", "[METRIC_DESCRIPTOR]")
with pytest.raises(CustomException):
client.delete_metric_descriptor(name)
def test_list_time_series(self):
# Setup Expected Response
next_page_token = ""
time_series_element = {}
time_series = [time_series_element]
expected_response = {
"next_page_token": next_page_token,
"time_series": time_series,
}
expected_response = metric_service_pb2.ListTimeSeriesResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.project_path("[PROJECT]")
filter_ = "filter-1274492040"
interval = {}
view = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
paged_list_response = client.list_time_series(name, filter_, interval, view)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.time_series[0] == resources[0]
assert len(channel.requests) == 1
expected_request = metric_service_pb2.ListTimeSeriesRequest(
name=name, filter=filter_, interval=interval, view=view
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_time_series_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.project_path("[PROJECT]")
filter_ = "filter-1274492040"
interval = {}
view = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
paged_list_response = client.list_time_series(name, filter_, interval, view)
with pytest.raises(CustomException):
list(paged_list_response)
def test_create_time_series(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup Request
name = client.project_path("[PROJECT]")
time_series = []
client.create_time_series(name, time_series)
assert len(channel.requests) == 1
expected_request = metric_service_pb2.CreateTimeSeriesRequest(
name=name, time_series=time_series
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_time_series_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = monitoring_v3.MetricServiceClient()
# Setup request
name = client.project_path("[PROJECT]")
time_series = []
with pytest.raises(CustomException):
client.create_time_series(name, time_series)<|fim▁end|> | client = monitoring_v3.MetricServiceClient()
|
<|file_name|>ExFour03.java<|end_file_name|><|fim▁begin|>package com.shumz.think.ex004;
public class ExFour03 {
public ExFour03() {
System.out.println("An instance of ExFour03 was created...");
}
public static void main(String[] args) {
new ExFour03();
}<|fim▁hole|><|fim▁end|> |
} |
<|file_name|>feed_parse_extractVodkatranslationsCom.py<|end_file_name|><|fim▁begin|>def extractVodkatranslationsCom(item):
'''
Parser for 'vodkatranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Ordinary I and Extraordinary Them', 'Ordinary I and Extraordinary Them', 'translated'),
('PRC', 'PRC', 'translated'),<|fim▁hole|> for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False<|fim▁end|> | ('Loiterous', 'Loiterous', 'oel'),
]
|
<|file_name|>TestAndOrSimple.java<|end_file_name|><|fim▁begin|>package com.globalforge.infix;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.globalforge.infix.api.InfixSimpleActions;
import com.google.common.collect.ListMultimap;
/*-
The MIT License (MIT)
Copyright (c) 2019-2020 Global Forge LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
public class TestAndOrSimple {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
}
private ListMultimap<String, String> getResults(String sampleRule) throws Exception {
InfixSimpleActions rules = new InfixSimpleActions(sampleRule);
String result = rules.transformFIXMsg(TestAndOrSimple.sampleMessage1);
return StaticTestingUtils.parseMessage(result);
}
@Test
public void t1() {
try {
String sampleRule = "&45==0 && &47==0 ? &50=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("50").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t2() {
try {
String sampleRule = "&45==1 && &47==0 ? &50=1 : &50=2";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("2", resultStore.get("50").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t3() {
try {
String sampleRule = "&45!=1 && &47==0 ? &50=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("50").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t4() {
try {
String sampleRule = "&45==0 && &47 != 1 ? &50=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("50").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t9() {
try {
String sampleRule = "&45==0 && &47==0 && &48==1.5 ? &45=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t10() {
try {
String sampleRule = "&45==1 && &47==0 && &48==1.5 ? &45=1 : &47=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("1", resultStore.get("47").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t11() {
try {
String sampleRule = "&45==0 && &47==1 && &48==1.5 ? &45=1 : &47=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("1", resultStore.get("47").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t12() {
try {
String sampleRule = "&45==0 && &47==0 && &48==1.6 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
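// Note: t13-t15 indicate that Infix evaluates chained && and || strictly left to right (see the "left to right" comment in t34) rather than giving && higher precedence.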
@Test
public void t13() {
try {
String sampleRule = "&45==0 || &47==0 && &48==1.6 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t14() {
try {
String sampleRule = "&45==0 && &47==0 || &48==1.6 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t15() {
try {
String sampleRule = "&45==0 || &47==0 && &48==1.6 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t16() {
try {
String sampleRule = "(&45==0 || &47==0) && (&48==1.6) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));<|fim▁hole|> Assert.fail();
}
}
@Test
public void t17() {
try {
String sampleRule = "&45==0 || (&47==0 && &48==1.6) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("0", resultStore.get("47").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t18() {
try {
String sampleRule = "^&45 && ^&47 && ^&48 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t19() {
try {
String sampleRule = "^&45 && ^&47 && ^&50 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t20() {
try {
String sampleRule = "^&45 || ^&47 || ^&50 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t21() {
try {
String sampleRule = "!&50 && !&51 && !&52 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t22() {
try {
String sampleRule = "^&45 || !&51 && !&52 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t23() {
try {
String sampleRule = "(^&45 || !&51) && !&52 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t24() {
try {
String sampleRule = "^&45 || (!&51 && !&52) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t25() {
try {
String sampleRule = "!&50 || !&45 && !&52 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t26() {
try {
String sampleRule = "(!&50 || !&45) && !&52 ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t27() {
try {
String sampleRule = "!&50 || (!&45 && !&52) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t28() {
try {
String sampleRule = "!&55 && (!&54 && (!&53 && (!&47 && !&52))) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t29() {
try {
String sampleRule = "!&55 && (!&54 && (!&53 && (!&56 && !&52))) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t30() {
try {
String sampleRule = "(!&55 || (!&54 || (!&53 || (!&52 && !&47)))) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t31() {
try {
String sampleRule = "((((!&55 || !&54) || !&53) || !&52) && !&47) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("0", resultStore.get("45").get(0));
Assert.assertEquals("1", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t32() {
try {
String sampleRule = "(&382[1]->&655!=\"tarz\" || (&382[0]->&655==\"fubi\" "
+ "|| (&382[1]->&375==3 || (&382 >= 2 || (&45 > -1 || (&48 <=1.5 && &47 < 0.0001)))))) ? &45=1 : &48=1";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("45").get(0));
Assert.assertEquals("1.5", resultStore.get("48").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t34() {
try {
// left to right
String sampleRule = "&45 == 0 || &43 == -100 && &207 == \"USA\" ? &43=1 : &43=2";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("2", resultStore.get("43").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
@Test
public void t35() {
try {
String sampleRule = "&45 == 0 || (&43 == -100 && &207 == \"USA\") ? &43=1 : &43=2";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("1", resultStore.get("43").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
static final String sampleMessage1 = "8=FIX.4.4" + '\u0001' + "9=1000" + '\u0001' + "35=8"
+ '\u0001' + "44=3.142" + '\u0001' + "60=20130412-19:30:00.686" + '\u0001' + "75=20130412"
+ '\u0001' + "45=0" + '\u0001' + "47=0" + '\u0001' + "48=1.5" + '\u0001' + "49=8dhosb"
+ '\u0001' + "382=2" + '\u0001' + "375=1.5" + '\u0001' + "655=fubi" + '\u0001' + "375=3"
+ '\u0001' + "655=yubl" + '\u0001' + "10=004";
@Test
public void t36() {
try {
// 45=0,
String sampleRule = "(&45 == 0 || &43 == -100) && &207 == \"USA\" ? &43=1 : &43=2";
ListMultimap<String, String> resultStore = getResults(sampleRule);
Assert.assertEquals("2", resultStore.get("43").get(0));
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
}
}<|fim▁end|> | } catch (Exception e) {
e.printStackTrace(); |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(optin_builtin_traits)]
#![feature(plugin)]
#![feature(panic_handler)]
#![feature(reflect_marker)]
#![feature(step_by)]
#![plugin(heapsize_plugin, plugins, serde_macros)]
#![deny(unsafe_code)]
extern crate app_units;
extern crate backtrace;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate bitflags;
extern crate deque;
extern crate euclid;
extern crate getopts;
extern crate heapsize;
extern crate ipc_channel;
#[cfg(feature = "non-geckolib")]
extern crate js;
#[allow(unused_extern_crates)]
#[macro_use]
extern crate lazy_static;
extern crate libc;
#[macro_use]
extern crate log;
extern crate num;
extern crate num_cpus;
extern crate rand;
extern crate rustc_serialize;
extern crate serde;
extern crate smallvec;
extern crate string_cache;
extern crate url;
use std::sync::Arc;
pub mod cache;
#[allow(unsafe_code)]
pub mod debug_utils;
pub mod geometry;
#[allow(unsafe_code)]
pub mod ipc;
pub mod linked_list;
#[cfg(feature = "non-geckolib")]
#[allow(unsafe_code)]
pub mod non_geckolib;
#[allow(unsafe_code)]
pub mod opts;
pub mod panicking;
#[allow(unsafe_code)]
pub mod prefs;
pub mod print_tree;
#[allow(unsafe_code)]
pub mod resource_files;
#[allow(unsafe_code)]
pub mod str;
pub mod thread;
pub mod thread_state;
pub mod tid;<|fim▁hole|>#[allow(unsafe_code)]
pub mod workqueue;
#[allow(unsafe_code)]
pub fn breakpoint() {
unsafe { ::std::intrinsics::breakpoint() };
}
// Workaround for lack of `ptr_eq` on Arcs...
#[inline]
pub fn arc_ptr_eq<T: 'static + Send + Sync>(a: &Arc<T>, b: &Arc<T>) -> bool {
let a: &T = &**a;
let b: &T = &**b;
(a as *const T) == (b as *const T)
}<|fim▁end|> | pub mod time;
pub mod vec; |
<|file_name|>forInStatement2.ts<|end_file_name|><|fim▁begin|>var expr: number;
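// 'expr' is a number, so this for-in is expected to be rejected by the type checker.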
for (var a in expr) {<|fim▁hole|><|fim▁end|> | } |
<|file_name|>BeanTypeTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import com.oracle.java.testlibrary.Asserts;
import java.lang.management.MemoryType;
import sun.hotspot.code.BlobType;
/**
* @test BeanTypeTest
* @library /testlibrary /../../test/lib
* @modules java.management
* @build BeanTypeTest<|fim▁hole|> * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions
* -XX:+WhiteBoxAPI -XX:+SegmentedCodeCache BeanTypeTest
* @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions
* -XX:+WhiteBoxAPI -XX:-SegmentedCodeCache BeanTypeTest
* @summary verify types of code cache memory pool bean
*/
public class BeanTypeTest {
public static void main(String args[]) {
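// Every available code cache segment must expose its memory pool as NON_HEAP.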
for (BlobType bt : BlobType.getAvailable()) {
Asserts.assertEQ(MemoryType.NON_HEAP, bt.getMemoryPool().getType());
}
}
}<|fim▁end|> | * @run main ClassFileInstaller sun.hotspot.WhiteBox
* sun.hotspot.WhiteBox$WhiteBoxPermission |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
import os
from setuptools import setup
readme = os.path.join(os.path.dirname(__file__), 'README.md')
setup(name = 'bottleneck',
version = '0.1.0',
description = 'performance report generator for OpenMP programs in GNU/Linux',
long_description = open(readme).read(),
author = 'Andres More',
author_email='[email protected]',
url='https://github.com/moreandres/bottleneck.git',
packages= [ 'bottleneck' ],
entry_points = { 'console_scripts': [ 'bt = bottleneck.bottleneck:main' ] },
data_files = [ ( 'config', [ 'cfg/bt.cfg', 'cfg/bt.tex' ] ) ],
classifiers = [
'Development Status :: 1 - Planning',
'Environment :: Console',
'Intended Audience :: Developers',<|fim▁hole|> 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Operating System :: POSIX',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Quality Assurance',
'Topic :: System :: Benchmark',
'Topic :: Utilities',
],
zip_safe = False,
test_suite = 'tests',
# include_package_data = True,
# install_requires=[ 'numpy', 'scipy', 'matplotlib' ],
)<|fim▁end|> | |
<|file_name|>pool.go<|end_file_name|><|fim▁begin|>// Copyright ©2014 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package mat
import (
"sync"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
)
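// tab64 is the de Bruijn-style lookup table used by bits below to compute the ceiling of log2.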
var tab64 = [64]byte{
0x3f, 0x00, 0x3a, 0x01, 0x3b, 0x2f, 0x35, 0x02,
0x3c, 0x27, 0x30, 0x1b, 0x36, 0x21, 0x2a, 0x03,
0x3d, 0x33, 0x25, 0x28, 0x31, 0x12, 0x1c, 0x14,
0x37, 0x1e, 0x22, 0x0b, 0x2b, 0x0e, 0x16, 0x04,
0x3e, 0x39, 0x2e, 0x34, 0x26, 0x1a, 0x20, 0x29,
0x32, 0x24, 0x11, 0x13, 0x1d, 0x0a, 0x0d, 0x15,
0x38, 0x2d, 0x19, 0x1f, 0x23, 0x10, 0x09, 0x0c,
0x2c, 0x18, 0x0f, 0x08, 0x17, 0x07, 0x06, 0x05,
}
// bits returns the ceiling of base 2 log of v.
// Approach based on http://stackoverflow.com/a/11398748.
func bits(v uint64) byte {
if v == 0 {
return 0
}
v <<= 2
v--
v |= v >> 1
v |= v >> 2
v |= v >> 4
v |= v >> 8
v |= v >> 16
v |= v >> 32
return tab64[((v-(v>>1))*0x07EDD5E59A4E28C2)>>58] - 1
}
var (
// pool contains size stratified workspace Dense pools.
// Each pool element i returns sized matrices with a data
// slice capped at 1<<i.
pool [63]sync.Pool
// poolSym is the SymDense equivalent of pool.
poolSym [63]sync.Pool
// poolTri is the TriDense equivalent of pool.
poolTri [63]sync.Pool
// poolVec is the VecDense equivalent of pool.
poolVec [63]sync.Pool
// poolFloats is the []float64 equivalent of pool.
poolFloats [63]sync.Pool
// poolInts is the []int equivalent of pool.
poolInts [63]sync.Pool
)
func init() {
for i := range pool {
l := 1 << uint(i)
pool[i].New = func() interface{} {
return &Dense{mat: blas64.General{
Data: make([]float64, l),
}}
}
poolSym[i].New = func() interface{} {
return &SymDense{mat: blas64.Symmetric{
Uplo: blas.Upper,
Data: make([]float64, l),
}}
}
poolTri[i].New = func() interface{} {
return &TriDense{mat: blas64.Triangular{
Data: make([]float64, l),
}}<|fim▁hole|> Inc: 1,
Data: make([]float64, l),
}}
}
poolFloats[i].New = func() interface{} {
return make([]float64, l)
}
poolInts[i].New = func() interface{} {
return make([]int, l)
}
}
}
// getWorkspace returns a *Dense of size r×c and a data slice
// with a cap that is less than 2*r*c. If clear is true, the
// data slice visible through the Matrix interface is zeroed.
func getWorkspace(r, c int, clear bool) *Dense {
l := uint64(r * c)
w := pool[bits(l)].Get().(*Dense)
w.mat.Data = w.mat.Data[:l]
if clear {
zero(w.mat.Data)
}
w.mat.Rows = r
w.mat.Cols = c
w.mat.Stride = c
w.capRows = r
w.capCols = c
return w
}
// putWorkspace replaces a used *Dense into the appropriate size
// workspace pool. putWorkspace must not be called with a matrix
// where references to the underlying data slice have been kept.
func putWorkspace(w *Dense) {
pool[bits(uint64(cap(w.mat.Data)))].Put(w)
}
// getWorkspaceSym returns a *SymDense of size n and a cap that
// is less than 2*n. If clear is true, the data slice visible
// through the Matrix interface is zeroed.
func getWorkspaceSym(n int, clear bool) *SymDense {
l := uint64(n)
l *= l
s := poolSym[bits(l)].Get().(*SymDense)
s.mat.Data = s.mat.Data[:l]
if clear {
zero(s.mat.Data)
}
s.mat.N = n
s.mat.Stride = n
s.cap = n
return s
}
// putWorkspaceSym replaces a used *SymDense into the appropriate size
// workspace pool. putWorkspaceSym must not be called with a matrix
// where references to the underlying data slice have been kept.
func putWorkspaceSym(s *SymDense) {
poolSym[bits(uint64(cap(s.mat.Data)))].Put(s)
}
// getWorkspaceTri returns a *TriDense of size n and a cap that
// is less than 2*n. If clear is true, the data slice visible
// through the Matrix interface is zeroed.
func getWorkspaceTri(n int, kind TriKind, clear bool) *TriDense {
l := uint64(n)
l *= l
t := poolTri[bits(l)].Get().(*TriDense)
t.mat.Data = t.mat.Data[:l]
if clear {
zero(t.mat.Data)
}
t.mat.N = n
t.mat.Stride = n
if kind == Upper {
t.mat.Uplo = blas.Upper
} else if kind == Lower {
t.mat.Uplo = blas.Lower
} else {
panic(ErrTriangle)
}
t.mat.Diag = blas.NonUnit
t.cap = n
return t
}
// putWorkspaceTri replaces a used *TriDense into the appropriate size
// workspace pool. putWorkspaceTri must not be called with a matrix
// where references to the underlying data slice have been kept.
func putWorkspaceTri(t *TriDense) {
poolTri[bits(uint64(cap(t.mat.Data)))].Put(t)
}
// getWorkspaceVec returns a *VecDense of length n and a cap that
// is less than 2*n. If clear is true, the data slice visible
// through the Matrix interface is zeroed.
func getWorkspaceVec(n int, clear bool) *VecDense {
l := uint64(n)
v := poolVec[bits(l)].Get().(*VecDense)
v.mat.Data = v.mat.Data[:l]
if clear {
zero(v.mat.Data)
}
v.n = n
return v
}
// putWorkspaceVec replaces a used *VecDense into the appropriate size
// workspace pool. putWorkspaceVec must not be called with a matrix
// where references to the underlying data slice have been kept.
func putWorkspaceVec(v *VecDense) {
poolVec[bits(uint64(cap(v.mat.Data)))].Put(v)
}
// getFloats returns a []float64 of length l and a cap that is
// less than 2*l. If clear is true, the slice visible is zeroed.
func getFloats(l int, clear bool) []float64 {
w := poolFloats[bits(uint64(l))].Get().([]float64)
w = w[:l]
if clear {
zero(w)
}
return w
}
// putFloats replaces a used []float64 into the appropriate size
// workspace pool. putFloats must not be called with a slice
// where references to the underlying data have been kept.
func putFloats(w []float64) {
poolFloats[bits(uint64(cap(w)))].Put(w)
}
// getInts returns a []ints of length l and a cap that is
// less than 2*l. If clear is true, the slice visible is zeroed.
func getInts(l int, clear bool) []int {
w := poolInts[bits(uint64(l))].Get().([]int)
w = w[:l]
if clear {
for i := range w {
w[i] = 0
}
}
return w
}
// putInts replaces a used []int into the appropriate size
// workspace pool. putInts must not be called with a slice
// where references to the underlying data have been kept.
func putInts(w []int) {
poolInts[bits(uint64(cap(w)))].Put(w)
}<|fim▁end|> | }
poolVec[i].New = func() interface{} {
return &VecDense{mat: blas64.Vector{ |
<|file_name|>Errores.java<|end_file_name|><|fim▁begin|>package es.com.blogspot.elblogdepicodev.activiti;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.ProcessEngineConfiguration;
import org.activiti.engine.ProcessEngines;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.runtime.ProcessInstance;
import org.h2.tools.Server;
import es.com.blogspot.elblogdepicodev.activiti.misc.Producto;
public class Errores {
public static void main(String[] args) throws Exception {
Server server = null;
try {
server = Server.createTcpServer().start();
ProcessEngines.init();
ProcessEngine processEngine = ProcessEngineConfiguration.createProcessEngineConfigurationFromResource("activiti-mysql.cfg.xml").buildProcessEngine();
RuntimeService runtimeService = processEngine.getRuntimeService();
RepositoryService repositoryService = processEngine.getRepositoryService();
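// Deploy the BPMN definition, then start an instance of the "errores" process with the product as a variable.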
repositoryService.createDeployment().addClasspathResource("bpmn/Errores.bpmn20.xml").deploy();
Producto producto = new Producto("Arch Linux T-Shirt", 10L);
Map variables = new HashMap();<|fim▁hole|> ProcessInstance pi = runtimeService.startProcessInstanceByKey("errores", variables);
System.out.println(MessageFormat.format("Las nuevas existencias de {0} son {1}", producto.getNombre(), producto.getExistencias()));
} finally {
ProcessEngines.destroy();
if (server != null)
server.stop();
}
}
}<|fim▁end|> | variables.put("producto", producto);
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2010-2012 Roberto Longobardi
#
# This file is part of the Test Manager plugin for Trac.
#
# The Test Manager plugin for Trac is free software: you can
# redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later
# version.
#
# The Test Manager plugin for Trac is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Test Manager plugin for Trac. See the file LICENSE.txt.
# If not, see <http://www.gnu.org/licenses/>.
#
from setuptools import setup
setup(
name='TracGenericWorkflow',
version='1.0.4',
packages=['tracgenericworkflow','tracgenericworkflow.upgrades'],
package_data={'tracgenericworkflow' : ['*.txt', 'templates/*.html', 'htdocs/*.*', 'htdocs/js/*.js', 'htdocs/css/*.css', 'htdocs/images/*.*']},
author = 'Roberto Longobardi',
author_email='[email protected]',
license='GPL v. 3. See the file LICENSE.txt contained in the package.',<|fim▁hole|> download_url='https://sourceforge.net/projects/testman4trac/files/',
description='Test management plugin for Trac - Generic Workflow Engine component',
long_description='A Trac plugin to create Test Cases, organize them in catalogs and track their execution status and outcome. This module provides a generic workflow engine working on any Trac Resource.',
keywords='trac plugin test case management workflow engine resource project quality assurance statistics stats charts charting graph',
entry_points = {'trac.plugins': ['tracgenericworkflow = tracgenericworkflow']},
dependency_links=['http://svn.edgewall.org/repos/genshi/trunk#egg=Genshi-dev', 'http://trac-hacks.org/wiki/TestManagerForTracPluginGenericClass'],
install_requires=['Genshi >= 0.6', 'TracGenericClass >= 1.1.5']
)<|fim▁end|> | url='http://trac-hacks.org/wiki/TestManagerForTracPlugin', |
<|file_name|>request_test.go<|end_file_name|><|fim▁begin|>package request_test
import (
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"reflect"
"runtime"
"strconv"
"strings"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/corehandlers"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/awstesting"
"github.com/aws/aws-sdk-go/awstesting/unit"
"github.com/aws/aws-sdk-go/private/protocol/jsonrpc"
"github.com/aws/aws-sdk-go/private/protocol/rest"
"github.com/aws/aws-sdk-go/aws/defaults"
)
type testData struct {
Data string
}
func body(str string) io.ReadCloser {
return ioutil.NopCloser(bytes.NewReader([]byte(str)))
}
func unmarshal(req *request.Request) {
defer req.HTTPResponse.Body.Close()
if req.Data != nil {
json.NewDecoder(req.HTTPResponse.Body).Decode(req.Data)
}
}
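// unmarshalError maps a mock JSON error body ({"__type": ..., "message": ...}) onto req.Error as an awserr.RequestFailure.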
func unmarshalError(req *request.Request) {
bodyBytes, err := ioutil.ReadAll(req.HTTPResponse.Body)
if err != nil {
req.Error = awserr.New("UnmarshaleError", req.HTTPResponse.Status, err)
return
}
if len(bodyBytes) == 0 {
req.Error = awserr.NewRequestFailure(
awserr.New("UnmarshaleError", req.HTTPResponse.Status, fmt.Errorf("empty body")),
req.HTTPResponse.StatusCode,
"",
)
return
}
var jsonErr jsonErrorResponse
if err := json.Unmarshal(bodyBytes, &jsonErr); err != nil {
req.Error = awserr.New("UnmarshaleError", "JSON unmarshal", err)
return
}
req.Error = awserr.NewRequestFailure(
awserr.New(jsonErr.Code, jsonErr.Message, nil),
req.HTTPResponse.StatusCode,
"",
)
}
type jsonErrorResponse struct {
Code string `json:"__type"`
Message string `json:"message"`
}
// test that retries occur for 5xx status codes
func TestRequestRecoverRetry5xx(t *testing.T) {
reqNum := 0
reqs := []http.Response{
{StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
{StatusCode: 501, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
{StatusCode: 200, Body: body(`{"data":"valid"}`)},
}
s := awstesting.NewClient(aws.NewConfig().WithMaxRetries(10))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &reqs[reqNum]
reqNum++
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err != nil {
t.Fatalf("expect no error, but got %v", err)
}
if e, a := 2, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if e, a := "valid", out.Data; e != a {
t.Errorf("expect %q output got %q", e, a)
}
}
// test that retries occur for 4xx status codes with a response type that can be retried - see `shouldRetry`
func TestRequestRecoverRetry4xxRetryable(t *testing.T) {
reqNum := 0
reqs := []http.Response{
{StatusCode: 400, Body: body(`{"__type":"Throttling","message":"Rate exceeded."}`)},
{StatusCode: 400, Body: body(`{"__type":"ProvisionedThroughputExceededException","message":"Rate exceeded."}`)},
{StatusCode: 429, Body: body(`{"__type":"FooException","message":"Rate exceeded."}`)},
{StatusCode: 200, Body: body(`{"data":"valid"}`)},
}
s := awstesting.NewClient(aws.NewConfig().WithMaxRetries(10))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &reqs[reqNum]
reqNum++
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err != nil {
t.Fatalf("expect no error, but got %v", err)
}
if e, a := 3, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if e, a := "valid", out.Data; e != a {
t.Errorf("expect %q output got %q", e, a)
}
}
// test that retries don't occur for 4xx status codes with a response type that can't be retried
func TestRequest4xxUnretryable(t *testing.T) {
s := awstesting.NewClient(aws.NewConfig().WithMaxRetries(10))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &http.Response{StatusCode: 401, Body: body(`{"__type":"SignatureDoesNotMatch","message":"Signature does not match."}`)}
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err == nil {
t.Fatalf("expect error, but did not get one")
}
aerr := err.(awserr.RequestFailure)
if e, a := 401, aerr.StatusCode(); e != a {
t.Errorf("expect %d status code, got %d", e, a)
}
if e, a := "SignatureDoesNotMatch", aerr.Code(); e != a {
t.Errorf("expect %q error code, got %q", e, a)
}
if e, a := "Signature does not match.", aerr.Message(); e != a {
t.Errorf("expect %q error message, got %q", e, a)
}
if e, a := 0, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
}
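// test that the request fails once the configured retries are exhausted, with exponential backoff delays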
func TestRequestExhaustRetries(t *testing.T) {
delays := []time.Duration{}
sleepDelay := func(delay time.Duration) {
delays = append(delays, delay)
}
reqNum := 0
reqs := []http.Response{
{StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
}
s := awstesting.NewClient(aws.NewConfig().WithSleepDelay(sleepDelay))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &reqs[reqNum]
reqNum++
})
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, nil)
err := r.Send()
if err == nil {
t.Fatalf("expect error, but did not get one")
}
aerr := err.(awserr.RequestFailure)
if e, a := 500, aerr.StatusCode(); e != a {
t.Errorf("expect %d status code, got %d", e, a)
}
if e, a := "UnknownError", aerr.Code(); e != a {
t.Errorf("expect %q error code, got %q", e, a)
}
if e, a := "An error occurred.", aerr.Message(); e != a {
t.Errorf("expect %q error message, got %q", e, a)
}
if e, a := 3, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
expectDelays := []struct{ min, max time.Duration }{{30, 59}, {60, 118}, {120, 236}}
for i, v := range delays {
min := expectDelays[i].min * time.Millisecond
max := expectDelays[i].max * time.Millisecond
if !(min <= v && v <= max) {
t.Errorf("Expect delay to be within range, i:%d, v:%s, min:%s, max:%s",
i, v, min, max)
}
}
}
// test that the request is retried after the credentials are expired.
func TestRequestRecoverExpiredCreds(t *testing.T) {
reqNum := 0
reqs := []http.Response{
{StatusCode: 400, Body: body(`{"__type":"ExpiredTokenException","message":"expired token"}`)},
{StatusCode: 200, Body: body(`{"data":"valid"}`)},
}
s := awstesting.NewClient(&aws.Config{MaxRetries: aws.Int(10), Credentials: credentials.NewStaticCredentials("AKID", "SECRET", "")})
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
credExpiredBeforeRetry := false
credExpiredAfterRetry := false
s.Handlers.AfterRetry.PushBack(func(r *request.Request) {
credExpiredAfterRetry = r.Config.Credentials.IsExpired()
})
s.Handlers.Sign.Clear()
s.Handlers.Sign.PushBack(func(r *request.Request) {
r.Config.Credentials.Get()
})
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &reqs[reqNum]
reqNum++
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
if credExpiredBeforeRetry {
t.Errorf("Expect valid creds before retry check")
}
if !credExpiredAfterRetry {
t.Errorf("Expect expired creds after retry check")
}
if s.Config.Credentials.IsExpired() {
t.Errorf("Expect valid creds after cred expired recovery")
}
if e, a := 1, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if e, a := "valid", out.Data; e != a {
t.Errorf("expect %q output got %q", e, a)
}
}
func TestMakeAddtoUserAgentHandler(t *testing.T) {
fn := request.MakeAddToUserAgentHandler("name", "version", "extra1", "extra2")
r := &request.Request{HTTPRequest: &http.Request{Header: http.Header{}}}
r.HTTPRequest.Header.Set("User-Agent", "foo/bar")
fn(r)
if e, a := "foo/bar name/version (extra1; extra2)", r.HTTPRequest.Header.Get("User-Agent"); e != a {
t.Errorf("expect %q user agent, got %q", e, a)
}
}
func TestMakeAddtoUserAgentFreeFormHandler(t *testing.T) {
fn := request.MakeAddToUserAgentFreeFormHandler("name/version (extra1; extra2)")
r := &request.Request{HTTPRequest: &http.Request{Header: http.Header{}}}
r.HTTPRequest.Header.Set("User-Agent", "foo/bar")
fn(r)
if e, a := "foo/bar name/version (extra1; extra2)", r.HTTPRequest.Header.Get("User-Agent"); e != a {
t.Errorf("expect %q user agent, got %q", e, a)
}
}
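// test that the default SDK user agent is appended to a pre-existing User-Agent header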
func TestRequestUserAgent(t *testing.T) {
s := awstesting.NewClient(&aws.Config{Region: aws.String("us-east-1")})
// s.Handlers.Validate.Clear()
req := s.NewRequest(&request.Operation{Name: "Operation"}, nil, &testData{})
req.HTTPRequest.Header.Set("User-Agent", "foo/bar")
if err := req.Build(); err != nil {
t.Fatalf("expect no error, got %v", err)
}
expectUA := fmt.Sprintf("foo/bar %s/%s (%s; %s; %s)",
aws.SDKName, aws.SDKVersion, runtime.Version(), runtime.GOOS, runtime.GOARCH)
if e, a := expectUA, req.HTTPRequest.Header.Get("User-Agent"); e != a {
t.Errorf("expect %q user agent, got %q", e, a)
}
}
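// test that throttle errors are retried using the longer throttle backoff delays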
func TestRequestThrottleRetries(t *testing.T) {
delays := []time.Duration{}
sleepDelay := func(delay time.Duration) {
delays = append(delays, delay)
}
reqNum := 0
reqs := []http.Response{
{StatusCode: 500, Body: body(`{"__type":"Throttling","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"Throttling","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"Throttling","message":"An error occurred."}`)},
{StatusCode: 500, Body: body(`{"__type":"Throttling","message":"An error occurred."}`)},
}
s := awstesting.NewClient(aws.NewConfig().WithSleepDelay(sleepDelay))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = &reqs[reqNum]
reqNum++
})
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, nil)
err := r.Send()
if err == nil {
t.Fatalf("expect error, but did not get one")
}
aerr := err.(awserr.RequestFailure)
if e, a := 500, aerr.StatusCode(); e != a {
t.Errorf("expect %d status code, got %d", e, a)
}
if e, a := "Throttling", aerr.Code(); e != a {
t.Errorf("expect %q error code, got %q", e, a)
}
if e, a := "An error occurred.", aerr.Message(); e != a {
t.Errorf("expect %q error message, got %q", e, a)
}
if e, a := 3, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
expectDelays := []struct{ min, max time.Duration }{{500, 999}, {1000, 1998}, {2000, 3996}}
for i, v := range delays {
min := expectDelays[i].min * time.Millisecond
max := expectDelays[i].max * time.Millisecond
if !(min <= v && v <= max) {
t.Errorf("Expect delay to be within range, i:%d, v:%s, min:%s, max:%s",
i, v, min, max)
}
}
}
// test that retries occur for request timeouts when response.Body can be nil
func TestRequestRecoverTimeoutWithNilBody(t *testing.T) {
reqNum := 0
reqs := []*http.Response{
{StatusCode: 0, Body: nil}, // body can be nil when requests time out
{StatusCode: 200, Body: body(`{"data":"valid"}`)},
}
errors := []error{
errTimeout, nil,
}
s := awstesting.NewClient(aws.NewConfig().WithMaxRetries(10))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.AfterRetry.Clear() // force retry on all errors
s.Handlers.AfterRetry.PushBack(func(r *request.Request) {
if r.Error != nil {
r.Error = nil
r.Retryable = aws.Bool(true)
r.RetryCount++
}
})
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = reqs[reqNum]
r.Error = errors[reqNum]
reqNum++
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err != nil {
t.Fatalf("expect no error, but got %v", err)
}
if e, a := 1, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if e, a := "valid", out.Data; e != a {
t.Errorf("expect %q output got %q", e, a)
}
}
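// test that retries occur for request timeouts when the whole response can be nil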
func TestRequestRecoverTimeoutWithNilResponse(t *testing.T) {
reqNum := 0
reqs := []*http.Response{
nil,
{StatusCode: 200, Body: body(`{"data":"valid"}`)},
}
errors := []error{
errTimeout,
nil,
}
s := awstesting.NewClient(aws.NewConfig().WithMaxRetries(10))
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
s.Handlers.AfterRetry.Clear() // force retry on all errors
s.Handlers.AfterRetry.PushBack(func(r *request.Request) {
if r.Error != nil {
r.Error = nil
r.Retryable = aws.Bool(true)
r.RetryCount++
}
})
s.Handlers.Send.Clear() // mock sending
s.Handlers.Send.PushBack(func(r *request.Request) {
r.HTTPResponse = reqs[reqNum]
r.Error = errors[reqNum]
reqNum++
})
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err != nil {
t.Fatalf("expect no error, but got %v", err)
}
if e, a := 1, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if e, a := "valid", out.Data; e != a {
t.Errorf("expect %q output got %q", e, a)
}
}
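// test that operations with no input body send an empty request body for every HTTP method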
func TestRequest_NoBody(t *testing.T) {
cases := []string{
"GET", "HEAD", "DELETE",
"PUT", "POST", "PATCH",
}
for i, c := range cases {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if v := r.TransferEncoding; len(v) > 0 {
t.Errorf("%d, expect no body sent with Transfer-Encoding, %v", i, v)
}
outMsg := []byte(`{"Value": "abc"}`)
if b, err := ioutil.ReadAll(r.Body); err != nil {
t.Fatalf("%d, expect no error reading request body, got %v", i, err)
} else if n := len(b); n > 0 {
t.Errorf("%d, expect no request body, got %d bytes", i, n)
}
w.Header().Set("Content-Length", strconv.Itoa(len(outMsg)))
if _, err := w.Write(outMsg); err != nil {
t.Fatalf("%d, expect no error writing server response, got %v", i, err)
}
}))
s := awstesting.NewClient(&aws.Config{
Region: aws.String("mock-region"),
MaxRetries: aws.Int(0),
Endpoint: aws.String(server.URL),
DisableSSL: aws.Bool(true),
})
s.Handlers.Build.PushBack(rest.Build)
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
in := struct {
Bucket *string `location:"uri" locationName:"bucket"`
Key *string `location:"uri" locationName:"key"`
}{
Bucket: aws.String("mybucket"), Key: aws.String("myKey"),
}
out := struct {
Value *string
}{}
r := s.NewRequest(&request.Operation{
Name: "OpName", HTTPMethod: c, HTTPPath: "/{bucket}/{key+}",
}, &in, &out)
if err := r.Send(); err != nil {
t.Fatalf("%d, expect no error sending request, got %v", i, err)
}
}
}
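// test which serialization errors are considered retryable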
func TestIsSerializationErrorRetryable(t *testing.T) {
testCases := []struct {
err error
expected bool
}{
{
err: awserr.New(request.ErrCodeSerialization, "foo error", nil),
expected: false,
},
{
err: awserr.New("ErrFoo", "foo error", nil),
expected: false,
},
{
err: nil,
expected: false,
},
{
err: awserr.New(request.ErrCodeSerialization, "foo error", stubConnectionResetError),
expected: true,
},
}
for i, c := range testCases {
r := &request.Request{
Error: c.err,
}
if r.IsErrorRetryable() != c.expected {
t.Errorf("Case %d: Expected %v, but received %v", i+1, c.expected, !c.expected)
}
}
}
func TestWithLogLevel(t *testing.T) {
r := &request.Request{}
opt := request.WithLogLevel(aws.LogDebugWithHTTPBody)
r.ApplyOptions(opt)
if !r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) {
t.Errorf("expect log level to be set, but was not, %v",
r.Config.LogLevel.Value())
}
}
func TestWithGetResponseHeader(t *testing.T) {
r := &request.Request{}
var val, val2 string
r.ApplyOptions(
request.WithGetResponseHeader("x-a-header", &val),
request.WithGetResponseHeader("x-second-header", &val2),
)
r.HTTPResponse = &http.Response{
Header: func() http.Header {
h := http.Header{}
h.Set("x-a-header", "first")
h.Set("x-second-header", "second")
return h
}(),
}
r.Handlers.Complete.Run(r)
if e, a := "first", val; e != a {
t.Errorf("expect %q header value got %q", e, a)
}
if e, a := "second", val2; e != a {
t.Errorf("expect %q header value got %q", e, a)
}
}
func TestWithGetResponseHeaders(t *testing.T) {
r := &request.Request{}
var headers http.Header
opt := request.WithGetResponseHeaders(&headers)
r.ApplyOptions(opt)
r.HTTPResponse = &http.Response{
Header: func() http.Header {
h := http.Header{}
h.Set("x-a-header", "headerValue")
return h
}(),
}
r.Handlers.Complete.Run(r)
if e, a := "headerValue", headers.Get("x-a-header"); e != a {
t.Errorf("expect %q header value got %q", e, a)
}
}
type connResetCloser struct {
}
func (rc *connResetCloser) Read(b []byte) (int, error) {
return 0, stubConnectionResetError
}
func (rc *connResetCloser) Close() error {
return nil
}
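// test that a connection reset while reading the response body surfaces as a SerializationError after retries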
func TestSerializationErrConnectionReset(t *testing.T) {
count := 0
handlers := request.Handlers{}
handlers.Send.PushBack(func(r *request.Request) {
count++
r.HTTPResponse = &http.Response{}
r.HTTPResponse.Body = &connResetCloser{}
})
handlers.Sign.PushBackNamed(v4.SignRequestHandler)
handlers.Build.PushBackNamed(jsonrpc.BuildHandler)
handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler)
handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler)
handlers.UnmarshalError.PushBackNamed(jsonrpc.UnmarshalErrorHandler)
handlers.AfterRetry.PushBackNamed(corehandlers.AfterRetryHandler)
op := &request.Operation{
Name: "op",
HTTPMethod: "POST",
HTTPPath: "/",
}
meta := metadata.ClientInfo{
ServiceName: "fooService",
SigningName: "foo",
SigningRegion: "foo",
Endpoint: "localhost",
APIVersion: "2001-01-01",
JSONVersion: "1.1",
TargetPrefix: "Foo",
}
cfg := unit.Session.Config.Copy()
cfg.MaxRetries = aws.Int(5)
req := request.New(
*cfg,
meta,
handlers,
client.DefaultRetryer{NumMaxRetries: 5},
op,
&struct {
}{},
&struct {
}{},
)
osErr := stubConnectionResetError
req.ApplyOptions(request.WithResponseReadTimeout(time.Second))
err := req.Send()
if err == nil {
t.Error("Expected rror 'SerializationError', but received nil")
}
if aerr, ok := err.(awserr.Error); ok && aerr.Code() != "SerializationError" {
t.Errorf("Expected 'SerializationError', but received %q", aerr.Code())
} else if !ok {
t.Errorf("Expected 'awserr.Error', but received %v", reflect.TypeOf(err))
} else if aerr.OrigErr().Error() != osErr.Error() {
t.Errorf("Expected %q, but received %q", osErr.Error(), aerr.OrigErr().Error())
}
if count != 6 {
t.Errorf("Expected '6', but received %d", count)
}
}
type testRetryer struct {
shouldRetry bool<|fim▁hole|> return 3
}
// RetryRules returns the delay duration before retrying this request again
func (d *testRetryer) RetryRules(r *request.Request) time.Duration {
return time.Duration(time.Millisecond)
}
func (d *testRetryer) ShouldRetry(r *request.Request) bool {
d.shouldRetry = true
if r.Retryable != nil {
return *r.Retryable
}
if r.HTTPResponse.StatusCode >= 500 {
return true
}
return r.IsErrorRetryable()
}
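// test that EnforceShouldRetryCheck consults the custom retryer's ShouldRetry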
func TestEnforceShouldRetryCheck(t *testing.T) {
tp := &http.Transport{
Proxy: http.ProxyFromEnvironment,
ResponseHeaderTimeout: 1 * time.Millisecond,
}
client := &http.Client{Transport: tp}
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// This server should wait forever. Requests will timeout and the SDK should
// attempt to retry.
select {}
}))
retryer := &testRetryer{}
s := awstesting.NewClient(&aws.Config{
Region: aws.String("mock-region"),
MaxRetries: aws.Int(0),
Endpoint: aws.String(server.URL),
DisableSSL: aws.Bool(true),
Retryer: retryer,
HTTPClient: client,
EnforceShouldRetryCheck: aws.Bool(true),
})
s.Handlers.Validate.Clear()
s.Handlers.Unmarshal.PushBack(unmarshal)
s.Handlers.UnmarshalError.PushBack(unmarshalError)
out := &testData{}
r := s.NewRequest(&request.Operation{Name: "Operation"}, nil, out)
err := r.Send()
if err == nil {
t.Fatalf("expect error, but got nil")
}
if e, a := 3, int(r.RetryCount); e != a {
t.Errorf("expect %d retry count, got %d", e, a)
}
if !retryer.shouldRetry {
t.Errorf("expect 'true' for ShouldRetry, but got %v", retryer.shouldRetry)
}
}
type errReader struct {
err error
}
func (reader *errReader) Read(b []byte) (int, error) {
return 0, reader.err
}
func (reader *errReader) Close() error {
return nil
}
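// test that only request.NoBody is treated as the sentinel empty body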
func TestIsNoBodyReader(t *testing.T) {
cases := []struct {
reader io.ReadCloser
expect bool
}{
{ioutil.NopCloser(bytes.NewReader([]byte("abc"))), false},
{ioutil.NopCloser(bytes.NewReader(nil)), false},
{nil, false},
{request.NoBody, true},
}
for i, c := range cases {
if e, a := c.expect, request.NoBody == c.reader; e != a {
t.Errorf("%d, expect %t match, but was %t", i, e, a)
}
}
}
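// test that a timeout while reading a partial response is surfaced as a temporary serialization error and retried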
func TestRequest_TemporaryRetry(t *testing.T) {
done := make(chan struct{})
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Length", "1024")
w.WriteHeader(http.StatusOK)
w.Write(make([]byte, 100))
f := w.(http.Flusher)
f.Flush()
<-done
}))
client := &http.Client{
Timeout: 100 * time.Millisecond,
}
svc := awstesting.NewClient(&aws.Config{
Region: unit.Session.Config.Region,
MaxRetries: aws.Int(1),
HTTPClient: client,
DisableSSL: aws.Bool(true),
Endpoint: aws.String(server.URL),
})
req := svc.NewRequest(&request.Operation{
Name: "name", HTTPMethod: "GET", HTTPPath: "/path",
}, &struct{}{}, &struct{}{})
req.Handlers.Unmarshal.PushBack(func(r *request.Request) {
defer req.HTTPResponse.Body.Close()
_, err := io.Copy(ioutil.Discard, req.HTTPResponse.Body)
r.Error = awserr.New(request.ErrCodeSerialization, "error", err)
})
err := req.Send()
if err == nil {
t.Errorf("expect error, got none")
}
close(done)
aerr := err.(awserr.Error)
if e, a := request.ErrCodeSerialization, aerr.Code(); e != a {
t.Errorf("expect %q error code, got %q", e, a)
}
if e, a := 1, req.RetryCount; e != a {
t.Errorf("expect %d retries, got %d", e, a)
}
type temporary interface {
Temporary() bool
}
terr := aerr.OrigErr().(temporary)
if !terr.Temporary() {
t.Errorf("expect temporary error, was not")
}
}
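// test Presign and PresignRequest expire validation, NotHoist handling, and signed header return values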
func TestRequest_Presign(t *testing.T) {
presign := func(r *request.Request, expire time.Duration) (string, http.Header, error) {
u, err := r.Presign(expire)
return u, nil, err
}
presignRequest := func(r *request.Request, expire time.Duration) (string, http.Header, error) {
return r.PresignRequest(expire)
}
mustParseURL := func(v string) *url.URL {
u, err := url.Parse(v)
if err != nil {
panic(err)
}
return u
}
cases := []struct {
Expire time.Duration
PresignFn func(*request.Request, time.Duration) (string, http.Header, error)
SignerFn func(*request.Request)
URL string
Header http.Header
Err string
}{
{
PresignFn: presign,
Err: request.ErrCodeInvalidPresignExpire,
},
{
PresignFn: presignRequest,
Err: request.ErrCodeInvalidPresignExpire,
},
{
Expire: -1,
PresignFn: presign,
Err: request.ErrCodeInvalidPresignExpire,
},
{
// Presign clear NotHoist
Expire: 1 * time.Minute,
PresignFn: func(r *request.Request, dur time.Duration) (string, http.Header, error) {
r.NotHoist = true
return presign(r, dur)
},
SignerFn: func(r *request.Request) {
r.HTTPRequest.URL = mustParseURL("https://endpoint/presignedURL")
fmt.Println("url", r.HTTPRequest.URL.String())
if r.NotHoist {
r.Error = fmt.Errorf("expect NotHoist to be cleared")
}
},
URL: "https://endpoint/presignedURL",
},
{
// PresignRequest does not clear NotHoist
Expire: 1 * time.Minute,
PresignFn: func(r *request.Request, dur time.Duration) (string, http.Header, error) {
r.NotHoist = true
return presignRequest(r, dur)
},
SignerFn: func(r *request.Request) {
r.HTTPRequest.URL = mustParseURL("https://endpoint/presignedURL")
if !r.NotHoist {
r.Error = fmt.Errorf("expect NotHoist not to be cleared")
}
},
URL: "https://endpoint/presignedURL",
},
{
// PresignRequest returns signed headers
Expire: 1 * time.Minute,
PresignFn: presignRequest,
SignerFn: func(r *request.Request) {
r.HTTPRequest.URL = mustParseURL("https://endpoint/presignedURL")
r.HTTPRequest.Header.Set("UnsigndHeader", "abc")
r.SignedHeaderVals = http.Header{
"X-Amzn-Header": []string{"abc", "123"},
"X-Amzn-Header2": []string{"efg", "456"},
}
},
URL: "https://endpoint/presignedURL",
Header: http.Header{
"X-Amzn-Header": []string{"abc", "123"},
"X-Amzn-Header2": []string{"efg", "456"},
},
},
}
svc := awstesting.NewClient()
svc.Handlers.Clear()
for i, c := range cases {
req := svc.NewRequest(&request.Operation{
Name: "name", HTTPMethod: "GET", HTTPPath: "/path",
}, &struct{}{}, &struct{}{})
req.Handlers.Sign.PushBack(c.SignerFn)
u, h, err := c.PresignFn(req, c.Expire)
if len(c.Err) != 0 {
if e, a := c.Err, err.Error(); !strings.Contains(a, e) {
t.Errorf("%d, expect %v to be in %v", i, e, a)
}
continue
} else if err != nil {
t.Errorf("%d, expect no error, got %v", i, err)
continue
}
if e, a := c.URL, u; e != a {
t.Errorf("%d, expect %v URL, got %v", i, e, a)
}
if e, a := c.Header, h; !reflect.DeepEqual(e, a) {
t.Errorf("%d, expect %v header got %v", i, e, a)
}
}
}
func TestNew_EndpointWithDefaultPort(t *testing.T) {
endpoint := "https://estest.us-east-1.es.amazonaws.com:443"
expectedRequestHost := "estest.us-east-1.es.amazonaws.com"
r := request.New(
aws.Config{},
metadata.ClientInfo{Endpoint: endpoint},
defaults.Handlers(),
client.DefaultRetryer{},
&request.Operation{},
nil,
nil,
)
if h := r.HTTPRequest.Host; h != expectedRequestHost {
t.Errorf("expect %v host, got %q", expectedRequestHost, h)
}
}
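// test that default ports are stripped from the Host header while non-default ports are kept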
func TestSanitizeHostForHeader(t *testing.T) {
cases := []struct {
url string
expectedRequestHost string
}{
{"https://estest.us-east-1.es.amazonaws.com:443", "estest.us-east-1.es.amazonaws.com"},
{"https://estest.us-east-1.es.amazonaws.com", "estest.us-east-1.es.amazonaws.com"},
{"https://localhost:9200", "localhost:9200"},
{"http://localhost:80", "localhost"},
{"http://localhost:8080", "localhost:8080"},
}
for _, c := range cases {
r, _ := http.NewRequest("GET", c.url, nil)
request.SanitizeHostForHeader(r)
if h := r.Host; h != c.expectedRequestHost {
t.Errorf("expect %v host, got %q", c.expectedRequestHost, h)
}
}
}<|fim▁end|> | }
func (d *testRetryer) MaxRetries() int { |
<|file_name|>HotNeedleLightControl.cpp<|end_file_name|><|fim▁begin|>#include "HotNeedleLightControl.h"
HotNeedleLightControlClass::HotNeedleLightControlClass(uint8_t background[NEOPIXEL_COUNT][COLOR_BYTES], uint8_t hotNeedleColor[COLOR_BYTES], float highlightMultiplier, bool useHighlight, uint16_t fadeTime, uint8_t framePeriod, Adafruit_NeoPixel *strip) : LightControlClass(framePeriod, strip)
{
memcpy(this->backgroundColors, background, COLOR_BYTES*NEOPIXEL_COUNT);
memcpy(this->hotNeedleColor, hotNeedleColor, COLOR_BYTES);
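  // Convert the fade duration (ms) into a whole number of frames at this frame period.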
fadeFrames = fadeTime / framePeriod;
this->useHighlight = useHighlight;
this->highlightMultiplier = highlightMultiplier;
this->maximumLedPosition = 0;
this->minimumLedPosition = NEOPIXEL_COUNT;
}
// Rendering code
<|fim▁hole|> // Increment existing counters
decrementCounters(ledCounters);
uint16_t needlePosition = pixelFromInputPosition(pos);
// Set current position hot pixel counter to max
ledCounters[needlePosition] = fadeFrames;
draw(needlePosition);
}
void HotNeedleLightControlClass::draw(uint16_t needlePosition)
{
// Calculate display values for each pixel
for (uint16_t p = 0; p < NEOPIXEL_COUNT; p++)
{
float backgroundRatio = (float)(fadeFrames - ledCounters[p]) / fadeFrames;
float foregroundRatio = 1.0 - backgroundRatio;
for (uint8_t c = 0; c < COLOR_BYTES; c++)
{
if (useHighlight)
{
// Foreground color is background color * highlight multiplier
// Make sure we don't wrap past 255
int bg = backgroundColors[p][c] * highlightMultiplier;
if (bg > 255)
{
bg = 255;
}
ledCurrentColors[p][c] = gammaCorrect((foregroundRatio * bg) + (backgroundRatio * backgroundColors[p][c]));
}
else
{
ledCurrentColors[p][c] = gammaCorrect((foregroundRatio * hotNeedleColor[c]) + (backgroundRatio * backgroundColors[p][c]));
}
}
strip->setPixelColor(p, ledCurrentColors[p][RED],
ledCurrentColors[p][GREEN],
ledCurrentColors[p][BLUE]);
}
if(useMaximum){
updateMaximum(needlePosition);
drawMaximum();
}
if(useMinimum){
updateMinimum(needlePosition);
drawMinimum();
}
strip->show();
}<|fim▁end|> | void HotNeedleLightControlClass::renderFrame(uint16_t pos, NEEDLE_DIRECTION dir)
{ |
<|file_name|>overloaded-index-in-field.rs<|end_file_name|><|fim▁begin|>// run-pass
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
use std::ops::Index;
struct Foo {
x: isize,
y: isize,
}
struct Bar {
foo: Foo
}
impl Index<isize> for Foo {
type Output = isize;
fn index(&self, z: isize) -> &isize {
if z == 0 {<|fim▁hole|> &self.x
} else {
&self.y
}
}
}
trait Int {
fn get(self) -> isize;
fn get_from_ref(&self) -> isize;
fn inc(&mut self);
}
impl Int for isize {
fn get(self) -> isize { self }
fn get_from_ref(&self) -> isize { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
}<|fim▁end|> | |
<|file_name|>0002_auto_20170415_1001.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('papers', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='conference',
options={'ordering': ['date']},<|fim▁hole|> ),
migrations.AddField(
model_name='paper',
name='url',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]<|fim▁end|> | |
<|file_name|>delete-droplet.spec.ts<|end_file_name|><|fim▁begin|>import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { createContext } from '../../common';
import {deleteDroplet} from './delete-droplet';
import * as MOCK from './delete-droplet.mock';
describe('droplet', () => {
const DROPLET_ID = 123;
const URL = `/droplets/${DROPLET_ID}`;
const TOKEN = process.env.TEST_TOKEN as string;
const mock = new MockAdapter(axios);
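    // Stub the DELETE endpoint so the test never reaches the real API.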
mock.onDelete(URL).reply(
MOCK.response.headers.status,
undefined,
MOCK.response.headers,
);
const context = createContext({
axios,
token: TOKEN,
});
beforeEach(() => {
mock.resetHistory();
});
describe('delete-droplet', () => {
it('should be a fn', () => {
expect(typeof deleteDroplet).toBe('function');
});
it('should return a fn', () => {
expect(typeof deleteDroplet(context)).toBe('function');
});
it('should return a valid response', async () => {
const _deleteDroplet = deleteDroplet(context);
const response = await _deleteDroplet({droplet_id: DROPLET_ID});
Object.assign(response, {request: mock.history.delete[0]});
/// validate response schema
expect(typeof response).toBe('object');
expect(typeof response.headers).toBe('object');
expect(typeof response.request).toBe('object');
expect(typeof response.status).toBe('number');
/// validate request
const {request} = response;
expect(request.baseURL + request.url).toBe(context.endpoint + URL);
expect(request.method).toBe('delete');<|fim▁hole|> expect(headers).toMatchObject(MOCK.response.headers);
expect(status).toBe(MOCK.response.headers.status);
});
});
});<|fim▁end|> | expect(request.headers).toMatchObject(MOCK.request.headers);
expect(request.data).toBeUndefined();
/// validate headers
const {headers, status} = response; |
<|file_name|>bg.js<|end_file_name|><|fim▁begin|><|fim▁hole|>CKEDITOR.plugins.setLang("codesnippet","bg",{button:"Въвеждане на блок с код",codeContents:"Съдържание на кода",emptySnippetError:"Блока с код не може да бъде празен.",language:"Език",title:"Блок с код",pathName:"блок с код"});<|fim▁end|> | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/ |
<|file_name|>test_metrics_fluster.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use std::path::Path;
use std::thread::sleep;
use std::time::Duration;
use engine_rocks::raw::{DBOptions, TitanDBOptions};
use tempfile::Builder;
use engine_rocks::util::{self as rocks_util, RocksCFOptions};
use engine_rocks::{RocksColumnFamilyOptions, RocksDBOptions};
use engine_traits::{ColumnFamilyOptions, Engines, MetricsFlusher, CF_DEFAULT, CF_LOCK, CF_WRITE};
#[test]
fn test_metrics_flusher() {
let path = Builder::new()
.prefix("_test_metrics_flusher")
.tempdir()
.unwrap();
let raft_path = path.path().join(Path::new("raft"));
let mut db_opt = DBOptions::new();<|fim▁hole|> let cf_opts = RocksColumnFamilyOptions::new();
let cfs_opts = vec![
RocksCFOptions::new(CF_DEFAULT, ColumnFamilyOptions::new()),
RocksCFOptions::new(CF_LOCK, ColumnFamilyOptions::new()),
RocksCFOptions::new(CF_WRITE, cf_opts),
];
let engine =
rocks_util::new_engine_opt(path.path().to_str().unwrap(), db_opt, cfs_opts).unwrap();
let cfs_opts = vec![RocksCFOptions::new(
CF_DEFAULT,
RocksColumnFamilyOptions::new(),
)];
let raft_engine = rocks_util::new_engine_opt(
raft_path.to_str().unwrap(),
RocksDBOptions::from_raw(DBOptions::new()),
cfs_opts,
)
.unwrap();
let engines = Engines::new(engine, raft_engine);
let mut metrics_flusher = MetricsFlusher::new(engines);
metrics_flusher.set_flush_interval(Duration::from_millis(100));
if let Err(e) = metrics_flusher.start() {
error!("failed to start metrics flusher, error = {:?}", e);
}
let rtime = Duration::from_millis(300);
sleep(rtime);
metrics_flusher.stop();
}<|fim▁end|> | db_opt.set_titandb_options(&TitanDBOptions::new());
let db_opt = RocksDBOptions::from_raw(db_opt); |
<|file_name|>vdnConfig.py<|end_file_name|><|fim▁begin|># coding=utf-8
#
# Copyright © 2015 VMware, Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
__author__ = 'yfauser'
from tests.config import *
from nsxramlclient.client import NsxClient
import time
client_session = NsxClient(nsxraml_file, nsxmanager, nsx_username, nsx_password, debug=True)
def test_segment_pools():
### Test Segment ID Pool Operations
# Get all configured Segment Pools
get_segment_resp = client_session.read('vdnSegmentPools')
client_session.view_response(get_segment_resp)
# Add a Segment Pool
segments_create_body = client_session.extract_resource_body_example('vdnSegmentPools', 'create')
client_session.view_body_dict(segments_create_body)
<|fim▁hole|>
create_response = client_session.create('vdnSegmentPools', request_body_dict=segments_create_body)
client_session.view_response(create_response)
time.sleep(5)
# Update the new Segment Pool:
update_segment_body = client_session.extract_resource_body_example('vdnSegmentPool', 'update')
update_segment_body['segmentRange']['name'] = 'PythonTest'
update_segment_body['segmentRange']['end'] = '11005'
client_session.update('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']},
request_body_dict=update_segment_body)
time.sleep(5)
# Display a specific Segment pool (the new one)
specific_segement_resp = client_session.read('vdnSegmentPool', uri_parameters={'segmentPoolId':
create_response['objectId']})
client_session.view_response(specific_segement_resp)
time.sleep(5)
# Delete new Segment Pool
client_session.delete('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']})
def test_mcast_pools():
### Test Multicast Pool Operations
# Add a multicast Pool
mcastpool_create_body = client_session.extract_resource_body_example('vdnMulticastPools', 'create')
client_session.view_body_dict(mcastpool_create_body)
mcastpool_create_body['multicastRange']['desc'] = 'Test'
mcastpool_create_body['multicastRange']['begin'] = '235.0.0.0'
mcastpool_create_body['multicastRange']['end'] = '235.1.1.1'
mcastpool_create_body['multicastRange']['name'] = 'legacy'
create_response = client_session.create('vdnMulticastPools', request_body_dict=mcastpool_create_body)
client_session.view_response(create_response)
# Get all configured Multicast Pools
get_mcast_pools = client_session.read('vdnMulticastPools')
client_session.view_response(get_mcast_pools)
time.sleep(5)
# Update the newly created mcast pool
mcastpool_update_body = client_session.extract_resource_body_example('vdnMulticastPool', 'update')
mcastpool_update_body['multicastRange']['end'] = '235.3.1.1'
mcastpool_update_body['multicastRange']['name'] = 'Python'
update_response = client_session.update('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId':
create_response['objectId']},
request_body_dict=mcastpool_update_body)
client_session.view_response(update_response)
# display a specific Multicast Pool
get_mcast_pool = client_session.read('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId':
create_response['objectId']})
client_session.view_response(get_mcast_pool)
# Delete new mcast pool
client_session.delete('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId': create_response['objectId']})
#test_segment_pools()
#test_mcast_pools()<|fim▁end|> | segments_create_body['segmentRange']['begin'] = '11002'
segments_create_body['segmentRange']['end'] = '11003'
segments_create_body['segmentRange']['name'] = 'legacy' |
<|file_name|>serve.ts<|end_file_name|><|fim▁begin|>import { task } from 'gulp';
import { join } from 'path';
import { config } from '../utils/config';
import { sequenceTask } from '../utils/sequence-task';
import { readFileSync, writeFileSync } from 'fs';
const serve = require('browser-sync');
const webpack = require('webpack');
const webpackDevMiddleware = require('webpack-dev-middleware');
const webpackHotMiddleware = require('webpack-hot-middleware');
const proxyMiddleware = require('http-proxy-middleware');
const helper = require('../../../config/helper');
const interceptor = require('../../../config/interceptor');
const devConfigPath = join(config.webpackConfigPath, 'webpack.dev');
const prodConfigPath = join(config.webpackConfigPath, 'webpack.prod');
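// The serve task rebuilds the docs, then starts browser-sync with webpack HMR and an optional API proxy.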
task('serve', sequenceTask('clean', 'docs', ':serve'));
task(':serve', () => {
const devConfig = require(devConfigPath);
const appEntry = devConfig.entry.app;
devConfig.entry.app = [
'webpack-hot-middleware/client?noInfo=true&reload=true',
...appEntry
];
const proxyConfig = helper.getProxyConfig();
let target = proxyConfig.host;
if (proxyConfig.port) {
        target += ':' + proxyConfig.port + '/';
}
if (proxyConfig.path) {
target = target + proxyConfig.path;
}
const compiler = webpack(devConfig);
serve({
port: process.env.PORT || 9009,
open: true,
server: { baseDir: config.appPath },
middleware: [
helper.isProxy()
? proxyMiddleware(proxyConfig.prefix, { target })
: interceptor,
            webpackDevMiddleware(compiler, {
stats: {
chunks: false,
modules: false
},
publicPath: devConfig.output.publicPath
}),
            webpackHotMiddleware(compiler)
]
});
    // Watch mock data for changes and reload the browser automatically
// serve.watch(root + '/mock/**/*.js').on('change', serve.reload);
// serve.watch(root + '/index.html').on('change', serve.reload);
});
task('build:demo', sequenceTask('docs', 'build:demo:webpack', 'build:replace:basehref'));
task('build:demo:webpack', (cb?: Function) => {
let buildConfig = require(prodConfigPath);
if (helper.isDev()) {
buildConfig = require(devConfigPath);
}
webpack(buildConfig, (err: any, stats: any) => {
if (err) {<|fim▁hole|> console.log('webpack', err);
}
console.log('[webpack]', stats.toString({
chunks: false,
errorDetails: true
}));
if (cb) {
cb();
}
});
});
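// Rewrite the base href so the built docs resolve assets when served from the /measure/ sub-path.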
task('build:replace:basehref', () => {
const docsIndex = join(config.appPath, '../docs/index.html');
let indexContent = readFileSync(docsIndex, 'utf-8');
indexContent = indexContent.replace('base href="/"', 'base href="/measure/"');
writeFileSync(docsIndex, indexContent, 'utf-8');
});<|fim▁end|> | |
<|file_name|>id_mapping.py<|end_file_name|><|fim▁begin|>id_mappings = {
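    # Maps Hearthstone card IDs to their English card names.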
"EX1_097": "Abomination",
"CS2_188": "Abusive Sergeant",
"EX1_007": "Acolyte of Pain",
"NEW1_010": "Al'Akir the Windlord",
"EX1_006": "Alarm-o-Bot",
"EX1_382": "Aldor Peacekeeper",
"EX1_561": "Alexstrasza",
"EX1_393": "Amani Berserker",
"CS2_038": "Ancestral Spirit",
"EX1_057": "Ancient Brewmaster",
"EX1_584": "Ancient Mage",
"NEW1_008b": "Ancient Secrets",
"NEW1_008a": "Ancient Teachings",
"EX1_045": "Ancient Watcher",
"NEW1_008": "Ancient of Lore",
"EX1_178": "Ancient of War",
"EX1_009": "Angry Chicken",
"EX1_398": "Arathi Weaponsmith",
"EX1_089": "Arcane Golem",
"EX1_559": "Archmage Antonidas",
"EX1_067": "Argent Commander",
"EX1_362": "Argent Protector",
"EX1_008": "Argent Squire",
"EX1_402": "Armorsmith",
"EX1_383t": "Ashbringer",
"EX1_591": "Auchenai Soulpriest",
"EX1_384": "Avenging Wrath",
"EX1_284": "Azure Drake",
"EX1_110t": "Baine Bloodhoof",
"EX1_014t": "Bananas",
"EX1_320": "Bane of Doom",
"EX1_249": "Baron Geddon",
"EX1_398t": "Battle Axe",
"EX1_392": "Battle Rage",
"EX1_165b": "Bear Form",
"EX1_549": "Bestial Wrath",
"EX1_126": "Betrayal",
"EX1_005": "Big Game Hunter",
"EX1_570": "Bite",
"CS2_233": "Blade Flurry",
"EX1_355": "Blessed Champion",
"EX1_363": "Blessing of Wisdom",
"CS2_028": "Blizzard",
"EX1_323w": "Blood Fury",
"CS2_059": "Blood Imp",
"EX1_590": "Blood Knight",
"EX1_012": "Bloodmage Thalnos",
"NEW1_025": "Bloodsail Corsair",
"NEW1_018": "Bloodsail Raider",
"EX1_407": "Brawl",
"EX1_091": "Cabal Shadow Priest",
"EX1_110": "Cairne Bloodhoof",
"NEW1_024": "Captain Greenskin",
"EX1_165a": "Cat Form",
"EX1_573": "Cenarius",
"EX1_621": "Circle of Healing",
"CS2_073": "Cold Blood",
"EX1_050": "Coldlight Oracle",
"EX1_103": "Coldlight Seer",
"NEW1_036": "Commanding Shout",
"EX1_128": "Conceal",
"EX1_275": "Cone of Cold",
"EX1_287": "Counterspell",
"EX1_059": "Crazed Alchemist",
"EX1_603": "Cruel Taskmaster",
"EX1_595": "Cult Master",
"skele21": "Damaged Golem",
"EX1_046": "Dark Iron Dwarf",
"EX1_617": "Deadly Shot",
"NEW1_030": "Deathwing",
"EX1_130a": "Defender",
"EX1_093": "Defender of Argus",
"EX1_131t": "Defias Bandit",
"EX1_131": "Defias Ringleader",
"EX1_573a": "Demigod's Favor",
"EX1_102": "Demolisher",
"EX1_596": "Demonfire",
"EX1_tk29": "Devilsaur",
"EX1_162": "Dire Wolf Alpha",
"EX1_166b": "Dispel",
"EX1_349": "Divine Favor",
"EX1_310": "Doomguard",
"EX1_567": "Doomhammer",
"NEW1_021": "Doomsayer",
"NEW1_022": "Dread Corsair",
"DREAM_04": "Dream",
"EX1_165t2": "Druid of the Claw (bear)",
"EX1_165": "Druid of the Claw",
"EX1_165t1": "Druid of the Claw (cat)",
"EX1_243": "Dust Devil",
"EX1_536": "Eaglehorn Bow",
"EX1_250": "Earth Elemental",
"EX1_245": "Earth Shock",
"CS2_117": "Earthen Ring Farseer",
"EX1_613": "Edwin VanCleef",
"DREAM_03": "Emerald Drake",
"EX1_170": "Emperor Cobra",
"EX1_619": "Equality",
"EX1_274": "Ethereal Arcanist",
"EX1_124": "Eviscerate",
"EX1_537": "Explosive Shot",
"EX1_610": "Explosive Trap",
"EX1_132": "Eye for an Eye",<|fim▁hole|> "NEW1_023": "Faerie Dragon",
"CS2_053": "Far Sight",
"EX1_301": "Felguard",
"CS1_069": "Fen Creeper",
"EX1_248": "Feral Spirit",
"EX1_finkle": "Finkle Einhorn",
"EX1_319": "Flame Imp",
"EX1_614t": "Flame of Azzinoth",
"EX1_544": "Flare",
"tt_004": "Flesheating Ghoul",
"EX1_571": "Force of Nature",
"EX1_251": "Forked Lightning",
"EX1_611": "Freezing Trap",
"EX1_283": "Frost Elemental",
"EX1_604": "Frothing Berserker",
"EX1_095": "Gadgetzan Auctioneer",
"DS1_188": "Gladiator's Longbow",
"NEW1_040t": "Gnoll",
"EX1_411": "Gorehowl",
"EX1_414": "Grommash Hellscream",
"NEW1_038": "Gruul",
"EX1_558": "Harrison Jones",
"EX1_556": "Harvest Golem",
"EX1_137": "Headcrack",
"EX1_409t": "Heavy Axe",
"NEW1_040": "Hogger",
"EX1_624": "Holy Fire",
"EX1_365": "Holy Wrath",
"EX1_538t": "Hound",
"NEW1_017": "Hungry Crab",
"EX1_534t": "Hyena",
"EX1_289": "Ice Barrier",
"EX1_295": "Ice Block",
"CS2_031": "Ice Lance",
"EX1_614": "Illidan Stormrage",
"EX1_598": "Imp",
"EX1_597": "Imp Master",
"EX1_tk34": "Infernal",
"CS2_181": "Injured Blademaster",
"CS1_129": "Inner Fire",
"EX1_607": "Inner Rage",
"CS2_203": "Ironbeak Owl",
"EX1_017": "Jungle Panther",
"EX1_166": "Keeper of the Grove",
"NEW1_005": "Kidnapper",
"EX1_543": "King Krush",
"EX1_014": "King Mukla",
"EX1_612": "Kirin Tor Mage",
"NEW1_019": "Knife Juggler",
"DREAM_01": "Laughing Sister",
"EX1_241": "Lava Burst",
"EX1_354": "Lay on Hands",
"EX1_160b": "Leader of the Pack",
"EX1_116": "Leeroy Jenkins",
"EX1_029": "Leper Gnome",
"EX1_238": "Lightning Bolt",
"EX1_259": "Lightning Storm",
"EX1_335": "Lightspawn",
"EX1_001": "Lightwarden",
"EX1_341": "Lightwell",
"EX1_096": "Loot Hoarder",
"EX1_323": "Lord Jaraxxus",
"EX1_100": "Lorewalker Cho",
"EX1_082": "Mad Bomber",
"EX1_563": "Malygos",
"EX1_055": "Mana Addict",
"EX1_575": "Mana Tide Totem",
"EX1_616": "Mana Wraith",
"NEW1_012": "Mana Wyrm",
"EX1_155": "Mark of Nature",
"EX1_155b": "Mark of Nature",
"EX1_155a": "Mark of Nature",
"EX1_626": "Mass Dispel",
"NEW1_037": "Master Swordsmith",
"NEW1_014": "Master of Disguise",
"NEW1_029": "Millhouse Manastorm",
"EX1_085": "Mind Control Tech",
"EX1_345": "Mindgames",
"EX1_294": "Mirror Entity",
"EX1_533": "Misdirection",
"EX1_396": "Mogu'shan Warden",
"EX1_620": "Molten Giant",
"EX1_166a": "Moonfire",
"EX1_408": "Mortal Strike",
"EX1_105": "Mountain Giant",
"EX1_509": "Murloc Tidecaller",
"EX1_507": "Murloc Warleader",
"EX1_557": "Nat Pagle",
"EX1_161": "Naturalize",
"DREAM_05": "Nightmare",
"EX1_130": "Noble Sacrifice",
"EX1_164b": "Nourish",
"EX1_164a": "Nourish",
"EX1_164": "Nourish",
"EX1_560": "Nozdormu",
"EX1_562": "Onyxia",
"EX1_160t": "Panther",
"EX1_522": "Patient Assassin",
"EX1_133": "Perdition's Blade",
"EX1_076": "Pint-Sized Summoner",
"EX1_313": "Pit Lord",
"EX1_316": "Power Overwhelming",
"EX1_160": "Power of the Wild",
"EX1_145": "Preparation",
"EX1_583": "Priestess of Elune",
"EX1_350": "Prophet Velen",
"EX1_279": "Pyroblast",
"EX1_044": "Questing Adventurer",
"EX1_412": "Raging Worgen",
"EX1_298": "Ragnaros the Firelord",
"CS2_104": "Rampage",
"CS2_161": "Ravenholdt Assassin",
"EX1_136": "Redemption",
"EX1_379": "Repentance",
"EX1_178a": "Rooted",
"EX1_134": "SI:7 Agent",
"EX1_578": "Savagery",
"EX1_534": "Savannah Highmane",
"EX1_020": "Scarlet Crusader",
"EX1_531": "Scavenging Hyena",
"EX1_586": "Sea Giant",
"EX1_080": "Secretkeeper",
"EX1_317": "Sense Demons",
"EX1_334": "Shadow Madness",
"EX1_345t": "Shadow of Nothing",
"EX1_303": "Shadowflame",
"EX1_625": "Shadowform",
"EX1_144": "Shadowstep",
"EX1_573b": "Shan'do's Lesson",
"EX1_410": "Shield Slam",
"EX1_405": "Shieldbearer",
"EX1_332": "Silence",
"CS2_151": "Silver Hand Knight",
"EX1_023": "Silvermoon Guardian",
"EX1_309": "Siphon Soul",
"EX1_391": "Slam",
"EX1_554t": "Snake",
"EX1_554": "Snake Trap",
"EX1_609": "Snipe",
"EX1_608": "Sorcerer's Apprentice",
"EX1_158": "Soul of the Forest",
"NEW1_027": "Southsea Captain",
"CS2_146": "Southsea Deckhand",
"tt_010a": "Spellbender (minion)",
"tt_010": "Spellbender",
"EX1_048": "Spellbreaker",
"EX1_tk11": "Spirit Wolf",
"CS2_221": "Spiteful Smith",
"CS2_152": "Squire",
"EX1_tk28": "Squirrel",
"NEW1_041": "Stampeding Kodo",
"NEW1_007a": "Starfall",
"NEW1_007b": "Starfall",
"NEW1_007": "Starfall",
"EX1_247": "Stormforged Axe",
"EX1_028": "Stranglethorn Tiger",
"EX1_160a": "Summon a Panther",
"EX1_315": "Summoning Portal",
"EX1_058": "Sunfury Protector",
"EX1_032": "Sunwalker",
"EX1_366": "Sword of Justice",
"EX1_016": "Sylvanas Windrunner",
"EX1_390": "Tauren Warrior",
"EX1_623": "Temple Enforcer",
"EX1_577": "The Beast",
"EX1_002": "The Black Knight",
"EX1_339": "Thoughtsteal",
"EX1_021": "Thrallmar Farseer",
"EX1_083": "Tinkmaster Overspark",
"EX1_383": "Tirion Fordring",
"EX1_tk9": "Treant (charge)",
"EX1_573t": "Treant (taunt)",
"EX1_158t": "Treant",
"EX1_043": "Twilight Drake",
"EX1_312": "Twisting Nether",
"EX1_258": "Unbound Elemental",
"EX1_538": "Unleash the Hounds",
"EX1_409": "Upgrade!",
"EX1_178b": "Uproot",
"EX1_594": "Vaporize",
"CS2_227": "Venture Co. Mercenary",
"NEW1_026t": "Violet Apprentice",
"NEW1_026": "Violet Teacher",
"EX1_304": "Void Terror",
"ds1_whelptoken": "Whelp",
"EX1_116t": "Whelp",
"NEW1_020": "Wild Pyromancer",
"EX1_033": "Windfury Harpy",
"CS2_231": "Wisp",
"EX1_010": "Worgen Infiltrator",
"EX1_317t": "Worthless Imp",
"EX1_154b": "Wrath",
"EX1_154a": "Wrath",
"EX1_154": "Wrath",
"CS2_169": "Young Dragonhawk",
"EX1_004": "Young Priestess",
"EX1_049": "Youthful Brewmaster",
"EX1_572": "Ysera",
"DREAM_02": "Ysera Awakens",
"EX1_066": "Acidic Swamp Ooze",
"CS2_041": "Ancestral Healing",
"NEW1_031": "Animal Companion",
"CS2_025": "Arcane Explosion",
"CS2_023": "Arcane Intellect",
"EX1_277": "Arcane Missiles",
"DS1_185": "Arcane Shot",
"CS2_112": "Arcanite Reaper",
"CS2_155": "Archmage",
"CS2_080": "Assassin's Blade",
"CS2_076": "Assassinate",
"GAME_002": "Avatar of the Coin",
"CS2_072": "Backstab",
"CS2_092": "Blessing of Kings",
"CS2_087": "Blessing of Might",
"CS2_172": "Bloodfen Raptor",
"CS2_046": "Bloodlust",
"CS2_173": "Bluegill Warrior",
"CS2_boar": "Boar",
"CS2_187": "Booty Bay Bodyguard",
"CS2_200": "Boulderfist Ogre",
"CS2_103": "Charge",
"CS2_182": "Chillwind Yeti",
"CS2_005": "Claw",
"CS2_114": "Cleave",
"CS2_093": "Consecration",
"CS2_201": "Core Hound",
"CS2_063": "Corruption",
"EX1_582": "Dalaran Mage",
"DS1_055": "Darkscale Healer",
"CS2_074": "Deadly Poison",
"CS2_236": "Divine Spirit",
"EX1_025": "Dragonling Mechanic",
"CS2_061": "Drain Life",
"CS2_064": "Dread Infernal",
"CS2_189": "Elven Archer",
"CS2_013t": "Excess Mana",
"CS2_108": "Execute",
"EX1_129": "Fan of Knives",
"CS2_106": "Fiery War Axe",
"CS2_042": "Fire Elemental",
"CS2_029": "Fireball",
"CS2_032": "Flamestrike",
"EX1_565": "Flametongue Totem",
"hexfrog": "Frog",
"CS2_026": "Frost Nova",
"CS2_037": "Frost Shock",
"CS2_024": "Frostbolt",
"CS2_121": "Frostwolf Grunt",
"CS2_226": "Frostwolf Warlord",
"CS2_147": "Gnomish Inventor",
"CS1_042": "Goldshire Footman",
"EX1_508": "Grimscale Oracle",
"CS2_088": "Guardian of Kings",
"EX1_399": "Gurubashi Berserker",
"CS2_094": "Hammer of Wrath",
"EX1_371": "Hand of Protection",
"NEW1_009": "Healing Totem",
"CS2_007": "Healing Touch",
"CS2_062": "Hellfire",
"CS2_105": "Heroic Strike",
"EX1_246": "Hex",
"CS2_089": "Holy Light",
"CS1_112": "Holy Nova",
"CS1_130": "Holy Smite",
"DS1_070": "Houndmaster",
"NEW1_034": "Huffer",
"EX1_360": "Humility",
"CS2_084": "Hunter's Mark",
"EX1_169": "Innervate",
"CS2_232": "Ironbark Protector",
"CS2_141": "Ironforge Rifleman",
"CS2_125": "Ironfur Grizzly",
"EX1_539": "Kill Command",
"CS2_142": "Kobold Geomancer",
"NEW1_011": "Kor'kron Elite",
"NEW1_033": "Leokk",
"CS2_091": "Light's Justice",
"CS2_162": "Lord of the Arena",
"CS2_118": "Magma Rager",
"CS2_009": "Mark of the Wild",
"EX1_025t": "Mechanical Dragonling",
"DS1_233": "Mind Blast",
"CS1_113": "Mind Control",
"CS2_003": "Mind Vision",
"CS2_mirror": "Mirror Image (minion)",
"CS2_027": "Mirror Image",
"NEW1_032": "Misha",
"CS2_008": "Moonfire",
"EX1_302": "Mortal Coil",
"DS1_183": "Multi-Shot",
"CS2_168": "Murloc Raider",
"EX1_506a": "Murloc Scout",
"EX1_506": "Murloc Tidehunter",
"GAME_006": "NOOOOOOOOOOOO",
"EX1_593": "Nightblade",
"CS2_235": "Northshire Cleric",
"EX1_015": "Novice Engineer",
"CS2_119": "Oasis Snapjaw",
"CS2_197": "Ogre Magi",
"CS2_022": "Polymorph",
"CS2_004": "Power Word: Shield",
"CS2_122": "Raid Leader",
"CS2_196": "Razorfen Hunter",
"CS2_213": "Reckless Rocketeer",
"CS2_120": "River Crocolisk",
"CS2_045": "Rockbiter Weapon",
"NEW1_003": "Sacrificial Pact",
"EX1_581": "Sap",
"CS2_011": "Savage Roar",
"CS2_050": "Searing Totem",
"CS2_179": "Sen'jin Shieldmasta",
"CS2_057": "Shadow Bolt",
"EX1_622": "Shadow Word: Death",
"CS2_234": "Shadow Word: Pain",
"EX1_019": "Shattered Sun Cleric",
"CS2_tk1": "Sheep",
"EX1_606": "Shield Block",
"EX1_278": "Shiv",
"CS2_101t": "Silver Hand Recruit",
"CS2_127": "Silverback Patriarch",
"CS2_075": "Sinister Strike",
"skele11": "Skeleton",
"EX1_308": "Soulfire",
"CS2_077": "Sprint",
"EX1_173": "Starfire",
"CS2_237": "Starving Buzzard",
"CS2_051": "Stoneclaw Totem",
"CS2_171": "Stonetusk Boar",
"CS2_150": "Stormpike Commando",
"CS2_222": "Stormwind Champion",
"CS2_131": "Stormwind Knight",
"EX1_306": "Succubus",
"CS2_012": "Swipe",
"GAME_005": "The Coin",
"DS1_175": "Timber Wolf",
"EX1_244": "Totemic Might",
"DS1_184": "Tracking",
"CS2_097": "Truesilver Champion",
"DS1_178": "Tundra Rhino",
"NEW1_004": "Vanish",
"CS2_065": "Voidwalker",
"EX1_011": "Voodoo Doctor",
"CS2_186": "War Golem",
"EX1_084": "Warsong Commander",
"CS2_033": "Water Elemental",
"EX1_400": "Whirlwind",
"CS2_082": "Wicked Knife",
"CS2_013": "Wild Growth",
"CS2_039": "Windfury",
"EX1_587": "Windspeaker",
"CS2_124": "Wolfrider",
"CS2_052": "Wrath of Air Totem",
"FP1_026": "Anub'ar Ambusher",
"FP1_020": "Avenge",
"FP1_031": "Baron Rivendare",
"FP1_029": "Dancing Swords",
"FP1_023": "Dark Cultist",
"FP1_021": "Death's Bite",
"NAX6_03": "Deathbloom",
"FP1_006": "Deathcharger",
"FP1_009": "Deathlord",
"FP1_018": "Duplicate",
"FP1_003": "Echoing Ooze",
"NAX12_04": "Enrage",
"NAX11_03": "Fallout Slime",
"NAX13_04H": "Feugen",
"FP1_015": "Feugen",
"NAX14_03": "Frozen Champion",
"NAX15_03t": "Guardian of Icecrown",
"NAX15_03n": "Guardian of Icecrown",
"FP1_002": "Haunted Creeper",
"NAX10_02": "Hook",
"NAX10_02H": "Hook",
"NAX12_03": "Jaws",
"NAX12_03H": "Jaws",
"FP1_013": "Kel'Thuzad",
"NAX9_02H": "Lady Blaumeux",
"NAX9_02": "Lady Blaumeux",
"FP1_030": "Loatheb",
"NAX1_05": "Locust Swarm",
"FP1_004": "Mad Scientist",
"FP1_010": "Maexxna",
"NAX9_07": "Mark of the Horsemen",
"NAX7_04H": "Massive Runeblade",
"NAX7_04": "Massive Runeblade",
"NAX7_05": "Mind Control Crystal",
"NAX5_03": "Mindpocalypse",
"NAX15_05": "Mr. Bigglesworth",
"NAX11_04": "Mutating Injection",
"NAXM_001": "Necroknight",
"NAX3_03": "Necrotic Poison",
"FP1_017": "Nerub'ar Weblord",
"NAX1h_03": "Nerubian (normal)",
"NAX1_03": "Nerubian (heroic)",
"FP1_007t": "Nerubian",
"FP1_007": "Nerubian Egg",
"NAX4_05": "Plague",
"FP1_019": "Poison Seeds",
"NAX14_04": "Pure Cold",
"FP1_025": "Reincarnate",
"NAX9_05H": "Runeblade",
"NAX9_05": "Runeblade",
"FP1_005": "Shade of Naxxramas",
"NAX9_04": "Sir Zeliek",
"NAX9_04H": "Sir Zeliek",
"NAXM_002": "Skeletal Smith",
"NAX4_03H": "Skeleton",
"NAX4_03": "Skeleton",
"FP1_012t": "Slime",
"FP1_012": "Sludge Belcher",
"FP1_008": "Spectral Knight",
"NAX8_05t": "Spectral Rider",
"FP1_002t": "Spectral Spider",
"NAX8_03t": "Spectral Trainee",
"NAX8_04t": "Spectral Warrior",
"NAX6_03t": "Spore",
"NAX6_04": "Sporeburst",
"NAX13_05H": "Stalagg",
"FP1_014": "Stalagg",
"FP1_027": "Stoneskin Gargoyle",
"NAX13_03": "Supercharge",
"FP1_014t": "Thaddius",
"NAX9_03H": "Thane Korth'azz",
"NAX9_03": "Thane Korth'azz",
"FP1_019t": "Treant (poison seeds)",
"NAX7_02": "Understudy",
"FP1_028": "Undertaker",
"NAX8_05": "Unrelenting Rider",
"NAX8_03": "Unrelenting Trainee",
"NAX8_04": "Unrelenting Warrior",
"FP1_024": "Unstable Ghoul",
"FP1_022": "Voidcaller",
"FP1_016": "Wailing Soul",
"FP1_011": "Webspinner",
"NAX2_05": "Worshipper",
"NAX2_05H": "Worshipper",
"FP1_001": "Zombie Chow",
"GVG_029": "Ancestor's Call",
"GVG_077": "Anima Golem",
"GVG_085": "Annoy-o-Tron",
"GVG_030": "Anodized Robo Cub",
"GVG_069": "Antique Healbot",
"GVG_091": "Arcane Nullifier X-21",
"PART_001": "Armor Plating",
"GVG_030a": "Attack Mode",
"GVG_119": "Blingtron 3000",
"GVG_063": "Bolvar Fordragon",
"GVG_099": "Bomb Lobber",
"GVG_110t": "Boom Bot",
"GVG_050": "Bouncing Blade",
"GVG_068": "Burly Rockjaw Trogg",
"GVG_056t": "Burrowing Mine",
"GVG_017": "Call Pet",
"GVG_092t": "Chicken (Gnomish Experimenter)",
"GVG_121": "Clockwork Giant",
"GVG_082": "Clockwork Gnome",
"GVG_062": "Cobalt Guardian",
"GVG_073": "Cobra Shot",
"GVG_059": "Coghammer",
"GVG_013": "Cogmaster",
"GVG_024": "Cogmaster's Wrench",
"GVG_038": "Crackle",
"GVG_052": "Crush",
"GVG_041": "Dark Wispers",
"GVG_041b": "Dark Wispers",
"GVG_041a": "Dark Wispers",
"GVG_015": "Darkbomb",
"GVG_019": "Demonheart",
"GVG_110": "Dr. Boom",
"GVG_080t": "Druid of the Fang (cobra)",
"GVG_080": "Druid of the Fang",
"GVG_066": "Dunemaul Shaman",
"GVG_005": "Echo of Medivh",
"PART_005": "Emergency Coolant",
"GVG_107": "Enhance-o Mechano",
"GVG_076": "Explosive Sheep",
"GVG_026": "Feign Death",
"GVG_020": "Fel Cannon",
"GVG_016": "Fel Reaver",
"PART_004": "Finicky Cloakfield",
"GVG_007": "Flame Leviathan",
"GVG_001": "Flamecannon",
"GVG_100": "Floating Watcher",
"GVG_084": "Flying Machine",
"GVG_113": "Foe Reaper 4000",
"GVG_079": "Force-Tank MAX",
"GVG_049": "Gahz'rilla",
"GVG_028t": "Gallywix's Coin",
"GVG_117": "Gazlowe",
"GVG_032b": "Gift of Cards",
"GVG_032a": "Gift of Mana",
"GVG_081": "Gilblin Stalker",
"GVG_043": "Glaivezooka",
"GVG_098": "Gnomeregan Infantry",
"GVG_092": "Gnomish Experimenter",
"GVG_023": "Goblin Auto-Barber",
"GVG_004": "Goblin Blastmage",
"GVG_095": "Goblin Sapper",
"GVG_032": "Grove Tender",
"GVG_120": "Hemet Nesingwary",
"GVG_104": "Hobgoblin",
"GVG_089": "Illuminator",
"GVG_045t": "Imp (warlock)",
"GVG_045": "Imp-losion",
"GVG_056": "Iron Juggernaut",
"GVG_027": "Iron Sensei",
"GVG_094": "Jeeves",
"GVG_106": "Junkbot",
"GVG_074": "Kezan Mystic",
"GVG_046": "King of Beasts",
"GVG_012": "Light of the Naaru",
"GVG_008": "Lightbomb",
"GVG_097": "Lil' Exorcist",
"GVG_071": "Lost Tallstrider",
"GVG_090": "Madder Bomber",
"GVG_021": "Mal'Ganis",
"GVG_035": "Malorne",
"GVG_034": "Mech-Bear-Cat",
"GVG_078": "Mechanical Yeti",
"GVG_006": "Mechwarper",
"GVG_116": "Mekgineer Thermaplugg",
"GVG_048": "Metaltooth Leaper",
"GVG_103": "Micro Machine",
"GVG_111": "Mimiron's Head",
"GVG_109": "Mini-Mage",
"GVG_018": "Mistress of Pain",
"GVG_112": "Mogor the Ogre",
"GVG_061": "Muster for Battle",
"GVG_042": "Neptulon",
"GVG_065": "Ogre Brute",
"GVG_088": "Ogre Ninja",
"GVG_054": "Ogre Warmaul",
"GVG_025": "One-eyed Cheat",
"GVG_096": "Piloted Shredder",
"GVG_105": "Piloted Sky Golem",
"GVG_036": "Powermace",
"GVG_064": "Puddlestomper",
"GVG_060": "Quartermaster",
"GVG_108": "Recombobulator",
"GVG_031": "Recycle",
"PART_006": "Reversing Switch",
"PART_003": "Rusty Horn",
"GVG_047": "Sabotage",
"GVG_070": "Salty Dog",
"GVG_101": "Scarlet Purifier",
"GVG_055": "Screwjank Clunker",
"GVG_057": "Seal of Light",
"GVG_009": "Shadowbomber",
"GVG_072": "Shadowboxer",
"GVG_058": "Shielded Minibot",
"GVG_053": "Shieldmaiden",
"GVG_075": "Ship's Cannon",
"GVG_011": "Shrinkmeister",
"GVG_086": "Siege Engine",
"GVG_040": "Siltfin Spiritwalker",
"GVG_114": "Sneed's Old Shredder",
"GVG_002": "Snowchugger",
"GVG_123": "Soot Spewer",
"GVG_044": "Spider Tank",
"GVG_087": "Steamwheedle Sniper",
"GVG_067": "Stonesplinter Trogg",
"GVG_030b": "Tank Mode",
"GVG_093": "Target Dummy",
"PART_002": "Time Rewinder",
"GVG_022": "Tinker's Sharpsword Oil",
"GVG_102": "Tinkertown Technician",
"GVG_115": "Toshley",
"GVG_028": "Trade Prince Gallywix",
"GVG_033": "Tree of Life",
"GVG_118": "Troggzor the Earthinator",
"GVG_003": "Unstable Portal",
"GVG_083": "Upgraded Repair Bot",
"GVG_111t": "V-07-TR-0N",
"GVG_010": "Velen's Chosen",
"GVG_039": "Vitality Totem",
"GVG_014": "Vol'jin",
"GVG_051": "Warbot",
"GVG_122": "Wee Spellstopper",
"PART_007": "Whirling Blades",
"GVG_037": "Whirling Zap-o-matic",
"NEW1_016": "Captain's Parrot",
"EX1_062": "Old Murk-Eye",
"Mekka4t": "Chicken",
"PRO_001": "Elite Tauren Chieftain",
"Mekka3": "Emboldener 3000",
"EX1_112": "Gelbin Mekkatorque",
"Mekka1": "Homing Chicken",
"PRO_001a": "I Am Murloc",
"PRO_001at": "Murloc",
"Mekka4": "Poultryizer",
"PRO_001c": "Power of the Horde",
"Mekka2": "Repair Bot",
"PRO_001b": "Rogues Do It...",
"BRM_016": "Axe Flinger",
"BRM_034": "Blackwing Corruptor",
"BRM_033": "Blackwing Technician",
"BRM_031": "Chromaggus",
"BRM_014": "Core Rager",
"BRM_008": "Dark Iron Skulker",
"BRM_005": "Demonwrath",
"BRM_018": "Dragon Consort",
"BRM_022": "Dragon Egg",
"BRM_003": "Dragon's Breath",
"BRM_020": "Dragonkin Sorcerer",
"BRM_024": "Drakonid Crusher",
"BRM_010": "Druid of the Flame",
"BRM_028": "Emperor Thaurissan",
"BRM_012": "Fireguard Destroyer",
"BRM_002": "Flamewaker",
"BRM_007": "Gang Up",
"BRM_019": "Grim Patron",
"BRM_026": "Hungry Dragon",
"BRM_006": "Imp Gang Boss",
"BRM_011": "Lava Shock",
"BRM_027": "Majordomo Executus",
"BRM_030": "Nefarian",
"BRM_013": "Quick Shot",
"BRM_029": "Rend Blackhand",
"BRM_017": "Resurrect",
"BRM_015": "Revenge",
"BRM_001": "Solemn Vigil",
"BRM_004": "Twilight Whelp",
"BRM_025": "Volcanic Drake",
"BRM_009": "Volcanic Lumberer",
}<|fim▁end|> | "EX1_564": "Faceless Manipulator", |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>define(['app', 'directives/search/search'], function() { 'use strict';<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Author: Vova Zaytsev <[email protected]>
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nlcd.settings")<|fim▁hole|><|fim▁end|> |
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application() |
<|file_name|>crawler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#encoding:utf-8
import os
import sys
import requests
import MySQLdb
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
if len(sys.argv) != 4:
print 'Invalid parameters!'
exit(1)
print '=' * 60
print 'start:', sys.argv
aim_category_id = int(sys.argv[1])
start_point = (int(sys.argv[2]), int(sys.argv[3]))
immediate_download = False
base_url = 'http://www.3che.com'
session = requests.Session()
username = ''
password = ''
record = {
'category': '',
'detail_category': '',
'post_url': '',
'filename': '',
'url': ''
}
sql_cnt = 0
connection = None
cursor = None
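# Commit in batches: roughly every 20 inserts the pending rows are committed
# and the MySQL connection is reopened, so a long crawl never holds one stale
# transaction for its whole lifetime.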
def record_to_mysql():
global sql_cnt, connection, cursor
if sql_cnt % 20 == 0:
if connection:
connection.commit()
connection.close()
cursor.close()
connection = MySQLdb.connect(host='', user='', passwd='', db='', port=3306, charset='utf8')
cursor = connection.cursor()
sql_cnt += 1
cursor.execute('insert into san_che(`category`, `detail_category`, `post_url`, `filename`, `url`) values (%s, %s, %s, %s, %s)',
(record['category'], record['detail_category'], record['post_url'], record['filename'], record['url']))
def login():<|fim▁hole|> login_path = '/member.php?mod=logging&action=login&loginsubmit=yes&infloat=yes&lssubmit=yes&inajax=1'
session.post(base_url + login_path, {'username': username, 'password': password})
def enter_directory(name):
if immediate_download:
if not os.path.exists(name):
os.mkdir(name)
os.chdir(name)
def get_soup(url, parse_only=None):
text = session.get(url).text
return BeautifulSoup(text, 'lxml', parse_only=parse_only)
def download_file(url, filename):
print 'Downloading:', filename, '=>', url
record['url'] = url
record['filename'] = filename
if immediate_download:
with open(filename, 'w') as fp:
res = requests.get(url)
fp.write(res.content)
fp.close()
else:
record_to_mysql()
def crawl_file(url, filename):
try:
soup = get_soup(url, SoupStrainer(id='attachpayform'))
attach_form = soup.find('form', id='attachpayform')
link = attach_form.table.find_all('a')[-1]
except Exception as e:
print 'Error! file url:', url
else:
download_file(link['href'], filename)
# Crawl detail data of one post.
def crawl_detail(detail_category, title, detail_url):
print '-' * 100
print 'Crawling Post:', detail_category, title, '=>', detail_url
record['detail_category'] = detail_category
record['post_url'] = detail_url
# Enter detail directory.
enter_directory(detail_category)
prefix = detail_url.rsplit('/', 1)[-1].split('.', 1)[0]
enter_directory(prefix + title)
soup = get_soup(detail_url, SoupStrainer('p', {'class': 'attnm'}))
attnms = soup.find_all('p', {'class': 'attnm'})
for attnm in attnms:
url = '{0}/{1}'.format(base_url, attnm.a['href'])
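        # strip(u'[下载]') trims the bracket and "download" characters from both
        # ends of the link text, leaving the bare attachment filename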
crawl_file(url, attnm.a.text.strip(u'[下载]'))
# Leave detail directory.
if immediate_download:
os.chdir('../..')
# Crawl data of one category.
def crawl_category(category, list_url):
print '=' * 100
print 'Crawling category:', category, '=>', list_url
record['category'] = category
# Create corresponding directory and enter.
enter_directory(category)
cur_page_id = 0
url = list_url
while url is not None:
cur_page_id += 1
print 'Crawling page url:', url
soup = get_soup(url, SoupStrainer('span'))
xsts = soup.find_all('span', {'class': 'xst'})
if cur_page_id >= start_point[0]:
cur_in_page_id = 0
for xst in xsts:
cur_in_page_id += 1
detail = xst.find('a', {'class': 'xst'})
if cur_page_id > start_point[0] or cur_in_page_id >= start_point[1]:
crawl_detail(xst.em and xst.em.a.text or '', detail.text, detail['href'])
page_footer = soup.find('span', id='fd_page_top')
next_link = page_footer.label.next_sibling
if next_link is not None:
url = next_link['href']
else:
url = None
# Leave the directory.
if immediate_download:
os.chdir('..')
if __name__ == '__main__':
login()
# Extract categories from home page.
soup = get_soup(base_url, SoupStrainer(id='nv'))
category_lis = soup.find('div', id='nv').ul.find_all('li')
categories = map(lambda x: (x.a.text, x.a['href']), category_lis)
categories = filter(lambda x: x[1] != '/', categories)
crawl_category(categories[aim_category_id][0], categories[aim_category_id][1])
# for category in categories:
# crawl_category(category[0], category[1])<|fim▁end|> | |
<|file_name|>diffie_hellman.rs<|end_file_name|><|fim▁begin|>use num_bigint::{BigUint, RandBigInt};
use num_integer::Integer;
use num_traits::{One, Zero};
use once_cell::sync::Lazy;
use rand::{CryptoRng, Rng};
static DH_GENERATOR: Lazy<BigUint> = Lazy::new(|| BigUint::from_bytes_be(&[0x02]));
static DH_PRIME: Lazy<BigUint> = Lazy::new(|| {
BigUint::from_bytes_be(&[
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc9, 0x0f, 0xda, 0xa2, 0x21, 0x68, 0xc2,
0x34, 0xc4, 0xc6, 0x62, 0x8b, 0x80, 0xdc, 0x1c, 0xd1, 0x29, 0x02, 0x4e, 0x08, 0x8a, 0x67,
0xcc, 0x74, 0x02, 0x0b, 0xbe, 0xa6, 0x3b, 0x13, 0x9b, 0x22, 0x51, 0x4a, 0x08, 0x79, 0x8e,
0x34, 0x04, 0xdd, 0xef, 0x95, 0x19, 0xb3, 0xcd, 0x3a, 0x43, 0x1b, 0x30, 0x2b, 0x0a, 0x6d,
0xf2, 0x5f, 0x14, 0x37, 0x4f, 0xe1, 0x35, 0x6d, 0x6d, 0x51, 0xc2, 0x45, 0xe4, 0x85, 0xb5,
0x76, 0x62, 0x5e, 0x7e, 0xc6, 0xf4, 0x4c, 0x42, 0xe9, 0xa6, 0x3a, 0x36, 0x20, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
])
});
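// Modular exponentiation by square-and-multiply: walks the exponent one bit at
// a time, reducing mod `modulus` at each step so intermediates stay bounded.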
fn powm(base: &BigUint, exp: &BigUint, modulus: &BigUint) -> BigUint {
let mut base = base.clone();
let mut exp = exp.clone();
let mut result: BigUint = One::one();
while !exp.is_zero() {
if exp.is_odd() {
result = (result * &base) % modulus;
}
exp >>= 1;
base = (&base * &base) % modulus;
}
result
}
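/// One side's Diffie-Hellman state: a random private exponent plus the public
/// value g^private mod p that is sent to the peer.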
pub struct DhLocalKeys {
private_key: BigUint,
public_key: BigUint,
}
impl DhLocalKeys {
pub fn random<R: Rng + CryptoRng>(rng: &mut R) -> DhLocalKeys {
let private_key = rng.gen_biguint(95 * 8);
let public_key = powm(&DH_GENERATOR, &private_key, &DH_PRIME);
DhLocalKeys {
private_key,
public_key,
}
}
pub fn public_key(&self) -> Vec<u8> {
self.public_key.to_bytes_be()
}<|fim▁hole|> &BigUint::from_bytes_be(remote_key),
&self.private_key,
&DH_PRIME,
);
shared_key.to_bytes_be()
}
}<|fim▁end|> |
pub fn shared_secret(&self, remote_key: &[u8]) -> Vec<u8> {
let shared_key = powm( |
<|file_name|>array.js<|end_file_name|><|fim▁begin|>define(['underscore', 'util', 'collection/abstract'],
function(_, Util, AbstractCollection)
{
'use strict';
var _parent = AbstractCollection.prototype;
var ArrayCollection = Util.extend(AbstractCollection, {
_registry: null,
create: function() {
var me = this;
_parent.create.apply(me, arguments);
me._registry = [];
},
add: function(item) {
var me = this;
if (arguments.length > 1) {
_.each(arguments, function(arg) {
me.add(arg)
});
return;
}
if (!_.contains(me._registry, item)) {
me._registry.push(item);
_parent.add.apply(me, arguments);
}
},
remove: function(item) {
var me = this;
if (arguments.length > 1) {
_.each(arguments, function(arg) {
me.remove(arg);
});
return;
}
var index = _.indexOf(me._registry, item);
if (index >= 0) {
me._registry.splice(index, 1);
_parent.remove.apply(me, arguments);
}
},
each: function(iterator) {
_.each(this._registry, iterator);
},
some: function(iterator) {
return _.some(this._registry, iterator);
},
count: function() {
return this._registry.length;
},
getAll: function() {<|fim▁hole|>
return ArrayCollection;
});<|fim▁end|> return this._registry.slice();
}
}); |
<|file_name|>FormGroupSize.java<|end_file_name|><|fim▁begin|>/*
* #%L
* Diana UI Core
* %%
* Copyright (C) 2014 Diana UI
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.dianaui.universal.core.client.ui.constants;
import com.dianaui.universal.core.client.ui.base.helper.EnumHelper;
import com.google.gwt.dom.client.Style;
/**
* Only relevant to horizontal forms
*
* @author Xiaodong Sun
*/
public enum FormGroupSize implements Size, Style.HasCssName {
LARGE("form-group-lg"),
DEFAULT(""),
    SMALL("form-group-sm");
private final String cssClass;
private FormGroupSize(final String cssClass) {
this.cssClass = cssClass;
}
@Override
public String getCssName() {
return cssClass;
}
public static FormGroupSize fromStyleName(final String styleName) {
return EnumHelper.fromStyleName(styleName, FormGroupSize.class, DEFAULT);
}
}<|fim▁end|> | |
<|file_name|>navtreeindex7.js<|end_file_name|><|fim▁begin|>var NAVTREEINDEX7 =
{
"classIogn_1_1DatabaseIO.html#a4d31b02871089cc417ea990591acb9e7":[4,0,64,2,9],
"classIogn_1_1DatabaseIO.html#a4f78970eb21804fb1eb863b93d174e0c":[4,0,64,2,42],
"classIogn_1_1DatabaseIO.html#a509bdb2de719b050921fd03bd207361e":[4,0,64,2,21],
"classIogn_1_1DatabaseIO.html#a567e232671c85ec139a7f32faf76c877":[4,0,64,2,10],
"classIogn_1_1DatabaseIO.html#a5904dba99e21eaf6282d37394f2a513b":[4,0,64,2,54],
"classIogn_1_1DatabaseIO.html#a64a655c1a5a50cba26c3fd87ab837b56":[4,0,64,2,6],
"classIogn_1_1DatabaseIO.html#a6503f106c8686fde564c0966836f6ea3":[4,0,64,2,55],
"classIogn_1_1DatabaseIO.html#a68969af20be800a8379c5940638d837b":[4,0,64,2,18],
"classIogn_1_1DatabaseIO.html#a6d30dd13247c8da4de57d6a404c947f3":[4,0,64,2,27],
"classIogn_1_1DatabaseIO.html#a704f25730c532ac67b7b71a9675d9de7":[4,0,64,2,39],
"classIogn_1_1DatabaseIO.html#a71051cdc0000111a2d54e11f72929bc3":[4,0,64,2,53],
"classIogn_1_1DatabaseIO.html#a77caac769115f8fb21a7ebef496f3220":[4,0,64,2,23],
"classIogn_1_1DatabaseIO.html#a7a7040731f50f870721dd9803bbdcec6":[4,0,64,2,14],
"classIogn_1_1DatabaseIO.html#a7c703a53160d8860611fd55d787ace3c":[4,0,64,2,1],
"classIogn_1_1DatabaseIO.html#a7cecaf86bfedc04216c4a30697e875fd":[4,0,64,2,57],
"classIogn_1_1DatabaseIO.html#a7ddf05aad9f647150922a91999c08cd9":[4,0,64,2,11],
"classIogn_1_1DatabaseIO.html#a84a1354c5b74eafc2df1c666ecd2812d":[4,0,64,2,50],
"classIogn_1_1DatabaseIO.html#a86c7a4e4185be29afc8b4d274a08e566":[4,0,64,2,34],
"classIogn_1_1DatabaseIO.html#a8c6f6e516b509159942fdffa27d1cc9b":[4,0,64,2,46],
"classIogn_1_1DatabaseIO.html#a8e36d96d6dd77f55db2a45401e9bc425":[4,0,64,2,51],
"classIogn_1_1DatabaseIO.html#a8fb6a277f2b675197194c41db4f7bc7e":[4,0,64,2,31],
"classIogn_1_1DatabaseIO.html#a99459762d0ff7aebe486ba5052baa386":[4,0,64,2,2],
"classIogn_1_1DatabaseIO.html#aa021bc9f0a9d3b8214f8608d6abefa9c":[4,0,64,2,17],
"classIogn_1_1DatabaseIO.html#aad08ce04e22d8b5665da7b627f1379f6":[4,0,64,2,41],
"classIogn_1_1DatabaseIO.html#ab0d22387b04886ef956b3b8bf3a62c4a":[4,0,64,2,7],
"classIogn_1_1DatabaseIO.html#ab1f013f572ff7e023d6c23a27c6b51b9":[4,0,64,2,48],
"classIogn_1_1DatabaseIO.html#ab67092adab83d99fca4dffce6e4c1762":[4,0,64,2,20],
"classIogn_1_1DatabaseIO.html#ac5739ee985ef7768a9828d56b7e65133":[4,0,64,2,44],
"classIogn_1_1DatabaseIO.html#ac668eea5a603278f4029797e0ecef72b":[4,0,64,2,16],
"classIogn_1_1DatabaseIO.html#acc9d78bc61d7a1189d39c4848050b92c":[4,0,64,2,56],
"classIogn_1_1DatabaseIO.html#acfe917b575765e8045e9bce9ac83e4f3":[4,0,64,2,28],
"classIogn_1_1DatabaseIO.html#adaeda3abcf261130233d0e34db0c0024":[4,0,64,2,22],
"classIogn_1_1DatabaseIO.html#ae4a0a9b2d8458e3fc4a5496365ff4088":[4,0,64,2,38],
"classIogn_1_1DatabaseIO.html#ae4d037f0c59d8f33078c1468d9fbaf97":[4,0,64,2,0],
"classIogn_1_1DatabaseIO.html#ae56bab2a11f21796f54b70544207197a":[4,0,64,2,13],
"classIogn_1_1DatabaseIO.html#ae57de4f765f5c7446cd787fdece6ff40":[4,0,64,2,60],
"classIogn_1_1DatabaseIO.html#ae62830685d87e86bec496701000c17fa":[4,0,64,2,58],
"classIogn_1_1DatabaseIO.html#aec9e0571cc2a24aa062acaa2c23b60b8":[4,0,64,2,8],
"classIogn_1_1DatabaseIO.html#aede0b2afbf73abf2a2effffffe275d6c":[4,0,64,2,24],
"classIogn_1_1DatabaseIO.html#af218ff0bca527558d2673557f0bd40ea":[4,0,64,2,36],
"classIogn_1_1DatabaseIO.html#af3e3864087448c008a3b7d1ffd9365e0":[4,0,64,2,35],
"classIogn_1_1DatabaseIO.html#af5a0f36dfdcf3f369965bdc96db9624c":[4,0,64,2,52],
"classIogn_1_1DatabaseIO.html#af8d1d087e623fa885a0f53b865319979":[4,0,64,2,62],
"classIogn_1_1ExodusMesh.html":[4,0,64,4],
"classIogn_1_1ExodusMesh.html#a1426cd0ad97c69c7b46b2b1fb271f089":[4,0,64,4,13],
"classIogn_1_1ExodusMesh.html#a196c58a4ebc812247e66825e2ecf0e57":[4,0,64,4,25],
"classIogn_1_1ExodusMesh.html#a1f84298bd61b9fc904a2b194196a7ecd":[4,0,64,4,0],
"classIogn_1_1ExodusMesh.html#a228bc43d440665c9ffa7a971ff2a205d":[4,0,64,4,15],
"classIogn_1_1ExodusMesh.html#a2785e4f6e92f0bc147dd88c0dcac75c7":[4,0,64,4,12],
"classIogn_1_1ExodusMesh.html#a2b8f4f67766498993cde2752e223313f":[4,0,64,4,24],
"classIogn_1_1ExodusMesh.html#a55f1e2d41f7f457491c9f705668e1603":[4,0,64,4,33],
"classIogn_1_1ExodusMesh.html#a57a246a3b031b623b514a577ec0c4c90":[4,0,64,4,8],
"classIogn_1_1ExodusMesh.html#a590d018d89ba2e5799c310cbecdad4bb":[4,0,64,4,23],
"classIogn_1_1ExodusMesh.html#a5dcdfdce5b8df21e6133109fd567cd5a":[4,0,64,4,32],
"classIogn_1_1ExodusMesh.html#a60b7772a5af42b5fb26ae6b0c78863a0":[4,0,64,4,18],
"classIogn_1_1ExodusMesh.html#a6cfaef3ab8a71d133c190aa63d168146":[4,0,64,4,6],
"classIogn_1_1ExodusMesh.html#a6ec05cc82636f3eff7615210cf4b1a8c":[4,0,64,4,9],
"classIogn_1_1ExodusMesh.html#a7721c157ee2f1c84c89c012642d99a27":[4,0,64,4,22],
"classIogn_1_1ExodusMesh.html#a798037e20bd738e9fe4fbea5c25dca93":[4,0,64,4,10],
"classIogn_1_1ExodusMesh.html#a86158efe7fe9c8598f92f0fb0e31a50e":[4,0,64,4,20],
"classIogn_1_1ExodusMesh.html#aa1abff5fd86c3b561bb95cb245202227":[4,0,64,4,27],
"classIogn_1_1ExodusMesh.html#aa4105a915087d145cd9d01b9f0f5248d":[4,0,64,4,2],
"classIogn_1_1ExodusMesh.html#aa51baab6b21b7d6d1e0ce04c4337b127":[4,0,64,4,7],
"classIogn_1_1ExodusMesh.html#ab886006a170dcb74faa6ecd375c1f459":[4,0,64,4,19],
"classIogn_1_1ExodusMesh.html#abc7d9d1de0fa8c9b042b0aadd71df784":[4,0,64,4,1],
"classIogn_1_1ExodusMesh.html#ac78df6eec1ade158f8371c4fa5d30baa":[4,0,64,4,11],
"classIogn_1_1ExodusMesh.html#acfb7baea4bf762edb1076a976a75b055":[4,0,64,4,30],
"classIogn_1_1ExodusMesh.html#ad5497328356b83714c576263bb0f1356":[4,0,64,4,21],
"classIogn_1_1ExodusMesh.html#ad99efa8471084a0e2b8c168674f4e47d":[4,0,64,4,28],
"classIogn_1_1ExodusMesh.html#add86edcc62c60a937435152d5609aed7":[4,0,64,4,16],
"classIogn_1_1ExodusMesh.html#ae6f9c1b7479ec7356fda8d3ec624bdf3":[4,0,64,4,17],
"classIogn_1_1ExodusMesh.html#ae75503dc4e3be1224fc57af8172a75f7":[4,0,64,4,31],
"classIogn_1_1ExodusMesh.html#aeba5d79ab480d02e077c3d5b114f5337":[4,0,64,4,29],
"classIogn_1_1ExodusMesh.html#aebfe20a7233940c8f1bc4ec1244fec69":[4,0,64,4,4],
"classIogn_1_1ExodusMesh.html#aec704d94557fd943f21fc1a050cb6062":[4,0,64,4,14],
"classIogn_1_1ExodusMesh.html#af51819218b717ae55f90e6dd0b8b4f75":[4,0,64,4,5],
"classIogn_1_1ExodusMesh.html#afe3bb8943aeea6598b1c2f7e54f2c532":[4,0,64,4,26],
"classIogn_1_1ExodusMesh.html#aff18e3ce3a36ce1b2c4a7c48f34bb380":[4,0,64,4,3],
"classIogn_1_1GeneratedMesh.html":[4,0,64,5],
"classIogn_1_1GeneratedMesh.html#a019976d10c69f756873e0a6fadf2e8cc":[4,0,64,5,65],
"classIogn_1_1GeneratedMesh.html#a02e95e6ec75bd7ba0f06e6fcdd1e7227":[4,0,64,5,19],
"classIogn_1_1GeneratedMesh.html#a039f2e588989fd56835209f3ca9dd42c":[4,0,64,5,49],
"classIogn_1_1GeneratedMesh.html#a044bd2933b7defc613d2575e0784ead9":[4,0,64,5,78],
"classIogn_1_1GeneratedMesh.html#a0da6689bb52014b75b29bc2422f93931":[4,0,64,5,66],
"classIogn_1_1GeneratedMesh.html#a0e2fdc0a35b15deb16e77d4abb6bbb5e":[4,0,64,5,52],
"classIogn_1_1GeneratedMesh.html#a173f48504d945ec4ad891c29c0e6ac34":[4,0,64,5,8],
"classIogn_1_1GeneratedMesh.html#a1948a31534d670445b661d9a6156045c":[4,0,64,5,67],
"classIogn_1_1GeneratedMesh.html#a198fba99565d1b8a4943fee4cb340dc6":[4,0,64,5,55],
"classIogn_1_1GeneratedMesh.html#a1af1eec83012f46c09bb83ac483fdee5":[4,0,64,5,61],
"classIogn_1_1GeneratedMesh.html#a1c0ed57406d0a57db7138ae065eda366":[4,0,64,5,40],
"classIogn_1_1GeneratedMesh.html#a1d7592877dd54663fd5a863737afca2a":[4,0,64,5,18],
"classIogn_1_1GeneratedMesh.html#a1ef6c65fbbec4f57ed6afde59a0d8bd9":[4,0,64,5,11],
"classIogn_1_1GeneratedMesh.html#a1f3b8743a90effc7e9709249c98abdf0":[4,0,64,5,70],
"classIogn_1_1GeneratedMesh.html#a2542d5e3dfad60f2f721dae5219ca3db":[4,0,64,5,68],
"classIogn_1_1GeneratedMesh.html#a281ca8a27246c8106d3694b4ddf3bb24":[4,0,64,5,75],
"classIogn_1_1GeneratedMesh.html#a29edc9ec94c7f7d496c1d897adbbf4de":[4,0,64,5,47],
"classIogn_1_1GeneratedMesh.html#a2d860ec5ea8c4f52103e699f59bf3802":[4,0,64,5,85],
"classIogn_1_1GeneratedMesh.html#a2e22640f336e797a31806e5850e696d3":[4,0,64,5,4],
"classIogn_1_1GeneratedMesh.html#a3ee445d2299a764c57a2c385f2ce79fd":[4,0,64,5,34],
"classIogn_1_1GeneratedMesh.html#a4356d7dc3c22525095b7154db26af75d":[4,0,64,5,82],
"classIogn_1_1GeneratedMesh.html#a45b80209c9e51e84ed41483101b8e845":[4,0,64,5,16],
"classIogn_1_1GeneratedMesh.html#a48aeb94ab3d67cb0b9d9ab050fdbe1f9":[4,0,64,5,24],
"classIogn_1_1GeneratedMesh.html#a50bf123b6548caa25e31d466eba62daa":[4,0,64,5,73],
"classIogn_1_1GeneratedMesh.html#a596a8ae9cebb5d8ac0855e53d36d6e95":[4,0,64,5,44],
"classIogn_1_1GeneratedMesh.html#a5cb67eb372c3d129a31ae3f9c40c8958":[4,0,64,5,25],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00e":[4,0,64,5,0],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00ea0e3164560744889a3e70ace0217b71dd":[4,0,64,5,0,0],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00ea52d815fa530a74910df05c2a63e82056":[4,0,64,5,0,5],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00eaab7de06ad1d68fb486d80310fd6bd609":[4,0,64,5,0,4],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00ead39695af2d82deb9bb63b6bb0ff15594":[4,0,64,5,0,3],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00eae7b785c0c345a7a665299b2ff2d20953":[4,0,64,5,0,1],
"classIogn_1_1GeneratedMesh.html#a5fda1c9c6eedb56f21a80c70649cf00eaf2f79c3f8f193864691d4ae78b3ddfbf":[4,0,64,5,0,2],
"classIogn_1_1GeneratedMesh.html#a607a532e4e897982015d5e15108bb5b6":[4,0,64,5,56],
"classIogn_1_1GeneratedMesh.html#a60cc36536d183fc61255883887679477":[4,0,64,5,23],
"classIogn_1_1GeneratedMesh.html#a6153d3269cadb8a0074083ae42ec732d":[4,0,64,5,35],
"classIogn_1_1GeneratedMesh.html#a646768c0285a3a86fc8d66416907018a":[4,0,64,5,77],
"classIogn_1_1GeneratedMesh.html#a67a83ff6386e9f39aee35e1604c50cc4":[4,0,64,5,32],
"classIogn_1_1GeneratedMesh.html#a67f12e833eb2ae2a8b0a8ad53b8f2de8":[4,0,64,5,13],
"classIogn_1_1GeneratedMesh.html#a6abf96a7155db5b41acb68c5e5e5bfdf":[4,0,64,5,15],
"classIogn_1_1GeneratedMesh.html#a73d09586d07597a0f6c9afa7536e7254":[4,0,64,5,29],
"classIogn_1_1GeneratedMesh.html#a774f5d2cc3c9d735959121260a05f571":[4,0,64,5,84],
"classIogn_1_1GeneratedMesh.html#a78b029bb950f8c58a6576ead6b4f00f1":[4,0,64,5,81],<|fim▁hole|>"classIogn_1_1GeneratedMesh.html#a7d57a6c1a02f37b387a393f6895ebafd":[4,0,64,5,20],
"classIogn_1_1GeneratedMesh.html#a80b39683b24235fcc06f1b8e1df04e4a":[4,0,64,5,9],
"classIogn_1_1GeneratedMesh.html#a81d1adac34424c042de064e78377e1b4":[4,0,64,5,76],
"classIogn_1_1GeneratedMesh.html#a83a6078a3e0fd8f119a864b694c275b7":[4,0,64,5,36],
"classIogn_1_1GeneratedMesh.html#a854d1a47c083dda94d1d062715abbd99":[4,0,64,5,45],
"classIogn_1_1GeneratedMesh.html#a87d33fb8709f4af5f9cdc68253e52907":[4,0,64,5,39],
"classIogn_1_1GeneratedMesh.html#a88c210ac9a3d7218b2d5b61a320e40dc":[4,0,64,5,74],
"classIogn_1_1GeneratedMesh.html#a8ca47dae9ba3512c1c8f97c553c646dc":[4,0,64,5,38],
"classIogn_1_1GeneratedMesh.html#a8fc90fe583579d2e517886d61921f783":[4,0,64,5,57],
"classIogn_1_1GeneratedMesh.html#a8ff4045f9e46990d7e02444d539b7013":[4,0,64,5,46],
"classIogn_1_1GeneratedMesh.html#a915aaa8cba0e22c372d06453f66821f0":[4,0,64,5,69],
"classIogn_1_1GeneratedMesh.html#a9917d28a15baa87231b8cc22929601f3":[4,0,64,5,83],
"classIogn_1_1GeneratedMesh.html#a99c789006b20196fb83b6e9a801740e0":[4,0,64,5,59],
"classIogn_1_1GeneratedMesh.html#a9e825e7d61b0484d2ad461f3fa24bf09":[4,0,64,5,42],
"classIogn_1_1GeneratedMesh.html#a9fc4cb3c4fde1b63680838e7dda5a019":[4,0,64,5,27],
"classIogn_1_1GeneratedMesh.html#aa79480f29286ba894936d3953f117356":[4,0,64,5,10],
"classIogn_1_1GeneratedMesh.html#aa83160a16f7dde96086c096f9811a5ea":[4,0,64,5,3],
"classIogn_1_1GeneratedMesh.html#aa8fecab82c56a3b0580163418d7c69e8":[4,0,64,5,80],
"classIogn_1_1GeneratedMesh.html#aaaed84638dcfcacc18922d35cc8348ec":[4,0,64,5,30],
"classIogn_1_1GeneratedMesh.html#aaf0d8cfcac9e12393b81d3f2e73f4c43":[4,0,64,5,48],
"classIogn_1_1GeneratedMesh.html#ab14c39196bcfd234b8d737dde5d835b7":[4,0,64,5,31],
"classIogn_1_1GeneratedMesh.html#ab1c9654c9c11407764fe61d6fdf2960f":[4,0,64,5,51],
"classIogn_1_1GeneratedMesh.html#ab2e482c703e0ee0ec2087ea749c64cf3":[4,0,64,5,79],
"classIogn_1_1GeneratedMesh.html#ab509db4f12cf2bcaee8763c56ac6d187":[4,0,64,5,7],
"classIogn_1_1GeneratedMesh.html#aba34619c42e1af678511fd5718781932":[4,0,64,5,43],
"classIogn_1_1GeneratedMesh.html#ac787e9970f88a517c64b943a842910c9":[4,0,64,5,33],
"classIogn_1_1GeneratedMesh.html#ac932df42f2cf2a379e5359be1d9e8d94":[4,0,64,5,5],
"classIogn_1_1GeneratedMesh.html#acab032df8c98616a0553930b344dbf6c":[4,0,64,5,12],
"classIogn_1_1GeneratedMesh.html#acb6607d069e9c847c6ea27929758e706":[4,0,64,5,58],
"classIogn_1_1GeneratedMesh.html#acd84560dcba7bffdbcdbe4c12318450f":[4,0,64,5,50],
"classIogn_1_1GeneratedMesh.html#acf0b8edd6835b477db3fb891ec873cc0":[4,0,64,5,63],
"classIogn_1_1GeneratedMesh.html#ad7301d73848b20e2590b56763de521b8":[4,0,64,5,2],
"classIogn_1_1GeneratedMesh.html#ada5d7475f9b4cc33312181db5802038b":[4,0,64,5,22],
"classIogn_1_1GeneratedMesh.html#ada65bf0562967a55ab5015cabfcd31bc":[4,0,64,5,72],
"classIogn_1_1GeneratedMesh.html#adc70aa944b8e55eb598e1ada8fb003ec":[4,0,64,5,26],
"classIogn_1_1GeneratedMesh.html#add032602121f08555548d8c8a58d0ba7":[4,0,64,5,6],
"classIogn_1_1GeneratedMesh.html#ae63bcfa4dec36e883b5c1333154f389b":[4,0,64,5,37],
"classIogn_1_1GeneratedMesh.html#ae7415cdfce392fc2c993f20fc258eb9e":[4,0,64,5,17],
"classIogn_1_1GeneratedMesh.html#ae872b2bc88870537de38c044e3effcdc":[4,0,64,5,21],
"classIogn_1_1GeneratedMesh.html#ae8c57a72f98f9a3c31f33a79836cdf51":[4,0,64,5,64],
"classIogn_1_1GeneratedMesh.html#ae95a0d0eed44d809e04e839abfd9995f":[4,0,64,5,54],
"classIogn_1_1GeneratedMesh.html#af0a5cb6153f5389b4d733cd1b2eea1ea":[4,0,64,5,60],
"classIogn_1_1GeneratedMesh.html#af128caee4ea3af583b8e585540559945":[4,0,64,5,28],
"classIogn_1_1GeneratedMesh.html#af8f492bc76477998966f104debc98d0b":[4,0,64,5,53],
"classIogn_1_1GeneratedMesh.html#af9dde723f32dbd382071ca48cd3603d8":[4,0,64,5,41],
"classIogn_1_1GeneratedMesh.html#afa54f23882f6ce165c0faa35ca90e0ea":[4,0,64,5,14],
"classIogn_1_1GeneratedMesh.html#afb85c3f28b89a17725a0fae633f1e5a3":[4,0,64,5,1],
"classIogn_1_1GeneratedMesh.html#aff06ce6326497f17d293173f117fd6eb":[4,0,64,5,62],
"classIogn_1_1IOFactory.html":[4,0,64,6],
"classIogn_1_1IOFactory.html#a40e5178c58ee47278e5834961cd2445e":[4,0,64,6,2],
"classIogn_1_1IOFactory.html#a5c5ea69ac3f7d0697e533b1e2272a964":[4,0,64,6,0],
"classIogn_1_1IOFactory.html#aa48939456bf85eb76980a38a7b2acdff":[4,0,64,6,1],
"classIohb_1_1DatabaseIO.html":[4,0,65,0],
"classIohb_1_1DatabaseIO.html#a0425afc6a1c3836a00bcd524385f3b5a":[4,0,65,0,48],
"classIohb_1_1DatabaseIO.html#a06498f2ca86695f8834d36f027bcee45":[4,0,65,0,16],
"classIohb_1_1DatabaseIO.html#a11a475218eb43a9515f1aa02920e4e8e":[4,0,65,0,33],
"classIohb_1_1DatabaseIO.html#a149339d6f87ed1b831a78a531288b589":[4,0,65,0,23],
"classIohb_1_1DatabaseIO.html#a14979888aa691450b6cdc5ba6ce06f4d":[4,0,65,0,45],
"classIohb_1_1DatabaseIO.html#a1540090f1e82377b6fded5e37258ba69":[4,0,65,0,35],
"classIohb_1_1DatabaseIO.html#a171d4690a7721dae6fd5381666f57a04":[4,0,65,0,53],
"classIohb_1_1DatabaseIO.html#a1ed98178daf9881a9fd4905253b4a34a":[4,0,65,0,9],
"classIohb_1_1DatabaseIO.html#a222031fa340da9493affabb6e0fb8850":[4,0,65,0,36],
"classIohb_1_1DatabaseIO.html#a2e6e23b7b5f46d63ae168a40b5e85512":[4,0,65,0,26],
"classIohb_1_1DatabaseIO.html#a2f85e187e5aa6685d08614585b32eb58":[4,0,65,0,28],
"classIohb_1_1DatabaseIO.html#a334bc9a842b1c16e877f49ecefda6439":[4,0,65,0,37],
"classIohb_1_1DatabaseIO.html#a372df18e1843d7f40a246cc891d6063f":[4,0,65,0,14],
"classIohb_1_1DatabaseIO.html#a3beb94420150ba18cbe524224e09c171":[4,0,65,0,43],
"classIohb_1_1DatabaseIO.html#a3d2c9c0bc0e681c6e2f2e7a4d85ac5f5":[4,0,65,0,52],
"classIohb_1_1DatabaseIO.html#a3f4e7d713e2ca1262bf917e8d62f0397":[4,0,65,0,20],
"classIohb_1_1DatabaseIO.html#a409f72d2949f834ff1ee9a3544c019bc":[4,0,65,0,1],
"classIohb_1_1DatabaseIO.html#a4555c992e67be7ab9bc2e3435c4872c0":[4,0,65,0,8],
"classIohb_1_1DatabaseIO.html#a4a5e3ca261a6c679496f40b780945872":[4,0,65,0,34],
"classIohb_1_1DatabaseIO.html#a55f12c7379af5e04a6be07ec75ff5252":[4,0,65,0,40],
"classIohb_1_1DatabaseIO.html#a5b7db18a777b445caa1880871f86581b":[4,0,65,0,11],
"classIohb_1_1DatabaseIO.html#a60b6274217cb4675a11fad99797efec5":[4,0,65,0,41],
"classIohb_1_1DatabaseIO.html#a61deb9361614d19a550b21ff5e8b8d51":[4,0,65,0,32],
"classIohb_1_1DatabaseIO.html#a62086b17aff53d4bc28db295f535b026":[4,0,65,0,3],
"classIohb_1_1DatabaseIO.html#a6295069bf27f1fba8787abe863aebe32":[4,0,65,0,51],
"classIohb_1_1DatabaseIO.html#a67430c1bb08dffa5c41115cc1c08f759":[4,0,65,0,38],
"classIohb_1_1DatabaseIO.html#a6f51330130211da88fed3f6c7109dfe2":[4,0,65,0,0],
"classIohb_1_1DatabaseIO.html#a74f8f1c5f18f07c06f725511b55bc2a3":[4,0,65,0,7],
"classIohb_1_1DatabaseIO.html#a75ab126ae48a989bc5c4b918f72c1a07":[4,0,65,0,17],
"classIohb_1_1DatabaseIO.html#a75cc65cc986e2eb24cc915ab33183d80":[4,0,65,0,29],
"classIohb_1_1DatabaseIO.html#a75df47c7782cbe86e68696a77dee0f1e":[4,0,65,0,50],
"classIohb_1_1DatabaseIO.html#a7840bb08b4fd252a1824bd34c3fdd143":[4,0,65,0,13],
"classIohb_1_1DatabaseIO.html#a80eb0c0374fda330f9f16965bcfec5a0":[4,0,65,0,30],
"classIohb_1_1DatabaseIO.html#a85c7d1a98d7adc8205a95ade7bbc3f8b":[4,0,65,0,21],
"classIohb_1_1DatabaseIO.html#a8a7e9ea6c9ee4886245cf9854b30ca92":[4,0,65,0,12],
"classIohb_1_1DatabaseIO.html#a940b15793476c848161853b0c399f8af":[4,0,65,0,44],
"classIohb_1_1DatabaseIO.html#a9821d1a682d3e3a0292e4a5b0ce0c374":[4,0,65,0,31],
"classIohb_1_1DatabaseIO.html#a9d78c07810fbaa22e9729eb191d45752":[4,0,65,0,5],
"classIohb_1_1DatabaseIO.html#aa4259c0205b7338e36e8ebf5ed142262":[4,0,65,0,4],
"classIohb_1_1DatabaseIO.html#aa7d1cab20bcb347e2a10e4a699b4bb01":[4,0,65,0,19],
"classIohb_1_1DatabaseIO.html#aacbf1478e80114cf69770b2c4d868f70":[4,0,65,0,55],
"classIohb_1_1DatabaseIO.html#ab890a36edcf5f9cd61f255cbe14058b2":[4,0,65,0,54],
"classIohb_1_1DatabaseIO.html#ab96a03cea2a276884b7e5a60d5730c48":[4,0,65,0,56],
"classIohb_1_1DatabaseIO.html#abefb27b6cb9ab7fbfae439b9023dbeb6":[4,0,65,0,6],
"classIohb_1_1DatabaseIO.html#ac0f3172b2fc161f2208945788ea85d65":[4,0,65,0,49],
"classIohb_1_1DatabaseIO.html#ac2ce5a3711300a43f9b9381e472bd625":[4,0,65,0,15],
"classIohb_1_1DatabaseIO.html#ac6d14b0261a07dd05402a149194a435b":[4,0,65,0,18],
"classIohb_1_1DatabaseIO.html#ad1fd4fa960591553086edff8a6ee20e4":[4,0,65,0,10],
"classIohb_1_1DatabaseIO.html#add79581e85907669ee33c797e633b9c8":[4,0,65,0,25],
"classIohb_1_1DatabaseIO.html#adf759eddbe96e0b09e6807ea2680a2f2":[4,0,65,0,2],
"classIohb_1_1DatabaseIO.html#ae4cbd1877902f335396d1aa5b1e4218d":[4,0,65,0,47],
"classIohb_1_1DatabaseIO.html#ae8ffa290f634ae6ac7a016ee03abd1d3":[4,0,65,0,22],
"classIohb_1_1DatabaseIO.html#ae9a0e90a733b8b1e75810f5c63e39edd":[4,0,65,0,24],
"classIohb_1_1DatabaseIO.html#aec1bc1b47445d567a2ec33881a493984":[4,0,65,0,27],
"classIohb_1_1DatabaseIO.html#af2679b5df217d772129357bc2a648914":[4,0,65,0,39],
"classIohb_1_1DatabaseIO.html#afa89fbf0d222beee5ea4f0dc64790f2e":[4,0,65,0,46],
"classIohb_1_1DatabaseIO.html#afc13975ead75040d3f6e4fe1ce8dc778":[4,0,65,0,42],
"classIohb_1_1IOFactory.html":[4,0,65,1],
"classIohb_1_1IOFactory.html#a2433d4de9ffa2d11dab63d4a48ea5c4c":[4,0,65,1,1],
"classIohb_1_1IOFactory.html#ab892fb1738380590739d96fe1263e2c1":[4,0,65,1,0],
"classIohb_1_1IOFactory.html#ad56f59e03b21fbeabe99904ac286c436":[4,0,65,1,2],
"classIohb_1_1Layout.html":[4,0,65,2],
"classIohb_1_1Layout.html#a008a95e0563088aa81320b5bcd1c8222":[4,0,65,2,4],
"classIohb_1_1Layout.html#a112bbc36bd1e272ba407647d1d9174f6":[4,0,65,2,21],
"classIohb_1_1Layout.html#a193e36c297502e28fe14d1d827dfc078":[4,0,65,2,20],
"classIohb_1_1Layout.html#a1ce5bece951fce1536aed3f2dfc28a29":[4,0,65,2,14],
"classIohb_1_1Layout.html#a1d169dbf45b68b1a722dcb787cf04eea":[4,0,65,2,8],
"classIohb_1_1Layout.html#a1e708b18a1911d8e1267ccd753bc3594":[4,0,65,2,15],
"classIohb_1_1Layout.html#a239fc494da656cd053af23b3d783db75":[4,0,65,2,0],
"classIohb_1_1Layout.html#a5db9e63309152d635f747ec568858ab4":[4,0,65,2,18],
"classIohb_1_1Layout.html#a65cd690cb4527012323ddf8cf781959a":[4,0,65,2,12],
"classIohb_1_1Layout.html#a68f78edcffaa448b8d785723840af94a":[4,0,65,2,11],
"classIohb_1_1Layout.html#a729244eba239301a4d18f70775bc8a7e":[4,0,65,2,3],
"classIohb_1_1Layout.html#a735fee98692c08c54500e414a0f411e3":[4,0,65,2,2]
};<|fim▁end|> | "classIogn_1_1GeneratedMesh.html#a78ef32eaeec128564a364248a33f3dbf":[4,0,64,5,71], |
<|file_name|>_mod1_0_1_0_0_4.py<|end_file_name|><|fim▁begin|>name1_0_1_0_0_4_0 = None
name1_0_1_0_0_4_1 = None
name1_0_1_0_0_4_2 = None
name1_0_1_0_0_4_3 = None
<|fim▁hole|><|fim▁end|> | name1_0_1_0_0_4_4 = None |
<|file_name|>AutoComplete.py<|end_file_name|><|fim▁begin|>"""AutoComplete.py - An IDLE extension for automatically completing names.
This extension can complete either attribute names of file names. It can pop
a window with all available names, for the user to select from.
"""
import os
import sys
import string
from configHandler import idleConf
import AutoCompleteWindow
from HyperParser import HyperParser
import __main__
# This string includes all chars that may be in a file name (without a path
# separator)
FILENAME_CHARS = string.ascii_letters + string.digits + os.curdir + "._~#$:-"
# This string includes all chars that may be in an identifier
ID_CHARS = string.ascii_letters + string.digits + "_"
# These constants represent the two different types of completions
COMPLETE_ATTRIBUTES, COMPLETE_FILES = range(1, 2+1)
SEPS = os.sep
if os.altsep: # e.g. '/' on Windows...
SEPS += os.altsep
class AutoComplete:
menudefs = [
('edit', [
("Show Completions", "<<force-open-completions>>"),
])
]
popupwait = idleConf.GetOption("extensions", "AutoComplete",
"popupwait", type="int", default=0)
def __init__(self, editwin=None):
self.editwin = editwin
if editwin is None: # subprocess and test
return
self.text = editwin.text
self.autocompletewindow = None
# id of delayed call, and the index of the text insert when the delayed
# call was issued. If _delayed_completion_id is None, there is no
# delayed call.
self._delayed_completion_id = None
self._delayed_completion_index = None
def _make_autocomplete_window(self):
return AutoCompleteWindow.AutoCompleteWindow(self.text)
def _remove_autocomplete_window(self, event=None):
if self.autocompletewindow:
self.autocompletewindow.hide_window()
self.autocompletewindow = None
def force_open_completions_event(self, event):
"""Happens when the user really wants to open a completion list, even
if a function call is needed.
"""
self.open_completions(True, False, True)
def try_open_completions_event(self, event):
"""Happens when it would be nice to open a completion list, but not
        really necessary, for example after a dot, so function
calls won't be made.
"""
lastchar = self.text.get("insert-1c")
if lastchar == ".":
self._open_completions_later(False, False, False,
COMPLETE_ATTRIBUTES)
elif lastchar in SEPS:
self._open_completions_later(False, False, False,
COMPLETE_FILES)
def autocomplete_event(self, event):
"""Happens when the user wants to complete his word, and if neccesary,
open a completion list after that (if there is more than one
completion)
"""
if hasattr(event, "mc_state") and event.mc_state:
# A modifier was pressed along with the tab, continue as usual.
return
if self.autocompletewindow and self.autocompletewindow.is_active():
self.autocompletewindow.complete()
return "break"
else:
opened = self.open_completions(False, True, True)
if opened:
return "break"
def _open_completions_later(self, *args):
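        # Debounce: remember where the insert point was and only open the popup
        # after `popupwait` ms, cancelling any previously scheduled attempt.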
self._delayed_completion_index = self.text.index("insert")
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = \
self.text.after(self.popupwait, self._delayed_open_completions,
*args)
def _delayed_open_completions(self, *args):
self._delayed_completion_id = None
if self.text.index("insert") != self._delayed_completion_index:
return
self.open_completions(*args)
def open_completions(self, evalfuncs, complete, userWantsWin, mode=None):<|fim▁hole|> if complete is True, then if there's nothing to complete and no
start of completion, won't open completions and return False.
If mode is given, will open a completion list only in this mode.
"""
# Cancel another delayed call, if it exists.
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = None
hp = HyperParser(self.editwin, "insert")
curline = self.text.get("insert linestart", "insert")
i = j = len(curline)
if hp.is_in_string() and (not mode or mode==COMPLETE_FILES):
self._remove_autocomplete_window()
mode = COMPLETE_FILES
while i and curline[i-1] in FILENAME_CHARS:
i -= 1
comp_start = curline[i:j]
j = i
while i and curline[i-1] in FILENAME_CHARS + SEPS:
i -= 1
comp_what = curline[i:j]
elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES):
self._remove_autocomplete_window()
mode = COMPLETE_ATTRIBUTES
while i and curline[i-1] in ID_CHARS:
i -= 1
comp_start = curline[i:j]
if i and curline[i-1] == '.':
hp.set_index("insert-%dc" % (len(curline)-(i-1)))
comp_what = hp.get_expression()
if not comp_what or \
(not evalfuncs and comp_what.find('(') != -1):
return
else:
comp_what = ""
else:
return
if complete and not comp_what and not comp_start:
return
comp_lists = self.fetch_completions(comp_what, mode)
if not comp_lists[0]:
return
self.autocompletewindow = self._make_autocomplete_window()
self.autocompletewindow.show_window(comp_lists,
"insert-%dc" % len(comp_start),
complete,
mode,
userWantsWin)
return True
def fetch_completions(self, what, mode):
"""Return a pair of lists of completions for something. The first list
is a sublist of the second. Both are sorted.
If there is a Python subprocess, get the comp. list there. Otherwise,
either fetch_completions() is running in the subprocess itself or it
was called in an IDLE EditorWindow before any script had been run.
The subprocess environment is that of the most recently run script. If
two unrelated modules are being edited some calltips in the current
module may be inoperative if the module was not the last to run.
"""
try:
rpcclt = self.editwin.flist.pyshell.interp.rpcclt
except:
rpcclt = None
if rpcclt:
return rpcclt.remotecall("exec", "get_the_completion_list",
(what, mode), {})
else:
if mode == COMPLETE_ATTRIBUTES:
if what == "":
namespace = __main__.__dict__.copy()
namespace.update(__main__.__builtins__.__dict__)
bigl = eval("dir()", namespace)
bigl.sort()
if "__all__" in bigl:
smalll = eval("__all__", namespace)
smalll.sort()
else:
smalll = filter(lambda s: s[:1] != '_', bigl)
else:
try:
entity = self.get_entity(what)
bigl = dir(entity)
bigl.sort()
if "__all__" in bigl:
smalll = entity.__all__
smalll.sort()
else:
smalll = filter(lambda s: s[:1] != '_', bigl)
except:
return [], []
elif mode == COMPLETE_FILES:
if what == "":
what = "."
try:
expandedpath = os.path.expanduser(what)
bigl = os.listdir(expandedpath)
bigl.sort()
smalll = filter(lambda s: s[:1] != '.', bigl)
except OSError:
return [], []
if not smalll:
smalll = bigl
return smalll, bigl
def get_entity(self, name):
"""Lookup name in a namespace spanning sys.modules and __main.dict__"""
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
return eval(name, namespace)<|fim▁end|> | """Find the completions and create the AutoCompleteWindow.
Return True if successful (no syntax error or so found). |
<|file_name|>rest-builder.test.js<|end_file_name|><|fim▁begin|>var assert = require('assert');
var RequestBuilder = require('../lib/rest-builder');
describe('REST Request Builder', function () {
describe('Request templating', function () {
var server = null;
before(function (done) {
var express = require('express');
var app = express();
app.configure(function () {
app.set('port', process.env.PORT || 3000);
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
app.use(express.favicon());
// app.use(express.logger('dev'));
app.use(express.bodyParser());
app.use(express.methodOverride());
app.use(app.router);
});
app.all('*', function (req, res, next) {
res.setHeader('Content-Type', 'application/json');
var payload = {
method: req.method,
url: req.url,
headers: req.headers,
query: req.query,
body: req.body
};
res.json(200, payload);
});
server = app.listen(app.get('port'), function (err, data) {
// console.log('Server listening on ', app.get('port'));
done(err, data);
});
});
after(function(done) {
server && server.close(done);
});
it('should substitute the variables', function (done) {
var builder = new RequestBuilder('GET', 'http://localhost:3000/{p}').query({x: '{x}', y: 2});
builder.invoke({p: 1, x: 'X'},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(body.query.x, 'X');
assert.equal(body.query.y, 2);
done(err, body);
});
});
it('should support default variables', function (done) {
var builder = new RequestBuilder('GET', 'http://localhost:3000/{p=100}').query({x: '{x=ME}', y: 2});
builder.invoke({p: 1},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal('ME', body.query.x);
assert.equal(2, body.query.y);
done(err, body);
});
});
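    // Placeholder grammar exercised below: '{name=default:type}' supplies a
    // default and coerces the value; '!' or '^' prefixes mark required variables.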
it('should support typed variables', function (done) {
var builder = new RequestBuilder('POST', 'http://localhost:3000/{p=100}').query({x: '{x=100:number}', y: 2})
.body({a: '{a=1:number}', b: '{b=true:boolean}'});
builder.invoke({p: 1, a: 100, b: false},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal(100, body.query.x);
assert.equal(2, body.query.y);
assert.equal(100, body.body.a);
assert.equal(false, body.body.b);
done(err, body);
});
});
it('should report missing required variables', function (done) {
var builder = new RequestBuilder('POST', 'http://localhost:3000/{!p}').query({x: '{x=100:number}', y: 2})
.body({a: '{^a:number}', b: '{!b=true:boolean}'});
try {
builder.invoke({a: 100, b: false},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
done(err, body);
});
assert.fail();
} catch(err) {
// This is expected
done(null, null);
}
});
it('should support required variables', function (done) {
var builder = new RequestBuilder('POST', 'http://localhost:3000/{!p}').query({x: '{x=100:number}', y: 2})
.body({a: '{^a:number}', b: '{!b=true:boolean}'});
builder.invoke({p: 1, a: 100, b: false},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal(100, body.query.x);
assert.equal(2, body.query.y);
assert.equal(100, body.body.a);
assert.equal(false, body.body.b);
done(err, body);
});
});
<|fim▁hole|>
fn(1, 'X',
function (err, body, response) {
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal('X', body.query.x);
assert.equal(2, body.query.y);
// console.log(body);
done(err, body);
});
});
it('should build an operation with the parameter names as args', function (done) {
var builder = new RequestBuilder('POST', 'http://localhost:3000/{p}').query({x: '{x}', y: 2});
var fn = builder.operation('p', 'x');
fn(1, 'X',
function (err, body, response) {
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal('X', body.query.x);
assert.equal(2, body.query.y);
// console.log(body);
done(err, body);
});
});
it('should build from a json doc', function (done) {
var builder = new RequestBuilder(require('./request-template.json'));
// console.log(builder.parse());
builder.invoke({p: 1, a: 100, b: false},
function (err, body, response) {
// console.log(response.headers);
assert.equal(200, response.statusCode);
if (typeof body === 'string') {
body = JSON.parse(body);
}
// console.log(body);
assert.equal(0, body.url.indexOf('/1'));
assert.equal(100, body.query.x);
assert.equal(2, body.query.y);
assert.equal(100, body.body.a);
assert.equal(false, body.body.b);
done(err, body);
});
});
});
});<|fim▁end|> | it('should build an operation with the parameter names', function (done) {
var builder = new RequestBuilder('POST', 'http://localhost:3000/{p}').query({x: '{x}', y: 2});
var fn = builder.operation(['p', 'x']); |
<|file_name|>batch_tests.py<|end_file_name|><|fim▁begin|>import itertools
from batchy.runloop import coro_return, runloop_coroutine
from batchy.batch_coroutine import batch_coroutine, class_batch_coroutine
from . import BaseTestCase
CALL_COUNT = 0
@batch_coroutine()
def increment(arg_lists):
def increment_single(n):
return n + 1<|fim▁hole|> yield
@batch_coroutine(accepts_kwargs=False)
def increment_nokwargs(arg_lists):
global CALL_COUNT
CALL_COUNT += 1
coro_return(list(itertools.starmap(lambda _n: _n + 1, arg_lists)))
yield
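# Test double that counts how often each batched method body actually runs, so
# the tests can assert that concurrent calls were coalesced into single batches.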
class BatchClient(object):
def __init__(self):
self.get_call_count = 0
self.set_call_count = 0
self.run_call_count = 0
self.throw_count = 0
@class_batch_coroutine(1)
def get(self, arg_lists):
self.get_call_count += 1
yield self.run()
coro_return([0] * len(arg_lists))
@class_batch_coroutine(1)
def set(self, _):
self.set_call_count += 1
yield self.run()
@class_batch_coroutine(0)
def run(self, _):
self.run_call_count += 1
yield
@class_batch_coroutine(0)
def throw(self, _):
self.throw_count += 1
raise ValueError()
yield # pylint: disable-msg=W0101
@class_batch_coroutine(2)
def throw_sooner(self, _):
self.throw_count += 1
raise ValueError()
yield # pylint: disable-msg=W0101
def reset(self):
self.get_call_count = self.set_call_count = self.run_call_count = self.throw_count = 0
class BatchTests(BaseTestCase):
def setup(self):
global CALL_COUNT
CALL_COUNT = 0
def test_simple_batch(self):
@runloop_coroutine()
def test():
a, b, c = yield increment(1), increment(2), increment(3)
coro_return((a, b, c))
self.assert_equals((2,3,4), test())
self.assert_equals(1, CALL_COUNT)
def test_batch_no_kwargs(self):
@runloop_coroutine()
def test():
a, b, c = yield increment_nokwargs(1), increment_nokwargs(2), increment_nokwargs(3)
coro_return((a, b, c))
self.assert_equals((2,3,4), test())
self.assert_equals(1, CALL_COUNT)
def test_multi_clients(self):
client1, client2 = BatchClient(), BatchClient()
@runloop_coroutine()
def sub_1(client):
rv = yield client.get()
yield client.set()
coro_return(rv)
@runloop_coroutine()
def sub_2(client):
rv = yield client.get()
yield client.set()
coro_return(rv)
@runloop_coroutine()
def test1():
rv = yield sub_1(client1), sub_2(client2)
coro_return(rv)
test1()
self.assert_equal(1, client1.get_call_count)
self.assert_equal(1, client1.set_call_count)
self.assert_equal(2, client1.run_call_count)
self.assert_equal(1, client2.get_call_count)
self.assert_equal(1, client2.set_call_count)
self.assert_equal(2, client2.run_call_count)
client1.reset()
client2.reset()
@runloop_coroutine()
def test2():
rv = yield sub_1(client1), sub_2(client1)
coro_return(rv)
test2()
self.assert_equal(1, client1.get_call_count)
self.assert_equal(1, client1.set_call_count)
self.assert_equal(2, client1.run_call_count)
self.assert_equal(0, client2.get_call_count)
self.assert_equal(0, client2.set_call_count)
self.assert_equal(0, client2.run_call_count)
def test_exception(self):
client = BatchClient()
@runloop_coroutine()
def action_1():
yield client.throw()
@runloop_coroutine()
def action_2():
yield client.get('a')
yield client.throw()
@runloop_coroutine()
def test():
yield action_1(), action_1(), action_2()
self.assert_raises(ValueError, test)
def test_exception_sooner(self):
client = BatchClient()
@runloop_coroutine()
def action_1():
yield client.throw_sooner()
@runloop_coroutine()
def action_2():
yield client.get('a')
yield client.throw_sooner()
@runloop_coroutine()
def test():
yield action_1(), action_1(), action_2()
self.assert_raises(ValueError, test)<|fim▁end|> |
global CALL_COUNT
CALL_COUNT += 1
coro_return([increment_single(*ar, **kw) for ar, kw in arg_lists]) |
<|file_name|>TextIO.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | TextIO |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import os
import amo.search
from .models import Reindexing
from django.core.management.base import CommandError
# shortcut functions<|fim▁hole|>is_reindexing_amo = Reindexing.objects.is_reindexing_amo
flag_reindexing_amo = Reindexing.objects.flag_reindexing_amo
unflag_reindexing_amo = Reindexing.objects.unflag_reindexing_amo
get_indices = Reindexing.objects.get_indices
def index_objects(ids, model, search, index=None, transforms=None):
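    # While a reindex is in flight an alias may map to several indices, so each
    # extracted document is written to every index returned for the alias.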
if index is None:
index = model._get_index()
indices = Reindexing.objects.get_indices(index)
if transforms is None:
transforms = []
qs = model.objects.no_cache().filter(id__in=ids)
for t in transforms:
qs = qs.transform(t)
for ob in qs:
data = search.extract(ob)
for index in indices:
model.index(data, bulk=True, id=ob.id, index=index)
amo.search.get_es().flush_bulk(forced=True)
def raise_if_reindex_in_progress(site):
"""Checks if the database indexation flag is on for the given site.
If it's on, and if no "FORCE_INDEXING" variable is present in the env,
raises a CommandError.
"""
already_reindexing = Reindexing.objects._is_reindexing(site)
if already_reindexing and 'FORCE_INDEXING' not in os.environ:
raise CommandError("Indexation already occuring. Add a FORCE_INDEXING "
"variable in the environ to force it")<|fim▁end|> | |
<|file_name|>cellNavigationService.js<|end_file_name|><|fim▁begin|>/**
* ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v14.0.0
* @link http://www.ag-grid.com/
* @license MIT
*/
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
var context_1 = require("./context/context");
var constants_1 = require("./constants");
var columnController_1 = require("./columnController/columnController");
var utils_1 = require("./utils");
var gridRow_1 = require("./entities/gridRow");
var gridCell_1 = require("./entities/gridCell");
var gridOptionsWrapper_1 = require("./gridOptionsWrapper");
var pinnedRowModel_1 = require("./rowModels/pinnedRowModel");
var CellNavigationService = (function () {
function CellNavigationService() {
}
// returns null if no cell to focus on, ie at the end of the grid
CellNavigationService.prototype.getNextCellToFocus = function (key, lastCellToFocus) {
// starting with the provided cell, we keep moving until we find a cell we can
// focus on.
var pointer = lastCellToFocus;
var finished = false;
// finished will be true when either:
// a) cell found that we can focus on
// b) run out of cells (ie the method returns null)
while (!finished) {
switch (key) {
case constants_1.Constants.KEY_UP:
pointer = this.getCellAbove(pointer);
break;
case constants_1.Constants.KEY_DOWN:
pointer = this.getCellBelow(pointer);
break;
case constants_1.Constants.KEY_RIGHT:
if (this.gridOptionsWrapper.isEnableRtl()) {
pointer = this.getCellToLeft(pointer);
}
else {
pointer = this.getCellToRight(pointer);
}
break;
case constants_1.Constants.KEY_LEFT:
if (this.gridOptionsWrapper.isEnableRtl()) {
pointer = this.getCellToRight(pointer);
}
else {
pointer = this.getCellToLeft(pointer);
}
break;
default:
console.log('ag-Grid: unknown key for navigation ' + key);
pointer = null;
break;
}
if (pointer) {
finished = this.isCellGoodToFocusOn(pointer);
}
else {
finished = true;
}
}
return pointer;
};
CellNavigationService.prototype.isCellGoodToFocusOn = function (gridCell) {
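        // resolve the row from whichever container holds it (pinned top,
        // pinned bottom, or the main body) before checking navigability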
var column = gridCell.column;
var rowNode;
switch (gridCell.floating) {
case constants_1.Constants.PINNED_TOP:
rowNode = this.pinnedRowModel.getPinnedTopRow(gridCell.rowIndex);
break;
case constants_1.Constants.PINNED_BOTTOM:
rowNode = this.pinnedRowModel.getPinnedBottomRow(gridCell.rowIndex);
break;
default:
rowNode = this.rowModel.getRow(gridCell.rowIndex);
break;
}
var suppressNavigable = column.isSuppressNavigable(rowNode);
return !suppressNavigable;
};
CellNavigationService.prototype.getCellToLeft = function (lastCell) {
var colToLeft = this.columnController.getDisplayedColBefore(lastCell.column);
if (!colToLeft) {
return null;
}
else {
var gridCellDef = { rowIndex: lastCell.rowIndex, column: colToLeft, floating: lastCell.floating };
return new gridCell_1.GridCell(gridCellDef);
}
};
CellNavigationService.prototype.getCellToRight = function (lastCell) {
var colToRight = this.columnController.getDisplayedColAfter(lastCell.column);
// if already on right, do nothing
if (!colToRight) {
return null;
}
else {
var gridCellDef = { rowIndex: lastCell.rowIndex, column: colToRight, floating: lastCell.floating };
return new gridCell_1.GridCell(gridCellDef);
}<|fim▁hole|> if (lastRow.isFloatingBottom()) {
return null;
}
else if (lastRow.isNotFloating()) {
if (this.pinnedRowModel.isRowsToRender(constants_1.Constants.PINNED_BOTTOM)) {
return new gridRow_1.GridRow(0, constants_1.Constants.PINNED_BOTTOM);
}
else {
return null;
}
}
else {
if (this.rowModel.isRowsToRender()) {
return new gridRow_1.GridRow(0, null);
}
else if (this.pinnedRowModel.isRowsToRender(constants_1.Constants.PINNED_BOTTOM)) {
return new gridRow_1.GridRow(0, constants_1.Constants.PINNED_BOTTOM);
}
else {
return null;
}
}
}
else {
return new gridRow_1.GridRow(lastRow.rowIndex + 1, lastRow.floating);
}
};
CellNavigationService.prototype.getCellBelow = function (lastCell) {
var rowBelow = this.getRowBelow(lastCell.getGridRow());
if (rowBelow) {
var gridCellDef = { rowIndex: rowBelow.rowIndex, column: lastCell.column, floating: rowBelow.floating };
return new gridCell_1.GridCell(gridCellDef);
}
else {
return null;
}
};
CellNavigationService.prototype.isLastRowInContainer = function (gridRow) {
if (gridRow.isFloatingTop()) {
var lastTopIndex = this.pinnedRowModel.getPinnedTopRowData().length - 1;
return lastTopIndex <= gridRow.rowIndex;
}
else if (gridRow.isFloatingBottom()) {
var lastBottomIndex = this.pinnedRowModel.getPinnedBottomRowData().length - 1;
return lastBottomIndex <= gridRow.rowIndex;
}
else {
var lastBodyIndex = this.rowModel.getPageLastRow();
return lastBodyIndex <= gridRow.rowIndex;
}
};
CellNavigationService.prototype.getRowAbove = function (lastRow) {
// if already on top row, do nothing
if (lastRow.rowIndex === 0) {
if (lastRow.isFloatingTop()) {
return null;
}
else if (lastRow.isNotFloating()) {
if (this.pinnedRowModel.isRowsToRender(constants_1.Constants.PINNED_TOP)) {
return this.getLastFloatingTopRow();
}
else {
return null;
}
}
else {
// last floating bottom
if (this.rowModel.isRowsToRender()) {
return this.getLastBodyCell();
}
else if (this.pinnedRowModel.isRowsToRender(constants_1.Constants.PINNED_TOP)) {
return this.getLastFloatingTopRow();
}
else {
return null;
}
}
}
else {
return new gridRow_1.GridRow(lastRow.rowIndex - 1, lastRow.floating);
}
};
CellNavigationService.prototype.getCellAbove = function (lastCell) {
var rowAbove = this.getRowAbove(lastCell.getGridRow());
if (rowAbove) {
var gridCellDef = { rowIndex: rowAbove.rowIndex, column: lastCell.column, floating: rowAbove.floating };
return new gridCell_1.GridCell(gridCellDef);
}
else {
return null;
}
};
CellNavigationService.prototype.getLastBodyCell = function () {
var lastBodyRow = this.rowModel.getPageLastRow();
return new gridRow_1.GridRow(lastBodyRow, null);
};
CellNavigationService.prototype.getLastFloatingTopRow = function () {
var lastFloatingRow = this.pinnedRowModel.getPinnedTopRowData().length - 1;
return new gridRow_1.GridRow(lastFloatingRow, constants_1.Constants.PINNED_TOP);
};
CellNavigationService.prototype.getNextTabbedCell = function (gridCell, backwards) {
if (backwards) {
return this.getNextTabbedCellBackwards(gridCell);
}
else {
return this.getNextTabbedCellForwards(gridCell);
}
};
CellNavigationService.prototype.getNextTabbedCellForwards = function (gridCell) {
var displayedColumns = this.columnController.getAllDisplayedColumns();
var newRowIndex = gridCell.rowIndex;
var newFloating = gridCell.floating;
// move along to the next cell
var newColumn = this.columnController.getDisplayedColAfter(gridCell.column);
// check if end of the row, and if so, go forward a row
if (!newColumn) {
newColumn = displayedColumns[0];
var rowBelow = this.getRowBelow(gridCell.getGridRow());
if (utils_1.Utils.missing(rowBelow)) {
return;
}
newRowIndex = rowBelow.rowIndex;
newFloating = rowBelow.floating;
}
var gridCellDef = { rowIndex: newRowIndex, column: newColumn, floating: newFloating };
return new gridCell_1.GridCell(gridCellDef);
};
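// Illustrative behaviour (editorial sketch, not part of the original source):
// tabbing forwards from the last displayed column wraps to the first column
// of the row below; when no row below exists, the function returns undefined.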
CellNavigationService.prototype.getNextTabbedCellBackwards = function (gridCell) {
var displayedColumns = this.columnController.getAllDisplayedColumns();
var newRowIndex = gridCell.rowIndex;
var newFloating = gridCell.floating;
// move along to the previous cell
var newColumn = this.columnController.getDisplayedColBefore(gridCell.column);
// check if at the start of the row, and if so, go back a row
if (!newColumn) {
newColumn = displayedColumns[displayedColumns.length - 1];
var rowAbove = this.getRowAbove(gridCell.getGridRow());
if (utils_1.Utils.missing(rowAbove)) {
return;
}
newRowIndex = rowAbove.rowIndex;
newFloating = rowAbove.floating;
}
var gridCellDef = { rowIndex: newRowIndex, column: newColumn, floating: newFloating };
return new gridCell_1.GridCell(gridCellDef);
};
__decorate([
context_1.Autowired('columnController'),
__metadata("design:type", columnController_1.ColumnController)
], CellNavigationService.prototype, "columnController", void 0);
__decorate([
context_1.Autowired('rowModel'),
__metadata("design:type", Object)
], CellNavigationService.prototype, "rowModel", void 0);
__decorate([
context_1.Autowired('pinnedRowModel'),
__metadata("design:type", pinnedRowModel_1.PinnedRowModel)
], CellNavigationService.prototype, "pinnedRowModel", void 0);
__decorate([
context_1.Autowired('gridOptionsWrapper'),
__metadata("design:type", gridOptionsWrapper_1.GridOptionsWrapper)
], CellNavigationService.prototype, "gridOptionsWrapper", void 0);
CellNavigationService = __decorate([
context_1.Bean('cellNavigationService')
], CellNavigationService);
return CellNavigationService;
}());
exports.CellNavigationService = CellNavigationService;<|fim▁end|> | };
CellNavigationService.prototype.getRowBelow = function (lastRow) {
// if already on last row, do nothing
if (this.isLastRowInContainer(lastRow)) { |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import base64
import cPickle as pickle
import datetime
from email import message_from_string
from email.utils import getaddresses
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import models
from django.db.models import Q, F
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from kiki.message import KikiMessage
from kiki.validators import validate_local_part, validate_not_command
class ListUserMetadata(models.Model):
UNCONFIRMED = 0
SUBSCRIBER = 1
MODERATOR = 2
BLACKLISTED = 3
STATUS_CHOICES = (
(UNCONFIRMED, u'Unconfirmed'),
(SUBSCRIBER, u'Subscriber'),
(MODERATOR, u'Moderator'),
(BLACKLISTED, u'Blacklisted'),
)
user = models.ForeignKey(User)
mailing_list = models.ForeignKey('MailingList')
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, default=UNCONFIRMED, db_index=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.user, self.mailing_list, self.get_status_display())
class Meta:
unique_together = ('user', 'mailing_list')
class MailingListManager(models.Manager):
def for_site(self, site):
return self.filter(site=site)
def for_addresses(self, addresses):
"""
Takes an iterable of email addresses and returns a queryset of mailing lists attached to the current site with matching local parts.
"""
site = Site.objects.get_current()
local_parts = []
for addr in addresses:
addr = addr.rsplit('@', 1)
if addr[1] == site.domain:
local_parts.append(addr[0])
if not local_parts:
return self.none()
return self.filter(domain=site, local_part__in=local_parts)
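# Illustrative usage of for_addresses (hypothetical addresses; assumes the
# current Site's domain is "example.com"):
#     MailingList.objects.for_addresses(["dev@example.com", "x@other.org"])
#     # -> queryset of this site's lists whose local_part is "dev"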
class MailingList(models.Model):
"""
This model contains all options for a mailing list, as well as some helpful
methods for accessing subscribers, moderators, etc.
"""
objects = MailingListManager()
MODERATORS = "mod"
SUBSCRIBERS = "sub"<|fim▁hole|> (SUBSCRIBERS, 'Subscribers',),
(ANYONE, 'Anyone',),
)
name = models.CharField(max_length=50)
subject_prefix = models.CharField(max_length=10, blank=True)
local_part = models.CharField(max_length=64, validators=[validate_local_part, validate_not_command])
domain = models.ForeignKey(Site)
description = models.TextField(blank=True)
who_can_post = models.CharField(max_length=3, choices=PERMISSION_CHOICES, default=SUBSCRIBERS)
self_subscribe_enabled = models.BooleanField(verbose_name='self-subscribe enabled', default=True)
moderation_enabled = models.BooleanField(help_text="If enabled, messages that would be rejected will be marked ``Requires Moderation`` and an email will be sent to the list's moderators.", default=False)
# If is_anonymous becomes an option, the precooker will need to handle some anonymizing.
#is_anonymous = models.BooleanField()
users = models.ManyToManyField(
User,
related_name = 'mailinglists',
blank = True,
null = True,
through = ListUserMetadata
)
messages = models.ManyToManyField(
'Message',
related_name = 'mailinglists',
blank = True,
null = True,
through = 'ListMessage'
)
@property
def address(self):
return "%s@%s" % (self.local_part, self.domain.domain)
def _list_id_header(self):
# Does this need to be a byte string?
return smart_str(u"%s <%s.%s>" % (self.name, self.local_part, self.domain.domain))
def __unicode__(self):
return self.name
def clean(self):
validate_email(self.address)
# As per RFC 2919, the list_id_header has a max length of 255 octets.
if len(self._list_id_header()) > 254:
# Allow 4 extra spaces: the delimiters, the space, and the period.
raise ValidationError("The list name, local part, and site domain name can be at most 250 characters long together.")
def get_recipients(self):
"""Returns a queryset of :class:`User`\ s that should receive this message."""
qs = User.objects.filter(is_active=True)
qs = qs.filter(listusermetadata__mailing_list=self, listusermetadata__status__in=[ListUserMetadata.SUBSCRIBER, ListUserMetadata.MODERATOR])
return qs.distinct()
def _is_email_with_status(self, email, status):
if isinstance(email, basestring):
kwargs = {'user__email__iexact': email}
elif isinstance(email, User):
kwargs = {'user': email}
else:
return False
try:
self.listusermetadata_set.get(status=status, **kwargs)
except ListUserMetadata.DoesNotExist:
return False
return True
def is_subscriber(self, email):
return self._is_email_with_status(email, ListUserMetadata.SUBSCRIBER)
def is_moderator(self, email):
return self._is_email_with_status(email, ListUserMetadata.MODERATOR)
def can_post(self, email):
if self.who_can_post == MailingList.ANYONE:
return True
if self.who_can_post == MailingList.SUBSCRIBERS and self.is_subscriber(email):
return True
if self.is_moderator(email):
return True
return False
class ProcessedMessageModel(models.Model):
"""
Encapsulates the logic required for storing and fetching pickled EmailMessage objects. This should eventually be replaced with a custom model field.
"""
processed_message = models.TextField(help_text="The processed form of the message at the current stage (pickled).", blank=True)
# Store the message as a base64-encoded pickle dump a la django-mailer.
def set_processed(self, msg):
self.processed_message = base64.encodestring(pickle.dumps(msg, pickle.HIGHEST_PROTOCOL))
self._processed = msg
def get_processed(self):
if not hasattr(self, '_processed'):
self._processed = pickle.loads(base64.decodestring(self.processed_message))
return self._processed
class Meta:
abstract = True
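# Sketch of the intended round trip (names as defined in this module):
#     m = Message()
#     m.set_processed(email_obj)      # stores base64(pickle(email_obj))
#     m.get_processed() is email_obj  # True; cached on m._processed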
class Message(ProcessedMessageModel):
"""
Represents an email received by Kiki. Stores the original received message as well as a pickled version of the processed message.
"""
UNPROCESSED = 'u'
PROCESSED = 'p'
FAILED = 'f'
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message_id = models.CharField(max_length=255, unique=True)
#: The message_id of the email this is in reply to.
# in_reply_to = models.CharField(max_length=255, db_index=True, blank=True)
from_email = models.EmailField()
received = models.DateTimeField()
status = models.CharField(max_length=1, choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
original_message = models.TextField(help_text="The original raw text of the message.")
class ListMessage(ProcessedMessageModel):
"""
Represents the relationship between a :class:`Message` and a :class:`MailingList`. Sending a message to a list is handled by processing this record rather than the original message.
"""
ACCEPTED = 1
REQUIRES_MODERATION = 2
PREPPED = 3
SENT = 4
FAILED = 5
STATUS_CHOICES = (
(ACCEPTED, 'Accepted'),
(REQUIRES_MODERATION, 'Requires Moderation'),
(PREPPED, 'Prepped'),
(SENT, 'Sent'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True)
class Meta:
unique_together = ('message', 'mailing_list',)
class ListCommand(models.Model):
#: The ListCommand has not been processed.
UNPROCESSED = 1
#: The ListCommand has been rejected (e.g. for permissioning reasons.)
REJECTED = 2
#: The ListCommand has been processed completely.
PROCESSED = 3
#: An error occurred while processing the ListCommand.
FAILED = 4
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(REJECTED, 'Rejected'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
command = models.CharField(max_length=20)<|fim▁end|> | ANYONE = "all"
PERMISSION_CHOICES = (
(MODERATORS, 'Moderators',), |
<|file_name|>account.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, cint
from frappe import throw, _
from frappe.model.document import Document
class RootNotEditable(frappe.ValidationError): pass
class Account(Document):
nsm_parent_field = 'parent_account'
def onload(self):
frozen_accounts_modifier = frappe.db.get_value("Accounts Settings", "Accounts Settings",
"frozen_accounts_modifier")
if not frozen_accounts_modifier or frozen_accounts_modifier in frappe.get_roles():
self.get("__onload").can_freeze_account = True
def autoname(self):
self.name = self.account_name.strip() + ' - ' + \
frappe.db.get_value("Company", self.company, "abbr")
def validate(self):
self.validate_parent()
self.validate_root_details()
self.set_root_and_report_type()
self.validate_mandatory()
self.validate_warehouse_account()
self.validate_frozen_accounts_modifier()
self.validate_balance_must_be_debit_or_credit()
self.validate_account_currency()
def validate_parent(self):
"""Fetch Parent Details and validate parent account"""
if self.parent_account:
par = frappe.db.get_value("Account", self.parent_account,
["name", "is_group", "company"], as_dict=1)
if not par:
throw(_("Account {0}: Parent account {1} does not exist").format(self.name, self.parent_account))
elif par.name == self.name:
throw(_("Account {0}: You can not assign itself as parent account").format(self.name))
elif not par.is_group:
throw(_("Account {0}: Parent account {1} can not be a ledger").format(self.name, self.parent_account))
elif par.company != self.company:
throw(_("Account {0}: Parent account {1} does not belong to company: {2}")
.format(self.name, self.parent_account, self.company))
def set_root_and_report_type(self):
if self.parent_account:
par = frappe.db.get_value("Account", self.parent_account, ["report_type", "root_type"], as_dict=1)
if par.report_type:
self.report_type = par.report_type
if par.root_type:
self.root_type = par.root_type
if self.is_group:
db_value = frappe.db.get_value("Account", self.name, ["report_type", "root_type"], as_dict=1)
if db_value:
if self.report_type != db_value.report_type:
frappe.db.sql("update `tabAccount` set report_type=%s where lft > %s and rgt < %s",<|fim▁hole|> if self.root_type != db_value.root_type:
frappe.db.sql("update `tabAccount` set root_type=%s where lft > %s and rgt < %s",
(self.root_type, self.lft, self.rgt))
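# Nested-set note: rows whose lft/rgt fall strictly inside this group's
# (lft, rgt) interval are exactly its descendants, so a single UPDATE
# cascades the report/root type to the whole subtree.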
def validate_root_details(self):
# does not exists parent
if frappe.db.exists("Account", self.name):
if not frappe.db.get_value("Account", self.name, "parent_account"):
throw(_("Root cannot be edited."), RootNotEditable)
def validate_frozen_accounts_modifier(self):
old_value = frappe.db.get_value("Account", self.name, "freeze_account")
if old_value and old_value != self.freeze_account:
frozen_accounts_modifier = frappe.db.get_value('Accounts Settings', None, 'frozen_accounts_modifier')
if not frozen_accounts_modifier or \
frozen_accounts_modifier not in frappe.get_roles():
throw(_("You are not authorized to set Frozen value"))
def validate_balance_must_be_debit_or_credit(self):
from erpnext.accounts.utils import get_balance_on
if not self.get("__islocal") and self.balance_must_be:
account_balance = get_balance_on(self.name)
if account_balance > 0 and self.balance_must_be == "Credit":
frappe.throw(_("Account balance already in Debit, you are not allowed to set 'Balance Must Be' as 'Credit'"))
elif account_balance < 0 and self.balance_must_be == "Debit":
frappe.throw(_("Account balance already in Credit, you are not allowed to set 'Balance Must Be' as 'Debit'"))
def validate_account_currency(self):
if not self.account_currency:
self.account_currency = frappe.db.get_value("Company", self.company, "default_currency")
elif self.account_currency != frappe.db.get_value("Account", self.name, "account_currency"):
if frappe.db.get_value("GL Entry", {"account": self.name}):
frappe.throw(_("Currency can not be changed after making entries using some other currency"))
def convert_group_to_ledger(self):
if self.check_if_child_exists():
throw(_("Account with child nodes cannot be converted to ledger"))
elif self.check_gle_exists():
throw(_("Account with existing transaction cannot be converted to ledger"))
else:
self.is_group = 0
self.save()
return 1
def convert_ledger_to_group(self):
if self.check_gle_exists():
throw(_("Account with existing transaction can not be converted to group."))
elif self.account_type:
throw(_("Cannot covert to Group because Account Type is selected."))
else:
self.is_group = 1
self.save()
return 1
# Check if any GL entry (i.e. a posted transaction) exists against this account
def check_gle_exists(self):
return frappe.db.get_value("GL Entry", {"account": self.name})
def check_if_child_exists(self):
return frappe.db.sql("""select name from `tabAccount` where parent_account = %s
and docstatus != 2""", self.name)
def validate_mandatory(self):
if not self.report_type:
throw(_("Report Type is mandatory"))
if not self.root_type:
throw(_("Root Type is mandatory"))
def validate_warehouse_account(self):
if not cint(frappe.defaults.get_global_default("auto_accounting_for_stock")):
return
if self.account_type == "Warehouse":
if not self.warehouse:
throw(_("Warehouse is mandatory if account type is Warehouse"))
old_warehouse = cstr(frappe.db.get_value("Account", self.name, "warehouse"))
if old_warehouse != cstr(self.warehouse):
if old_warehouse:
self.validate_warehouse(old_warehouse)
if self.warehouse:
self.validate_warehouse(self.warehouse)
elif self.warehouse:
self.warehouse = None
def validate_warehouse(self, warehouse):
if frappe.db.get_value("Stock Ledger Entry", {"warehouse": warehouse}):
throw(_("Stock entries exist against warehouse {0}, hence you cannot re-assign or modify Warehouse").format(warehouse))
def update_nsm_model(self):
"""update lft, rgt indices for nested set model"""
import frappe
import frappe.utils.nestedset
frappe.utils.nestedset.update_nsm(self)
def on_update(self):
self.update_nsm_model()
def validate_trash(self):
"""checks gl entries and if child exists"""
if not self.parent_account:
throw(_("Root account can not be deleted"))
if self.check_gle_exists():
throw(_("Account with existing transaction can not be deleted"))
if self.check_if_child_exists():
throw(_("Child account exists for this account. You can not delete this account."))
def on_trash(self):
self.validate_trash()
self.update_nsm_model()
def before_rename(self, old, new, merge=False):
# Add company abbr if not provided
from erpnext.setup.doctype.company.company import get_name_with_abbr
new_account = get_name_with_abbr(new, self.company)
# Validate properties before merging
if merge:
if not frappe.db.exists("Account", new):
throw(_("Account {0} does not exist").format(new))
val = list(frappe.db.get_value("Account", new_account,
["is_group", "root_type", "company"]))
if val != [self.is_group, self.root_type, self.company]:
throw(_("""Merging is only possible if following properties are same in both records. Is Group, Root Type, Company"""))
return new_account
def after_rename(self, old, new, merge=False):
if not merge:
frappe.db.set_value("Account", new, "account_name",
" - ".join(new.split(" - ")[:-1]))
else:
from frappe.utils.nestedset import rebuild_tree
rebuild_tree("Account", "parent_account")
def get_parent_account(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select name from tabAccount
where is_group = 1 and docstatus != 2 and company = %s
and %s like %s order by name limit %s, %s""" %
("%s", searchfield, "%s", "%s", "%s"),
(filters["company"], "%%%s%%" % txt, start, page_len), as_list=1)
def get_account_currency(account):
"""Helper function to get account currency"""
if not account:
return
def generator():
account_currency, company = frappe.db.get_value("Account", account, ["account_currency", "company"])
if not account_currency:
account_currency = frappe.db.get_value("Company", company, "default_currency")
return account_currency
return frappe.local_cache("account_currency", account, generator)<|fim▁end|> | (self.report_type, self.lft, self.rgt)) |
<|file_name|>associative-recall-task.py<|end_file_name|><|fim▁begin|>import theano
import theano.tensor as T
import numpy as np
import matplotlib.pyplot as plt
from lasagne.layers import InputLayer, DenseLayer, ReshapeLayer
import lasagne.layers
import lasagne.nonlinearities
import lasagne.updates
import lasagne.objectives
import lasagne.init
from ntm.layers import NTMLayer
from ntm.memory import Memory
from ntm.controllers import DenseController
from ntm.heads import WriteHead, ReadHead
from ntm.updates import graves_rmsprop
from utils.generators import AssociativeRecallTask
from utils.visualization import Dashboard
def model(input_var, batch_size=1, size=8, num_units=100, memory_shape=(128, 20)):
# Input Layer
l_input = InputLayer((batch_size, None, size + 2), input_var=input_var)<|fim▁hole|> controller = DenseController(l_input, memory_shape=memory_shape,
num_units=num_units, num_reads=1,
nonlinearity=lasagne.nonlinearities.rectify,
name='controller')
heads = [
WriteHead(controller, num_shifts=3, memory_shape=memory_shape, name='write', learn_init=False,
nonlinearity_key=lasagne.nonlinearities.rectify,
nonlinearity_add=lasagne.nonlinearities.rectify),
ReadHead(controller, num_shifts=3, memory_shape=memory_shape, name='read', learn_init=False,
nonlinearity_key=lasagne.nonlinearities.rectify)
]
l_ntm = NTMLayer(l_input, memory=memory, controller=controller, heads=heads)
# Output Layer
l_output_reshape = ReshapeLayer(l_ntm, (-1, num_units))
l_output_dense = DenseLayer(l_output_reshape, num_units=size + 2, nonlinearity=lasagne.nonlinearities.sigmoid, \
name='dense')
l_output = ReshapeLayer(l_output_dense, (batch_size, seqlen, size + 2))
return l_output, l_ntm
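# Shape sketch (assumption based on the generator's width of size + 2): the
# input and output tensors are (batch_size, seqlen, size + 2), where the two
# extra channels carry the task's delimiter bits.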
if __name__ == '__main__':
# Define the input and expected output variable
input_var, target_var = T.tensor3s('input', 'target')
# The generator to sample examples from
generator = AssociativeRecallTask(batch_size=1, max_iter=1000000, size=8, max_num_items=6, \
min_item_length=1, max_item_length=3)
# The model (1-layer Neural Turing Machine)
l_output, l_ntm = model(input_var, batch_size=generator.batch_size,
size=generator.size, num_units=100, memory_shape=(128, 20))
# The generated output variable and the loss function
pred_var = T.clip(lasagne.layers.get_output(l_output), 1e-6, 1. - 1e-6)
loss = T.mean(lasagne.objectives.binary_crossentropy(pred_var, target_var))
# Create the update expressions
params = lasagne.layers.get_all_params(l_output, trainable=True)
learning_rate = theano.shared(1e-4)
updates = lasagne.updates.adam(loss, params, learning_rate=learning_rate)
# Compile the function for a training step, as well as the prediction function and
# a utility function to get the inner details of the NTM
train_fn = theano.function([input_var, target_var], loss, updates=updates)
ntm_fn = theano.function([input_var], pred_var)
ntm_layer_fn = theano.function([input_var], lasagne.layers.get_output(l_ntm, get_details=True))
# Training
try:
scores, all_scores = [], []
for i, (example_input, example_output) in generator:
score = train_fn(example_input, example_output)
scores.append(score)
all_scores.append(score)
if i % 500 == 0:
mean_scores = np.mean(scores)
if mean_scores < 0.01:
learning_rate.set_value(1e-5)
print 'Batch #%d: %.6f' % (i, mean_scores)
scores = []
except KeyboardInterrupt:
pass
# Visualization
def marker1(params):
return params['num_items'] * (params['item_length'] + 1)
def marker2(params):
return (params['num_items'] + 1) * (params['item_length'] + 1)
markers = [
{
'location': marker1,
'style': {'color': 'red', 'ls': '-'}
},
{
'location': marker2,
'style': {'color': 'green', 'ls': '-'}
}
]
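# Marker sketch: the red line falls at the end of the stored items and the
# green line at the end of the query item, both in time steps computed from
# num_items and item_length above.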
dashboard = Dashboard(generator=generator, ntm_fn=ntm_fn, ntm_layer_fn=ntm_layer_fn, \
memory_shape=(128, 20), markers=markers, cmap='bone')
# Example
params = generator.sample_params()
dashboard.sample(**params)<|fim▁end|> | _, seqlen, _ = l_input.input_var.shape
# Neural Turing Machine Layer
memory = Memory(memory_shape, name='memory', memory_init=lasagne.init.Constant(1e-6), learn_init=False) |
<|file_name|>process_site.py<|end_file_name|><|fim▁begin|>"""Process `site.json` and bower package tools."""
import os
import json
import subprocess
from functools import partial
import importlib
import sys
from flask import Flask, render_template, g, redirect, current_app
from gitloader import git_show
from import_code import import_code
try:
from app import app
except ImportError:
from deckmaster.app import app
sys.path.append('.')
component_dir = 'static/components'
bower_str = 'bower install --config.directory="%s" %s > /dev/null'
def get_pkg_dir(package):
"""Join the component and package directory."""
return os.path.join(component_dir, package)
def get_pkg_main(package):
"""Check `package.json` then `bower.json` for the main included file."""
pkg = json.load(
open(os.path.join(get_pkg_dir(package), 'bower.json'))
)
if isinstance(pkg['main'],list):
return [os.path.join(get_pkg_dir(package), p) for p in pkg['main']]
else:<|fim▁hole|> if not os.path.exists(os.path.join(component_dir, package)):
subprocess.call(
bower_str % (component_dir, package),
shell = True
)
return True
def script_or_style(path):
if path.endswith('js'):
return 'script'
elif path.endswith('css'):
return 'style'
else:
print "Script or style? " + path
def process_bower(deps):
retval = {'styles':[], 'scripts':[]}
try:
for pkg in deps['bower']:
check_pkg(pkg)
main = get_pkg_main(pkg)
if isinstance(main,list):
pkgassets = {}
for path in reversed(main):
try:
pkgassets[script_or_style(path)+'s'] = [path]
except TypeError:
pass
retval['scripts'] += pkgassets['scripts']
retval['styles'] += pkgassets['styles']
else:
retval[script_or_style(main)+'s'].append(main)
except KeyError:
pass
return retval
def process_local(deps):
retval = {'styles':[], 'scripts':[]}
try:
for path in deps['local']:
retval[script_or_style(path)+'s'].append(path)
except KeyError:
pass
return retval
def process_deps(deps):
"""Process script element in the config for local vs bower components."""
local, bower = process_local(deps), process_bower(deps)
retval = {}
for tag in local:
retval[tag] = local[tag] + bower[tag]
return retval
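# Illustrative result (hypothetical package layout):
#     process_deps({'local': ['app.js'], 'bower': ['jquery']})
#     # -> {'scripts': ['app.js', 'static/components/jquery/dist/jquery.js'],
#     #     'styles': []}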
def process_route(route):
if not route.get('view'):
def route_handler(revid = None, path = None):
g.revid = revid
try:
return render_template(
'html/base.html', **process_deps(route['deps'])
)
except AttributeError:
return 'Not Found', 404
return route_handler
mname, fname = route['view'].rsplit('.', 1)
module = importlib.import_module(mname)
viewfunc = getattr(module, fname)
def route_handler(revid = None, path = None):
if revid is not None:
codestr = git_show('./views.py', revid)
mod = import_code(codestr, mname)
return getattr(mod,fname)()
return viewfunc()
return route_handler
def lazy_router(revid, path = None):
g.revid = revid
if path is None:
path = ''
if not path.startswith('/'):
path = '/' + path
cfgstr = git_show('./site.json', revid)
try:
return process_route(json.loads(cfgstr)[path])(revid, path)
except KeyError:
print cfgstr
def process_site(site = None, revid = None):
"""Process `site.json` based on the config and CLI options."""
if site is None:
try:
site = json.load(open('site.json'))
except IOError:
return []
if 'deps' in site:
return [
('/', 'index', process_route(site)),
('/<revid>/', 'index_revid', process_route(site)),
]
retval = [
('/favicon.ico', 'favicon', lambda: ''),
('/<revid>/', 'revid_lazy_index', lazy_router),
('/<revid>/<path:path>', 'revid_lazy', lazy_router),
]
for rt in site:
retval.append((rt, 'index' if rt=='/' else rt, process_route(site[rt])))
return retval<|fim▁end|> | return os.path.join(get_pkg_dir(package), pkg['main'])
def check_pkg(package):
"""CHeck if the package exists, if not use bower to install.""" |
<|file_name|>boot.js<|end_file_name|><|fim▁begin|>'use strict';
function Boot() {
}<|fim▁hole|> preload: function () {
this.load.image('preloader', 'assets/preloader.gif');
},
create: function () {
this.game.input.maxPointers = 1;
this.game.state.start('preload');
}
};
module.exports = Boot;<|fim▁end|> |
Boot.prototype = { |
<|file_name|>test_plugins.py<|end_file_name|><|fim▁begin|>import os
from twisted.trial import unittest
from lisa.server.plugins.PluginManager import PluginManagerSingleton
class LisaPluginTestCase(unittest.TestCase):
def setUp(self):
self.pluginManager = PluginManagerSingleton.get()
def test_a_install_plugin_ok(self):
answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True, version='0.1.6')
self.assertEqual(answer['status'], "success")
def test_aa_install_plugin_fail(self):
answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "fail")
def test_b_disable_plugin_ok(self):
answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_bb_disable_plugin_fail(self):
answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")
def test_c_enable_plugin_ok(self):
answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_cc_enable_plugin_fail(self):
answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")
def test_d_upgrade_plugin_ok(self):
answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "success")
def test_dd_upgrade_plugin_fail(self):
answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
self.assertEqual(answer['status'], "fail")
def test_e_load_plugin(self):
answer = self.pluginManager.loadPlugins()
test_list = ['UnitTest']
self.assertListEqual(answer, test_list)
def test_f_methodList_plugin(self):
answer = self.pluginManager.methodListPlugin()
methodlist = [{'methods': ['test'], 'plugin': u'UnitTest'}, {'core': 'intents', 'methods': ['list']}]
self.assertListEqual(answer, methodlist)
def test_g_create_plugin(self):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lisa.server.web.weblisa.settings")
answer = self.pluginManager.createPlugin(plugin_name="TestPlugin", author_name="TestAuthor",
author_email="[email protected]")<|fim▁hole|> answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "success")
def test_hh_uninstall_plugin(self):
answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
self.assertEqual(answer['status'], "fail")<|fim▁end|> | self.assertEqual(answer['status'], "success")
def test_h_uninstall_plugin(self): |
<|file_name|>test_igmp.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import unittest
import inspect
import logging
from struct import pack, unpack_from, pack_into
from nose.tools import ok_, eq_, raises
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.lib.packet.ethernet import ethernet
from ryu.lib.packet.ipv4 import ipv4
from ryu.lib.packet.packet import Packet
from ryu.lib.packet.packet_utils import checksum
from ryu.lib import addrconv
from ryu.lib.packet.igmp import igmp
from ryu.lib.packet.igmp import igmpv3_query
from ryu.lib.packet.igmp import igmpv3_report
from ryu.lib.packet.igmp import igmpv3_report_group
from ryu.lib.packet.igmp import IGMP_TYPE_QUERY
from ryu.lib.packet.igmp import IGMP_TYPE_REPORT_V3
from ryu.lib.packet.igmp import MODE_IS_INCLUDE
LOG = logging.getLogger(__name__)
class Test_igmp(unittest.TestCase):
""" Test case for Internet Group Management Protocol
"""
def setUp(self):
self.msgtype = IGMP_TYPE_QUERY
self.maxresp = 100
self.csum = 0
self.address = '225.0.0.1'
self.buf = pack(igmp._PACK_STR, self.msgtype, self.maxresp,
self.csum,
addrconv.ipv4.text_to_bin(self.address))
self.g = igmp(self.msgtype, self.maxresp, self.csum,
self.address)
def tearDown(self):
pass
def find_protocol(self, pkt, name):
for p in pkt.protocols:
if p.protocol_name == name:
return p
def test_init(self):
eq_(self.msgtype, self.g.msgtype)
eq_(self.maxresp, self.g.maxresp)
eq_(self.csum, self.g.csum)
eq_(self.address, self.g.address)
def test_parser(self):
_res = self.g.parser(self.buf)
if type(_res) is tuple:
res = _res[0]
else:
res = _res
eq_(res.msgtype, self.msgtype)
eq_(res.maxresp, self.maxresp)
eq_(res.csum, self.csum)
eq_(res.address, self.address)
def test_serialize(self):
data = bytearray()
prev = None
buf = self.g.serialize(data, prev)
res = unpack_from(igmp._PACK_STR, buffer(buf))
eq_(res[0], self.msgtype)
eq_(res[1], self.maxresp)
eq_(res[2], checksum(self.buf))
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
def _build_igmp(self):
dl_dst = '11:22:33:44:55:66'
dl_src = 'aa:bb:cc:dd:ee:ff'
dl_type = ether.ETH_TYPE_IP
e = ethernet(dl_dst, dl_src, dl_type)
total_length = 20 + igmp._MIN_LEN
nw_proto = inet.IPPROTO_IGMP
nw_dst = '11.22.33.44'
nw_src = '55.66.77.88'
i = ipv4(total_length=total_length, src=nw_src, dst=nw_dst,
proto=nw_proto)
p = Packet()
p.add_protocol(e)
p.add_protocol(i)
p.add_protocol(self.g)
p.serialize()
return p
def test_build_igmp(self):
p = self._build_igmp()
e = self.find_protocol(p, "ethernet")
ok_(e)
eq_(e.ethertype, ether.ETH_TYPE_IP)
i = self.find_protocol(p, "ipv4")
ok_(i)
eq_(i.proto, inet.IPPROTO_IGMP)
g = self.find_protocol(p, "igmp")
ok_(g)
eq_(g.msgtype, self.msgtype)
eq_(g.maxresp, self.maxresp)
eq_(g.csum, checksum(self.buf))
eq_(g.address, self.address)
def test_to_string(self):
igmp_values = {'msgtype': repr(self.msgtype),
'maxresp': repr(self.maxresp),
'csum': repr(self.csum),
'address': repr(self.address)}
_g_str = ','.join(['%s=%s' % (k, igmp_values[k])
for k, v in inspect.getmembers(self.g)
if k in igmp_values])
g_str = '%s(%s)' % (igmp.__name__, _g_str)
eq_(str(self.g), g_str)
eq_(repr(self.g), g_str)
@raises(Exception)
def test_malformed_igmp(self):
m_short_buf = self.buf[1:igmp._MIN_LEN]
igmp.parser(m_short_buf)
def test_default_args(self):
ig = igmp()
buf = ig.serialize(bytearray(), None)
res = unpack_from(igmp._PACK_STR, str(buf))
eq_(res[0], 0x11)
eq_(res[1], 0)
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
def test_json(self):
jsondict = self.g.to_jsondict()
g = igmp.from_jsondict(jsondict['igmp'])
eq_(str(self.g), str(g))
class Test_igmpv3_query(unittest.TestCase):
""" Test case for Internet Group Management Protocol v3
Membership Query Message"""
def setUp(self):
self.msgtype = IGMP_TYPE_QUERY
self.maxresp = 100
self.csum = 0
self.address = '225.0.0.1'
self.s_flg = 0
self.qrv = 2
self.qqic = 10
self.num = 0
self.srcs = []
self.s_qrv = self.s_flg << 3 | self.qrv
self.buf = pack(igmpv3_query._PACK_STR, self.msgtype,
self.maxresp, self.csum,
addrconv.ipv4.text_to_bin(self.address),
self.s_qrv, self.qqic, self.num)
self.g = igmpv3_query(
self.msgtype, self.maxresp, self.csum, self.address,
self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
def setUp_with_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs)
self.buf = pack(igmpv3_query._PACK_STR, self.msgtype,
self.maxresp, self.csum,
addrconv.ipv4.text_to_bin(self.address),
self.s_qrv, self.qqic, self.num)
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_query(
self.msgtype, self.maxresp, self.csum, self.address,
self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
def tearDown(self):
pass
def find_protocol(self, pkt, name):
for p in pkt.protocols:
if p.protocol_name == name:
return p
def test_init(self):
eq_(self.msgtype, self.g.msgtype)
eq_(self.maxresp, self.g.maxresp)
eq_(self.csum, self.g.csum)
eq_(self.address, self.g.address)
eq_(self.s_flg, self.g.s_flg)
eq_(self.qrv, self.g.qrv)
eq_(self.qqic, self.g.qqic)
eq_(self.num, self.g.num)
eq_(self.srcs, self.g.srcs)
def test_init_with_srcs(self):
self.setUp_with_srcs()
self.test_init()
def test_parser(self):
_res = self.g.parser(self.buf)
if type(_res) is tuple:
res = _res[0]
else:
res = _res
eq_(res.msgtype, self.msgtype)
eq_(res.maxresp, self.maxresp)
eq_(res.csum, self.csum)
eq_(res.address, self.address)
eq_(res.s_flg, self.s_flg)
eq_(res.qrv, self.qrv)
eq_(res.qqic, self.qqic)
eq_(res.num, self.num)
eq_(res.srcs, self.srcs)
def test_parser_with_srcs(self):
self.setUp_with_srcs()
self.test_parser()
def test_serialize(self):
data = bytearray()
prev = None
buf = self.g.serialize(data, prev)
res = unpack_from(igmpv3_query._PACK_STR, buffer(buf))
eq_(res[0], self.msgtype)
eq_(res[1], self.maxresp)
eq_(res[2], checksum(self.buf))
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
eq_(res[4], self.s_qrv)
eq_(res[5], self.qqic)
eq_(res[6], self.num)
def test_serialize_with_srcs(self):
self.setUp_with_srcs()
data = bytearray()
prev = None
buf = self.g.serialize(data, prev)
res = unpack_from(igmpv3_query._PACK_STR, buffer(buf))
(src1, src2, src3) = unpack_from('4s4s4s', buffer(buf),
igmpv3_query._MIN_LEN)
eq_(res[0], self.msgtype)
eq_(res[1], self.maxresp)
eq_(res[2], checksum(self.buf))
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
eq_(res[4], self.s_qrv)
eq_(res[5], self.qqic)
eq_(res[6], self.num)
eq_(src1, addrconv.ipv4.text_to_bin(self.srcs[0]))
eq_(src2, addrconv.ipv4.text_to_bin(self.srcs[1]))
eq_(src3, addrconv.ipv4.text_to_bin(self.srcs[2]))
def _build_igmp(self):
dl_dst = '11:22:33:44:55:66'
dl_src = 'aa:bb:cc:dd:ee:ff'
dl_type = ether.ETH_TYPE_IP
e = ethernet(dl_dst, dl_src, dl_type)
total_length = len(ipv4()) + len(self.g)
nw_proto = inet.IPPROTO_IGMP
nw_dst = '11.22.33.44'
nw_src = '55.66.77.88'
i = ipv4(total_length=total_length, src=nw_src, dst=nw_dst,
proto=nw_proto, ttl=1)
p = Packet()
p.add_protocol(e)
p.add_protocol(i)
p.add_protocol(self.g)
p.serialize()
return p
def test_build_igmp(self):
p = self._build_igmp()
e = self.find_protocol(p, "ethernet")
ok_(e)
eq_(e.ethertype, ether.ETH_TYPE_IP)
i = self.find_protocol(p, "ipv4")
ok_(i)
eq_(i.proto, inet.IPPROTO_IGMP)
g = self.find_protocol(p, "igmpv3_query")
ok_(g)
eq_(g.msgtype, self.msgtype)
eq_(g.maxresp, self.maxresp)
eq_(g.csum, checksum(self.buf))
eq_(g.address, self.address)
eq_(g.s_flg, self.s_flg)
eq_(g.qrv, self.qrv)
eq_(g.qqic, self.qqic)
eq_(g.num, self.num)
eq_(g.srcs, self.srcs)
def test_build_igmp_with_srcs(self):
self.setUp_with_srcs()
self.test_build_igmp()
def test_to_string(self):
igmp_values = {'msgtype': repr(self.msgtype),
'maxresp': repr(self.maxresp),
'csum': repr(self.csum),
'address': repr(self.address),
's_flg': repr(self.s_flg),
'qrv': repr(self.qrv),
'qqic': repr(self.qqic),
'num': repr(self.num),
'srcs': repr(self.srcs)}
_g_str = ','.join(['%s=%s' % (k, igmp_values[k])
for k, v in inspect.getmembers(self.g)
if k in igmp_values])
g_str = '%s(%s)' % (igmpv3_query.__name__, _g_str)
eq_(str(self.g), g_str)
eq_(repr(self.g), g_str)
def test_to_string_with_srcs(self):
self.setUp_with_srcs()
self.test_to_string()
@raises(Exception)
def test_num_larger_than_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs) + 1
self.buf = pack(igmpv3_query._PACK_STR, self.msgtype,
self.maxresp, self.csum,
addrconv.ipv4.text_to_bin(self.address),
self.s_qrv, self.qqic, self.num)
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_query(
self.msgtype, self.maxresp, self.csum, self.address,
self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
self.test_parser()
@raises(Exception)
def test_num_smaller_than_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs) - 1
self.buf = pack(igmpv3_query._PACK_STR, self.msgtype,
self.maxresp, self.csum,
addrconv.ipv4.text_to_bin(self.address),
self.s_qrv, self.qqic, self.num)
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_query(
self.msgtype, self.maxresp, self.csum, self.address,
self.s_flg, self.qrv, self.qqic, self.num, self.srcs)
self.test_parser()
def test_default_args(self):
prev = ipv4(proto=inet.IPPROTO_IGMP)
g = igmpv3_query()
prev.serialize(g, None)
buf = g.serialize(bytearray(), prev)
res = unpack_from(igmpv3_query._PACK_STR, str(buf))
buf = bytearray(buf)
pack_into('!H', buf, 2, 0)
buf = str(buf)
eq_(res[0], IGMP_TYPE_QUERY)
eq_(res[1], 100)
eq_(res[2], checksum(buf))
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
eq_(res[4], 2)
eq_(res[5], 0)
eq_(res[6], 0)
# srcs without num
prev = ipv4(proto=inet.IPPROTO_IGMP)
srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
g = igmpv3_query(srcs=srcs)
prev.serialize(g, None)
buf = g.serialize(bytearray(), prev)
res = unpack_from(igmpv3_query._PACK_STR, str(buf))
buf = bytearray(buf)
pack_into('!H', buf, 2, 0)
buf = str(buf)
eq_(res[0], IGMP_TYPE_QUERY)
eq_(res[1], 100)
eq_(res[2], checksum(buf))
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
eq_(res[4], 2)
eq_(res[5], 0)
eq_(res[6], len(srcs))
res = unpack_from('4s4s4s', str(buf), igmpv3_query._MIN_LEN)
eq_(res[0], addrconv.ipv4.text_to_bin(srcs[0]))
eq_(res[1], addrconv.ipv4.text_to_bin(srcs[1]))
eq_(res[2], addrconv.ipv4.text_to_bin(srcs[2]))
def test_json(self):
jsondict = self.g.to_jsondict()
g = igmpv3_query.from_jsondict(jsondict['igmpv3_query'])
eq_(str(self.g), str(g))
def test_json_with_srcs(self):
self.setUp_with_srcs()
self.test_json()
class Test_igmpv3_report(unittest.TestCase):
""" Test case for Internet Group Management Protocol v3
Membership Report Message"""
def setUp(self):
self.msgtype = IGMP_TYPE_REPORT_V3
self.csum = 0
self.record_num = 0
self.records = []
self.buf = pack(igmpv3_report._PACK_STR, self.msgtype,
self.csum, self.record_num)
self.g = igmpv3_report(
self.msgtype, self.csum, self.record_num, self.records)
def setUp_with_records(self):
self.record1 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 0, '225.0.0.1')
self.record2 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 2, '225.0.0.2',
['172.16.10.10', '172.16.10.27'])
self.record3 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 0, '225.0.0.3', [], 'abc\x00')
self.record4 = igmpv3_report_group(
MODE_IS_INCLUDE, 2, 2, '225.0.0.4',
['172.16.10.10', '172.16.10.27'], 'abcde\x00\x00\x00')
self.records = [self.record1, self.record2, self.record3,
self.record4]
self.record_num = len(self.records)
self.buf = pack(igmpv3_report._PACK_STR, self.msgtype,
self.csum, self.record_num)
self.buf += self.record1.serialize()
self.buf += self.record2.serialize()
self.buf += self.record3.serialize()
self.buf += self.record4.serialize()
self.g = igmpv3_report(
self.msgtype, self.csum, self.record_num, self.records)
def tearDown(self):
pass
def find_protocol(self, pkt, name):
for p in pkt.protocols:
if p.protocol_name == name:
return p
def test_init(self):
eq_(self.msgtype, self.g.msgtype)
eq_(self.csum, self.g.csum)
eq_(self.record_num, self.g.record_num)
eq_(self.records, self.g.records)
def test_init_with_records(self):
self.setUp_with_records()
self.test_init()
def test_parser(self):
_res = self.g.parser(str(self.buf))
if type(_res) is tuple:
res = _res[0]
else:
res = _res
eq_(res.msgtype, self.msgtype)
eq_(res.csum, self.csum)
eq_(res.record_num, self.record_num)
eq_(repr(res.records), repr(self.records))
def test_parser_with_records(self):
self.setUp_with_records()
self.test_parser()
def test_serialize(self):
data = bytearray()
prev = None
buf = self.g.serialize(data, prev)
res = unpack_from(igmpv3_report._PACK_STR, buffer(buf))
eq_(res[0], self.msgtype)
eq_(res[1], checksum(self.buf))
eq_(res[2], self.record_num)
def test_serialize_with_records(self):
self.setUp_with_records()
data = bytearray()
prev = None
buf = self.g.serialize(data, prev)
res = unpack_from(igmpv3_report._PACK_STR, buffer(buf))
offset = igmpv3_report._MIN_LEN
rec1 = igmpv3_report_group.parser(buffer(buf[offset:]))
offset += len(rec1)
rec2 = igmpv3_report_group.parser(buffer(buf[offset:]))
offset += len(rec2)
rec3 = igmpv3_report_group.parser(buffer(buf[offset:]))
offset += len(rec3)
rec4 = igmpv3_report_group.parser(buffer(buf[offset:]))
eq_(res[0], self.msgtype)
eq_(res[1], checksum(self.buf))
eq_(res[2], self.record_num)
eq_(repr(rec1), repr(self.record1))
eq_(repr(rec2), repr(self.record2))
eq_(repr(rec3), repr(self.record3))
eq_(repr(rec4), repr(self.record4))
def _build_igmp(self):
dl_dst = '11:22:33:44:55:66'
dl_src = 'aa:bb:cc:dd:ee:ff'
dl_type = ether.ETH_TYPE_IP
e = ethernet(dl_dst, dl_src, dl_type)
total_length = len(ipv4()) + len(self.g)
nw_proto = inet.IPPROTO_IGMP
nw_dst = '11.22.33.44'
nw_src = '55.66.77.88'
i = ipv4(total_length=total_length, src=nw_src, dst=nw_dst,
proto=nw_proto, ttl=1)
p = Packet()
p.add_protocol(e)
p.add_protocol(i)
p.add_protocol(self.g)
p.serialize()
return p
def test_build_igmp(self):
p = self._build_igmp()
e = self.find_protocol(p, "ethernet")
ok_(e)
eq_(e.ethertype, ether.ETH_TYPE_IP)
i = self.find_protocol(p, "ipv4")
ok_(i)
eq_(i.proto, inet.IPPROTO_IGMP)
g = self.find_protocol(p, "igmpv3_report")
ok_(g)
eq_(g.msgtype, self.msgtype)
eq_(g.csum, checksum(self.buf))
eq_(g.record_num, self.record_num)
eq_(g.records, self.records)
def test_build_igmp_with_records(self):
self.setUp_with_records()
self.test_build_igmp()
def test_to_string(self):
igmp_values = {'msgtype': repr(self.msgtype),
'csum': repr(self.csum),
'record_num': repr(self.record_num),
'records': repr(self.records)}
_g_str = ','.join(['%s=%s' % (k, igmp_values[k])
for k, v in inspect.getmembers(self.g)
if k in igmp_values])
g_str = '%s(%s)' % (igmpv3_report.__name__, _g_str)
eq_(str(self.g), g_str)
eq_(repr(self.g), g_str)
def test_to_string_with_records(self):
self.setUp_with_records()
self.test_to_string()
@raises(Exception)
def test_record_num_larger_than_records(self):
self.record1 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 0, '225.0.0.1')
self.record2 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 2, '225.0.0.2',
['172.16.10.10', '172.16.10.27'])
self.record3 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 0, '225.0.0.3', [], 'abc\x00')
self.record4 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 2, '225.0.0.4',
['172.16.10.10', '172.16.10.27'], 'abc\x00')
self.records = [self.record1, self.record2, self.record3,
self.record4]
self.record_num = len(self.records) + 1
self.buf = pack(igmpv3_report._PACK_STR, self.msgtype,
self.csum, self.record_num)
self.buf += self.record1.serialize()
self.buf += self.record2.serialize()
self.buf += self.record3.serialize()
self.buf += self.record4.serialize()
self.g = igmpv3_report(
self.msgtype, self.csum, self.record_num, self.records)
self.test_parser()
@raises(Exception)
def test_record_num_smaller_than_records(self):
self.record1 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 0, '225.0.0.1')
self.record2 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 2, '225.0.0.2',
['172.16.10.10', '172.16.10.27'])
self.record3 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 0, '225.0.0.3', [], 'abc\x00')
self.record4 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 2, '225.0.0.4',
['172.16.10.10', '172.16.10.27'], 'abc\x00')
self.records = [self.record1, self.record2, self.record3,
self.record4]
self.record_num = len(self.records) - 1
self.buf = pack(igmpv3_report._PACK_STR, self.msgtype,
self.csum, self.record_num)
self.buf += self.record1.serialize()
self.buf += self.record2.serialize()
self.buf += self.record3.serialize()
self.buf += self.record4.serialize()
self.g = igmpv3_report(
self.msgtype, self.csum, self.record_num, self.records)
self.test_parser()
def test_default_args(self):
prev = ipv4(proto=inet.IPPROTO_IGMP)
g = igmpv3_report()
prev.serialize(g, None)
buf = g.serialize(bytearray(), prev)
res = unpack_from(igmpv3_report._PACK_STR, str(buf))
buf = bytearray(buf)
pack_into('!H', buf, 2, 0)
buf = str(buf)
eq_(res[0], IGMP_TYPE_REPORT_V3)
eq_(res[1], checksum(buf))
eq_(res[2], 0)
# records without record_num
prev = ipv4(proto=inet.IPPROTO_IGMP)
record1 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 0, '225.0.0.1')
record2 = igmpv3_report_group(
MODE_IS_INCLUDE, 0, 2, '225.0.0.2',
['172.16.10.10', '172.16.10.27'])
record3 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 0, '225.0.0.3', [], 'abc\x00')
record4 = igmpv3_report_group(
MODE_IS_INCLUDE, 1, 2, '225.0.0.4',
['172.16.10.10', '172.16.10.27'], 'abc\x00')
records = [record1, record2, record3, record4]
g = igmpv3_report(records=records)
prev.serialize(g, None)
buf = g.serialize(bytearray(), prev)
res = unpack_from(igmpv3_report._PACK_STR, str(buf))
buf = bytearray(buf)
pack_into('!H', buf, 2, 0)
buf = str(buf)
eq_(res[0], IGMP_TYPE_REPORT_V3)
eq_(res[1], checksum(buf))
eq_(res[2], len(records))
def test_json(self):
jsondict = self.g.to_jsondict()
g = igmpv3_report.from_jsondict(jsondict['igmpv3_report'])
eq_(str(self.g), str(g))
def test_json_with_records(self):
self.setUp_with_records()
self.test_json()
class Test_igmpv3_report_group(unittest.TestCase):
"""Test case for Group Records of
Internet Group Management Protocol v3 Membership Report Message"""
def setUp(self):
self.type_ = MODE_IS_INCLUDE
self.aux_len = 0
self.num = 0
self.address = '225.0.0.1'
self.srcs = []
self.aux = None
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
def setUp_with_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs)
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
def setUp_with_aux(self):
self.aux = '\x01\x02\x03\x04\x05\x00\x00\x00'
self.aux_len = len(self.aux) / 4
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
self.buf += self.aux
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
def setUp_with_srcs_and_aux(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs)
self.aux = '\x01\x02\x03\x04\x05\x00\x00\x00'
self.aux_len = len(self.aux) / 4
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.buf += self.aux
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
def tearDown(self):
pass
def test_init(self):
eq_(self.type_, self.g.type_)
eq_(self.aux_len, self.g.aux_len)
eq_(self.num, self.g.num)
eq_(self.address, self.g.address)
eq_(self.srcs, self.g.srcs)
eq_(self.aux, self.g.aux)
def test_init_with_srcs(self):
self.setUp_with_srcs()
self.test_init()
def test_init_with_aux(self):
self.setUp_with_aux()
self.test_init()
def test_init_with_srcs_and_aux(self):
self.setUp_with_srcs_and_aux()
self.test_init()
def test_parser(self):
_res = self.g.parser(self.buf)
if type(_res) is tuple:
res = _res[0]
else:
res = _res
eq_(res.type_, self.type_)
eq_(res.aux_len, self.aux_len)
eq_(res.num, self.num)
eq_(res.address, self.address)
eq_(res.srcs, self.srcs)
eq_(res.aux, self.aux)
def test_parser_with_srcs(self):
self.setUp_with_srcs()
self.test_parser()
def test_parser_with_aux(self):
self.setUp_with_aux()
self.test_parser()
def test_parser_with_srcs_and_aux(self):
self.setUp_with_srcs_and_aux()
self.test_parser()
def test_serialize(self):
buf = self.g.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, buffer(buf))
eq_(res[0], self.type_)
eq_(res[1], self.aux_len)
eq_(res[2], self.num)
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
def test_serialize_with_srcs(self):
self.setUp_with_srcs()
buf = self.g.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, buffer(buf))
(src1, src2, src3) = unpack_from('4s4s4s', buffer(buf),
igmpv3_report_group._MIN_LEN)
eq_(res[0], self.type_)
eq_(res[1], self.aux_len)
eq_(res[2], self.num)
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
eq_(src1, addrconv.ipv4.text_to_bin(self.srcs[0]))
eq_(src2, addrconv.ipv4.text_to_bin(self.srcs[1]))
eq_(src3, addrconv.ipv4.text_to_bin(self.srcs[2]))
def test_serialize_with_aux(self):
self.setUp_with_aux()
buf = self.g.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, buffer(buf))
(aux, ) = unpack_from('%ds' % (self.aux_len * 4), buffer(buf),
igmpv3_report_group._MIN_LEN)
eq_(res[0], self.type_)
eq_(res[1], self.aux_len)
eq_(res[2], self.num)
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
eq_(aux, self.aux)
def test_serialize_with_srcs_and_aux(self):
self.setUp_with_srcs_and_aux()
buf = self.g.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, buffer(buf))
(src1, src2, src3) = unpack_from('4s4s4s', buffer(buf),
igmpv3_report_group._MIN_LEN)
(aux, ) = unpack_from('%ds' % (self.aux_len * 4), buffer(buf),
igmpv3_report_group._MIN_LEN + 12)
eq_(res[0], self.type_)
eq_(res[1], self.aux_len)
eq_(res[2], self.num)
eq_(res[3], addrconv.ipv4.text_to_bin(self.address))
eq_(src1, addrconv.ipv4.text_to_bin(self.srcs[0]))
eq_(src2, addrconv.ipv4.text_to_bin(self.srcs[1]))
eq_(src3, addrconv.ipv4.text_to_bin(self.srcs[2]))
eq_(aux, self.aux)
def test_to_string(self):
igmp_values = {'type_': repr(self.type_),
'aux_len': repr(self.aux_len),
'num': repr(self.num),
'address': repr(self.address),
'srcs': repr(self.srcs),
'aux': repr(self.aux)}
_g_str = ','.join(['%s=%s' % (k, igmp_values[k])
for k, v in inspect.getmembers(self.g)
if k in igmp_values])<|fim▁hole|> eq_(repr(self.g), g_str)
def test_to_string_with_srcs(self):
self.setUp_with_srcs()
self.test_to_string()
def test_to_string_with_aux(self):
self.setUp_with_aux()
self.test_to_string()
def test_to_string_with_srcs_and_aux(self):
self.setUp_with_srcs_and_aux()
self.test_to_string()
def test_len(self):
eq_(len(self.g), 8)
def test_len_with_srcs(self):
self.setUp_with_srcs()
eq_(len(self.g), 20)
def test_len_with_aux(self):
self.setUp_with_aux()
eq_(len(self.g), 16)
def test_len_with_srcs_and_aux(self):
self.setUp_with_srcs_and_aux()
eq_(len(self.g), 28)
@raises(Exception)
def test_num_larger_than_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs) + 1
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
self.test_parser()
@raises(Exception)
def test_num_smaller_than_srcs(self):
self.srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
self.num = len(self.srcs) - 1
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
for src in self.srcs:
self.buf += pack('4s', addrconv.ipv4.text_to_bin(src))
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
self.test_parser()
@raises(Exception)
def test_aux_len_larger_than_aux(self):
self.aux = '\x01\x02\x03\x04\x05\x00\x00\x00'
self.aux_len = len(self.aux) / 4 + 1
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
self.buf += self.aux
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
self.test_parser()
@raises(Exception)
def test_aux_len_smaller_than_aux(self):
self.aux = '\x01\x02\x03\x04\x05\x00\x00\x00'
self.aux_len = len(self.aux) / 4 - 1
self.buf = pack(igmpv3_report_group._PACK_STR, self.type_,
self.aux_len, self.num,
addrconv.ipv4.text_to_bin(self.address))
self.buf += self.aux
self.g = igmpv3_report_group(
self.type_, self.aux_len, self.num, self.address,
self.srcs, self.aux)
self.test_parser()
def test_default_args(self):
rep = igmpv3_report_group()
buf = rep.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, str(buf))
eq_(res[0], 0)
eq_(res[1], 0)
eq_(res[2], 0)
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
# srcs without num
srcs = ['192.168.1.1', '192.168.1.2', '192.168.1.3']
rep = igmpv3_report_group(srcs=srcs)
buf = rep.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, str(buf))
eq_(res[0], 0)
eq_(res[1], 0)
eq_(res[2], len(srcs))
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
res = unpack_from('4s4s4s', str(buf), igmpv3_report_group._MIN_LEN)
eq_(res[0], addrconv.ipv4.text_to_bin(srcs[0]))
eq_(res[1], addrconv.ipv4.text_to_bin(srcs[1]))
eq_(res[2], addrconv.ipv4.text_to_bin(srcs[2]))
# aux without aux_len
aux = 'abcde'
rep = igmpv3_report_group(aux=aux)
buf = rep.serialize()
res = unpack_from(igmpv3_report_group._PACK_STR, str(buf))
eq_(res[0], 0)
eq_(res[1], 2)
eq_(res[2], 0)
eq_(res[3], addrconv.ipv4.text_to_bin('0.0.0.0'))
eq_(buf[igmpv3_report_group._MIN_LEN:], 'abcde\x00\x00\x00')<|fim▁end|> | g_str = '%s(%s)' % (igmpv3_report_group.__name__, _g_str)
eq_(str(self.g), g_str) |
<|file_name|>thir-unsafeck-issue-85871.rs<|end_file_name|><|fim▁begin|>// Tests that no ICE occurs when a closure appears inside a node
// that does not have a body when compiling with
// compile-flags: -Zthir-unsafeck=yes
// check-pass
#![allow(dead_code)]<|fim▁hole|> _n => 42, // we may not call the closure here (E0015)
}],
}
enum E {
V([(); { let _ = || 1; 42 }]),
}
type Ty = [(); { let _ = || 1; 42 }];
fn main() {}<|fim▁end|> |
struct Bug {
inner: [(); match || 1 { |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate may;
extern crate mimalloc;
extern crate num_cpus;
#[macro_use]
extern crate serde_derive;
extern crate may_minihttp;
extern crate serde_json;
use may_minihttp::{BodyWriter, HttpServer, HttpService, Request, Response};
use std::io;
use mimalloc::MiMalloc;
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;
#[derive(Serialize)]
struct Message<'a> {
message: &'a str,
}
struct Techempower;
impl HttpService for Techempower {
fn call(&self, req: Request) -> io::Result<Response> {
let mut resp = Response::new();
// Bare-bones router
match req.path() {
"/plaintext" => {
resp.header("Content-Type", "text/plain")
.body("Hello, World!");
}
"/json" => {
resp.header("Content-Type", "application/json");
let body = resp.body_mut();
body.reserve(27);
serde_json::to_writer(
BodyWriter(body),
&Message {
message: "Hello, World!",
},
)
.unwrap();
}
_ => {
resp.status_code("404", "Not Found");
}
}
Ok(resp)
}
}
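// Illustrative requests against the bare-bones router above (the bind
// address is the one used in main() below; exact bytes assume serde_json's
// default serialization):
//
//   curl http://127.0.0.1:8080/plaintext  -> Hello, World!
//   curl http://127.0.0.1:8080/json       -> {"message":"Hello, World!"}
//   any other path                        -> 404 Not Found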
fn main() {<|fim▁hole|> .set_io_workers(num_cpus::get())
.set_pool_capacity(20000)
.set_stack_size(0x800);
let mut servers = Vec::new();
for _ in 0..num_cpus::get() {
let server = HttpServer(Techempower).start("0.0.0.0:8080").unwrap();
servers.push(server);
}
for server in servers {
server.join().unwrap();
}
}<|fim▁end|> | may::config() |
<|file_name|>ipc.rs<|end_file_name|><|fim▁begin|>/*
Precached - A Linux process monitor and pre-caching daemon
Copyright (C) 2017-2020 the precached developers
This file is part of precached.
Precached is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Precached is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Precached. If not, see <http://www.gnu.org/licenses/>.
*/
#![allow(unused)]
use std::collections::VecDeque;
use std::fs::OpenOptions;
use std::io;
use std::io::prelude;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use parking_lot::RwLock;
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
use chrono::{DateTime, Local, NaiveDateTime, TimeZone, Utc};
use log::{trace, debug, info, warn, error, log, LevelFilter};
use serde_derive::{Serialize, Deserialize};
use crate::constants;
use crate::events;
use crate::globals::*;
use crate::hooks::fanotify_logger::ACTIVE_TRACERS;
use crate::hooks::iotrace_prefetcher::{IOtracePrefetcher, ThreadState};
use crate::hooks::process_tracker::ProcessTracker;
use crate::manager::*;
use crate::plugins;
use crate::plugins::introspection;
use crate::plugins::introspection::InternalState;
use crate::plugins::statistics;
use crate::plugins::statistics::GlobalStatistics;
use crate::process;
use crate::EXIT_NOW;
/// Represents a process
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProcessEntry {
/// Holds the `pid` of the process
pub pid: libc::pid_t,
pub comm: String,
pub params: Vec<String>,
}
/// Represents an in-flight trace
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TracerEntry {
pub start_time: DateTime<Utc>,
pub trace_time_expired: bool,
pub exe: PathBuf,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PrefetchStats {
pub datetime: DateTime<Utc>,
pub thread_states: Vec<ThreadState>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InternalEvent {
pub datetime: DateTime<Utc>,
pub name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Statistics {
pub datetime: DateTime<Utc>,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum IpcCommand {
Ping,
Pong,
Connect,
ConnectedSuccessfully,
Close,
RequestTrackedProcesses,
SendTrackedProcesses(Vec<ProcessEntry>),
RequestInFlightTracers,
SendInFlightTracers(Vec<TracerEntry>),
RequestPrefetchStatus,
SendPrefetchStatus(PrefetchStats),
RequestInternalEvents,
SendInternalEvents(Vec<InternalEvent>),
RequestCachedFiles,
SendCachedFiles(Vec<PathBuf>),
RequestStatistics,
SendStatistics(Vec<Statistics>),
RequestInternalState,
SendInternalState(InternalState),
RequestGlobalStatistics,
SendGlobalStatistics(GlobalStatistics),
}
#[derive(Debug, Serialize, Deserialize)]
pub struct IpcMessage {
pub datetime: DateTime<Utc>,
pub command: IpcCommand,
}
impl IpcMessage {
pub fn new(command: IpcCommand) -> IpcMessage {
IpcMessage {
datetime: Utc::now(),
command,
}
}
}
pub struct IpcEvent {
pub datetime: DateTime<Utc>,
pub event: events::EventType,
}
impl IpcEvent {
pub fn new(event: events::EventType) -> IpcEvent {
IpcEvent {
datetime: Utc::now(),
event,
}
}
}
pub struct IpcServer {
socket: Option<zmq::Socket>,
}
impl IpcServer {
pub fn new() -> Self {
IpcServer { socket: None }
}
pub fn init(&mut self, globals: &mut Globals, manager: &Manager) -> Result<(), &'static str> {
let ctx = zmq::Context::new();
match ctx.socket(zmq::REP) {
Err(e) => Err("Socket creation failed!"),
Ok(socket) => {
socket.bind("ipc:///run/precached/precached.sock");
self.socket = Some(socket);
Ok(())
}
}
}
pub fn listen(&self) -> Result<String, String> {
match self.socket {
None => Err(String::from("IPC socket is not connected!")),
Some(ref socket) => {
// wait for consumer
trace!("Awaiting next IPC request...");
match socket.recv_string(0) {
Err(e) => Err(format!("Socket recv() error: {}", e)),
Ok(data) => Ok(data.unwrap()),
}
}
}
}
pub fn process_messages(&self, data: &str, queue: &mut VecDeque<events::InternalEvent>, manager: &Manager) {
match self.socket {
None => {
error!("IPC socket is not connected!");
}
Some(ref socket) => {
let manager = manager.clone();
let deserialized_data: IpcMessage = serde_json::from_str(data).unwrap();
match deserialized_data.command {
IpcCommand::Connect => {
info!("IPC client connected");
let cmd = IpcMessage::new(IpcCommand::ConnectedSuccessfully);
let buf = serde_json::to_string(&cmd).unwrap();
match socket.send(&buf.as_bytes(), 0) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
}
}
IpcCommand::Close => {<|fim▁hole|> IpcCommand::RequestTrackedProcesses => match Self::handle_request_tracked_processes(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestInFlightTracers => match Self::handle_request_inflight_tracers(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestPrefetchStatus => match Self::handle_request_prefetch_status(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestInternalEvents => match Self::handle_request_internal_events(socket, queue, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestCachedFiles => match Self::handle_request_cached_files(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestStatistics => match Self::handle_request_statistics(socket, queue, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestInternalState => match Self::handle_request_internal_state(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
IpcCommand::RequestGlobalStatistics => match Self::handle_request_global_statistics(socket, &manager) {
Err(e) => {
error!("Error sending response: {}", e);
}
Ok(()) => {
trace!("Successfully sent reply");
}
},
_ => {
warn!("Unknown IPC command received");
}
}
}
}
}
fn handle_request_tracked_processes(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
trace!("IPC client command: RequestTrackedProcesses");
let hm = manager.hook_manager.read();
match hm.get_hook_by_name(&String::from("process_tracker")) {
None => {
warn!("Hook not loaded: 'process_tracker', skipped");
Ok(())
}
Some(h) => {
let h = h.read();
let mut process_tracker = h.as_any().downcast_ref::<ProcessTracker>().unwrap();
let v: Vec<ProcessEntry> = process_tracker
.tracked_processes
.values()
.map(|v| {
let params = v.get_cmdline().unwrap_or_else(|_| "".to_owned());
let params = params.split("\u{0}").map(|p| p.to_owned()).collect();
ProcessEntry {
pid: v.pid,
comm: v.comm.to_string(),
params,
}
})
.collect();
let cmd = IpcMessage::new(IpcCommand::SendTrackedProcesses(v));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
}
}
fn handle_request_inflight_tracers(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
trace!("IPC client command: RequestInFlightTracers");
let active_tracers = ACTIVE_TRACERS.lock();
let mut result: Vec<TracerEntry> = vec![];
for trace in active_tracers.values() {
let item = TracerEntry {
// start_time: DateTime::<Utc>::from_utc(NaiveDateTime::from(trace.start_time), Utc),
start_time: Utc::now(),
trace_time_expired: trace.trace_time_expired,
exe: trace.trace_log.exe.clone(),
};
result.push(item);
}
let cmd = IpcMessage::new(IpcCommand::SendInFlightTracers(result));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
fn handle_request_prefetch_status(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
let hm = manager.hook_manager.read();
match hm.get_hook_by_name(&String::from("iotrace_prefetcher")) {
None => {
warn!("Hook not loaded: 'iotrace_prefetcher', skipped");
Ok(())
}
Some(h) => {
let h = h.read();
let mut iotrace_prefetcher = h.as_any().downcast_ref::<IOtracePrefetcher>().unwrap();
let mut v: Vec<ThreadState> = vec![];
for s in &iotrace_prefetcher.thread_states {
let val = s.read();
v.push((*val).clone());
}
let stats = PrefetchStats {
datetime: Utc::now(),
thread_states: v,
};
let cmd = IpcMessage::new(IpcCommand::SendPrefetchStatus(stats));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
}
}
fn handle_request_cached_files(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
trace!("IPC client command: RequestCachedFiles");
let hm = manager.hook_manager.read();
match hm.get_hook_by_name(&String::from("iotrace_prefetcher")) {
None => {
warn!("Hook not loaded: 'iotrace_prefetcher', skipped");
Ok(())
}
Some(h) => {
let v = statistics::MAPPED_FILES.iter().map(|f| f.clone()).collect();
let cmd = IpcMessage::new(IpcCommand::SendCachedFiles(v));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
}
}
fn handle_request_internal_events(
socket: &zmq::Socket,
queue: &mut VecDeque<events::InternalEvent>,
manager: &Manager,
) -> Result<(), zmq::Error> {
let mut items = vec![];
for e in queue.drain(..) {
let i = InternalEvent {
datetime: Utc::now(),
name: format!("{:?}", e.event_type),
};
items.push(i);
}
let cmd = IpcMessage::new(IpcCommand::SendInternalEvents(items));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
fn handle_request_statistics(
socket: &zmq::Socket,
queue: &mut VecDeque<events::InternalEvent>,
manager: &Manager,
) -> Result<(), zmq::Error> {
let mut items = vec![];
for e in queue.drain(..) {
let i = Statistics {
datetime: Utc::now(),
// name: format!("{:?}", e),
};
items.push(i);
}
let cmd = IpcMessage::new(IpcCommand::SendStatistics(items));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
fn handle_request_internal_state(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
let pm = manager.plugin_manager.read();
match pm.get_plugin_by_name(&String::from("introspection")) {
None => {
warn!("Plugin not loaded: 'introspection', skipped");
Ok(())
}
Some(p) => {
let p = p.read();
let mut introspection = p.as_any().downcast_ref::<plugins::introspection::Introspection>().unwrap();
let data = introspection.get_internal_state(manager);
let cmd = IpcMessage::new(IpcCommand::SendInternalState(data));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
}
}
fn handle_request_global_statistics(socket: &zmq::Socket, manager: &Manager) -> Result<(), zmq::Error> {
let pm = manager.plugin_manager.read();
match pm.get_plugin_by_name(&String::from("statistics")) {
None => {
warn!("Plugin not loaded: 'statistics', skipped");
Ok(())
}
Some(p) => {
let p = p.read();
let mut statistics = p.as_any().downcast_ref::<plugins::statistics::Statistics>().unwrap();
let data = statistics.get_global_statistics(manager);
let cmd = IpcMessage::new(IpcCommand::SendGlobalStatistics(data));
let buf = serde_json::to_string(&cmd).unwrap();
socket.send(&buf.as_bytes(), 0)?;
Ok(())
}
}
}
}<|fim▁end|> | info!("IPC client disconnected");
}
|
<|file_name|>sub.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty;
use middle::ty::TyVar;
use middle::typeck::check::regionmanip::replace_late_bound_regions_in_fn_sig;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::{cres, CresCompare};
use middle::typeck::infer::equate::Equate;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::type_variable::{SubtypeOf, SupertypeOf};
use util::common::{indenter};
use util::ppaux::{bound_region_to_string, Repr};
use syntax::ast::{Onceness, FnStyle, MutImmutable, MutMutable};
/// "Greatest lower bound" (common subtype)
pub struct Sub<'f> {
fields: CombineFields<'f>
}
#[allow(non_snake_case)]
pub fn Sub<'f>(cf: CombineFields<'f>) -> Sub<'f> {
Sub { fields: cf }
}
impl<'f> Combine for Sub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.fields.infcx }
fn tag(&self) -> String { "sub".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace { self.fields.trace.clone() }
fn equate<'a>(&'a self) -> Equate<'a> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a> { Glb(self.fields.clone()) }
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
Sub(self.fields.switch_expected()).tys(b, a)
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
let opp = CombineFields {
a_is_expected: !self.fields.a_is_expected,
..self.fields.clone()
};
Sub(opp).regions(b, a)
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));
self.fields.infcx.region_vars.make_subregion(Subtype(self.trace()), a, b);
Ok(a)
}
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
debug!("mts({} <: {})",
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability);
}
match b.mutbl {
MutMutable => {
// If supertype is mut, subtype must match exactly
// (i.e., invariant if mut):
try!(self.equate().tys(a.ty, b.ty));
}
MutImmutable => {<|fim▁hole|> // Otherwise we can be covariant:
try!(self.tys(a.ty, b.ty));
}
}
Ok(*a) // return is meaningless in sub, just return *a
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
self.lub().fn_styles(a, b).compare(b, || {
ty::terr_fn_style_mismatch(expected_found(self, a, b))
})
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
self.lub().oncenesses(a, b).compare(b, || {
ty::terr_onceness_mismatch(expected_found(self, a, b))
})
}
fn builtin_bounds(&self, a: BuiltinBounds, b: BuiltinBounds)
-> cres<BuiltinBounds> {
// More bounds is a subtype of fewer bounds.
//
// e.g., fn:Copy() <: fn(), because the former is a function
// that only closes over copyable things, but the latter is
// any function at all.
if a.contains(b) {
Ok(a)
} else {
Err(ty::terr_builtin_bounds(expected_found(self, a, b)))
}
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
debug!("{}.tys({}, {})", self.tag(),
a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
if a == b { return Ok(a); }
let infcx = self.fields.infcx;
let a = infcx.type_variables.borrow().replace_if_possible(a);
let b = infcx.type_variables.borrow().replace_if_possible(b);
match (&ty::get(a).sty, &ty::get(b).sty) {
(&ty::ty_bot, _) => {
Ok(a)
}
(&ty::ty_infer(TyVar(a_id)), &ty::ty_infer(TyVar(b_id))) => {
infcx.type_variables
.borrow_mut()
.relate_vars(a_id, SubtypeOf, b_id);
Ok(a)
}
// The vec/str check here and below is so that we don't unify
// T with [T]; this is necessary so we reflect subtyping of references
// (&T does not unify with &[T]), which in turn reflects
// the historical non-typedness of [T].
(&ty::ty_infer(TyVar(_)), &ty::ty_str) |
(&ty::ty_infer(TyVar(_)), &ty::ty_vec(_, None)) => {
Err(ty::terr_sorts(expected_found(self, a, b)))
}
(&ty::ty_infer(TyVar(a_id)), _) => {
try!(self.fields
.switch_expected()
.instantiate(b, SupertypeOf, a_id));
Ok(a)
}
(&ty::ty_str, &ty::ty_infer(TyVar(_))) |
(&ty::ty_vec(_, None), &ty::ty_infer(TyVar(_))) => {
Err(ty::terr_sorts(expected_found(self, a, b)))
}
(_, &ty::ty_infer(TyVar(b_id))) => {
try!(self.fields.instantiate(a, SubtypeOf, b_id));
Ok(a)
}
(_, &ty::ty_bot) => {
Err(ty::terr_sorts(expected_found(self, a, b)))
}
_ => {
super_tys(self, a, b)
}
}
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
debug!("fn_sigs(a={}, b={})",
a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
let _indenter = indenter();
// Rather than checking the subtype relationship between `a` and `b`
// as-is, we need to do some extra work here in order to make sure
// that function subtyping works correctly with respect to regions
//
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.fields.infcx.region_vars.mark();
// First, we instantiate each bound region in the subtype with a fresh
// region variable.
let (a_sig, _) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
let (skol_map, b_sig) = {
replace_late_bound_regions_in_fn_sig(self.fields.infcx.tcx, b, |br| {
let skol = self.fields.infcx.region_vars.new_skolemized(br);
debug!("Bound region {} skolemized to {:?}",
bound_region_to_string(self.fields.infcx.tcx, "", false, br),
skol);
skol
})
};
debug!("a_sig={}", a_sig.repr(self.fields.infcx.tcx));
debug!("b_sig={}", b_sig.repr(self.fields.infcx.tcx));
// Compare types now that bound regions have been replaced.
let sig = try!(super_fn_sigs(self, &a_sig, &b_sig));
// Presuming type comparison succeeds, we need to check
// that the skolemized regions do not "leak".
let new_vars =
self.fields.infcx.region_vars.vars_created_since_mark(mark);
for (&skol_br, &skol) in skol_map.iter() {
let tainted = self.fields.infcx.region_vars.tainted(mark, skol);
for tainted_region in tainted.iter() {
// Each skolemized should only be relatable to itself
// or new variables:
match *tainted_region {
ty::ReInfer(ty::ReVar(ref vid)) => {
if new_vars.iter().any(|x| x == vid) { continue; }
}
_ => {
if *tainted_region == skol { continue; }
}
};
// A is not as polymorphic as B:
if self.a_is_expected() {
debug!("Not as polymorphic!");
return Err(ty::terr_regions_insufficiently_polymorphic(
skol_br, *tainted_region));
} else {
debug!("Overly polymorphic!");
return Err(ty::terr_regions_overly_polymorphic(
skol_br, *tainted_region));
}
}
}
return Ok(sig);
}
}<|fim▁end|> | |
<|file_name|>regions-creating-enums5.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]
// pretty-expanded FIXME #23616
enum ast<'a> {
num(usize),<|fim▁hole|>fn mk_add_ok<'a>(x: &'a ast<'a>, y: &'a ast<'a>, _z: &ast) -> ast<'a> {
ast::add(x, y)
}
pub fn main() {
}<|fim▁end|> | add(&'a ast<'a>, &'a ast<'a>)
}
|
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use prelude::v1::*;
use self::SocketStatus::*;
use self::InAddr::*;
use ffi::CString;
use ffi;
use old_io::net::addrinfo;
use old_io::net::ip::{SocketAddr, IpAddr, Ipv4Addr, Ipv6Addr};
use old_io::{IoResult, IoError};
use libc::{self, c_char, c_int};
use mem;
use num::Int;
use ptr::{self, null, null_mut};
use str;
use sys::{self, retry, c, sock_t, last_error, last_net_error, last_gai_error, close_sock,
wrlen, msglen_t, os, wouldblock, set_nonblocking, timer, ms_to_timeval,
decode_error_detailed};
use sync::{Arc, Mutex, MutexGuard};
use sys_common::{self, keep_going, short_write, timeout};
use cmp;
use old_io;
// FIXME: move uses of Arc and deadline tracking to std::io
#[derive(Debug)]
pub enum SocketStatus {
Readable,
Writable,
}
////////////////////////////////////////////////////////////////////////////////
// sockaddr and misc bindings
////////////////////////////////////////////////////////////////////////////////
pub fn htons(u: u16) -> u16 {
u.to_be()
}
pub fn ntohs(u: u16) -> u16 {
Int::from_be(u)
}
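// e.g. on a little-endian host htons(0x1234) == 0x3412 and ntohs undoes it;
// on a big-endian host both functions are the identity.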
pub enum InAddr {
In4Addr(libc::in_addr),
In6Addr(libc::in6_addr),
}
pub fn ip_to_inaddr(ip: IpAddr) -> InAddr {
match ip {
Ipv4Addr(a, b, c, d) => {
let ip = ((a as u32) << 24) |
((b as u32) << 16) |
((c as u32) << 8) |
((d as u32) << 0);
In4Addr(libc::in_addr {
s_addr: Int::from_be(ip)
})
}
Ipv6Addr(a, b, c, d, e, f, g, h) => {
In6Addr(libc::in6_addr {
s6_addr: [
htons(a),
htons(b),
htons(c),
htons(d),
htons(e),
htons(f),
htons(g),
htons(h),
]
})
}
}
}
pub fn addr_to_sockaddr(addr: SocketAddr,
storage: &mut libc::sockaddr_storage)
-> libc::socklen_t {
unsafe {
let len = match ip_to_inaddr(addr.ip) {
In4Addr(inaddr) => {
let storage = storage as *mut _ as *mut libc::sockaddr_in;
(*storage).sin_family = libc::AF_INET as libc::sa_family_t;
(*storage).sin_port = htons(addr.port);
(*storage).sin_addr = inaddr;
mem::size_of::<libc::sockaddr_in>()
}
In6Addr(inaddr) => {
let storage = storage as *mut _ as *mut libc::sockaddr_in6;
(*storage).sin6_family = libc::AF_INET6 as libc::sa_family_t;
(*storage).sin6_port = htons(addr.port);
(*storage).sin6_addr = inaddr;
mem::size_of::<libc::sockaddr_in6>()
}
};
return len as libc::socklen_t;
}
}
pub fn socket(addr: SocketAddr, ty: libc::c_int) -> IoResult<sock_t> {
unsafe {
let fam = match addr.ip {
Ipv4Addr(..) => libc::AF_INET,
Ipv6Addr(..) => libc::AF_INET6,
};
match libc::socket(fam, ty, 0) {
-1 => Err(last_net_error()),
fd => Ok(fd),
}
}
}
pub fn setsockopt<T>(fd: sock_t, opt: libc::c_int, val: libc::c_int,
payload: T) -> IoResult<()> {
unsafe {
let payload = &payload as *const T as *const libc::c_void;
let ret = libc::setsockopt(fd, opt, val,
payload,
mem::size_of::<T>() as libc::socklen_t);
if ret != 0 {
Err(last_net_error())
} else {
Ok(())
}
}
}
pub fn getsockopt<T: Copy>(fd: sock_t, opt: libc::c_int,
val: libc::c_int) -> IoResult<T> {
unsafe {
let mut slot: T = mem::zeroed();
let mut len = mem::size_of::<T>() as libc::socklen_t;
let ret = c::getsockopt(fd, opt, val,
&mut slot as *mut _ as *mut _,
&mut len);
if ret != 0 {
Err(last_net_error())
} else {
assert!(len as uint == mem::size_of::<T>());
Ok(slot)
}
}
}
pub fn sockname(fd: sock_t,
f: unsafe extern "system" fn(sock_t, *mut libc::sockaddr,
*mut libc::socklen_t) -> libc::c_int)
-> IoResult<SocketAddr>
{
let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() };
let mut len = mem::size_of::<libc::sockaddr_storage>() as libc::socklen_t;
unsafe {
let storage = &mut storage as *mut libc::sockaddr_storage;
let ret = f(fd,
storage as *mut libc::sockaddr,
&mut len as *mut libc::socklen_t);
if ret != 0 {
return Err(last_net_error())
}
}
return sockaddr_to_addr(&storage, len as uint);
}
pub fn sockaddr_to_addr(storage: &libc::sockaddr_storage,
len: uint) -> IoResult<SocketAddr> {
match storage.ss_family as libc::c_int {
libc::AF_INET => {
assert!(len as uint >= mem::size_of::<libc::sockaddr_in>());
let storage: &libc::sockaddr_in = unsafe {
mem::transmute(storage)
};
let ip = (storage.sin_addr.s_addr as u32).to_be();
let a = (ip >> 24) as u8;
let b = (ip >> 16) as u8;
let c = (ip >> 8) as u8;
let d = (ip >> 0) as u8;
Ok(SocketAddr {
ip: Ipv4Addr(a, b, c, d),
port: ntohs(storage.sin_port),
})
}
libc::AF_INET6 => {
assert!(len as uint >= mem::size_of::<libc::sockaddr_in6>());
let storage: &libc::sockaddr_in6 = unsafe {
mem::transmute(storage)
};
let a = ntohs(storage.sin6_addr.s6_addr[0]);
let b = ntohs(storage.sin6_addr.s6_addr[1]);
let c = ntohs(storage.sin6_addr.s6_addr[2]);
let d = ntohs(storage.sin6_addr.s6_addr[3]);
let e = ntohs(storage.sin6_addr.s6_addr[4]);
let f = ntohs(storage.sin6_addr.s6_addr[5]);
let g = ntohs(storage.sin6_addr.s6_addr[6]);
let h = ntohs(storage.sin6_addr.s6_addr[7]);
Ok(SocketAddr {
ip: Ipv6Addr(a, b, c, d, e, f, g, h),
port: ntohs(storage.sin6_port),
})
}
_ => {
Err(IoError {
kind: old_io::InvalidInput,
desc: "invalid argument",
detail: None,
})
}
}
}
////////////////////////////////////////////////////////////////////////////////
// get_host_addresses
////////////////////////////////////////////////////////////////////////////////
extern "system" {
fn getaddrinfo(node: *const c_char, service: *const c_char,
hints: *const libc::addrinfo,
res: *mut *mut libc::addrinfo) -> c_int;
fn freeaddrinfo(res: *mut libc::addrinfo);
}
pub fn get_host_addresses(host: Option<&str>, servname: Option<&str>,
hint: Option<addrinfo::Hint>)
-> Result<Vec<addrinfo::Info>, IoError>
{
sys::init_net();
assert!(host.is_some() || servname.is_some());
let c_host = host.map(|x| CString::from_slice(x.as_bytes()));
let c_host = c_host.as_ref().map(|x| x.as_ptr()).unwrap_or(null());
let c_serv = servname.map(|x| CString::from_slice(x.as_bytes()));
let c_serv = c_serv.as_ref().map(|x| x.as_ptr()).unwrap_or(null());
let hint = hint.map(|hint| {
libc::addrinfo {
ai_flags: hint.flags as c_int,
ai_family: hint.family as c_int,
ai_socktype: 0,
ai_protocol: 0,
ai_addrlen: 0,
ai_canonname: null_mut(),
ai_addr: null_mut(),
ai_next: null_mut()
}
});
let hint_ptr = hint.as_ref().map_or(null(), |x| {
x as *const libc::addrinfo
});
let mut res = null_mut();
// Make the call
let s = unsafe {
getaddrinfo(c_host, c_serv, hint_ptr, &mut res)
};
// Error?
if s != 0 {
return Err(last_gai_error(s));
}
// Collect all the results we found
let mut addrs = Vec::new();
let mut rp = res;<|fim▁hole|> unsafe {
let addr = try!(sockaddr_to_addr(mem::transmute((*rp).ai_addr),
(*rp).ai_addrlen as uint));
addrs.push(addrinfo::Info {
address: addr,
family: (*rp).ai_family as uint,
socktype: None,
protocol: None,
flags: (*rp).ai_flags as uint
});
rp = (*rp).ai_next as *mut libc::addrinfo;
}
}
unsafe { freeaddrinfo(res); }
Ok(addrs)
}
////////////////////////////////////////////////////////////////////////////////
// get_address_name
////////////////////////////////////////////////////////////////////////////////
extern "system" {
fn getnameinfo(sa: *const libc::sockaddr, salen: libc::socklen_t,
host: *mut c_char, hostlen: libc::size_t,
serv: *mut c_char, servlen: libc::size_t,
flags: c_int) -> c_int;
}
const NI_MAXHOST: uint = 1025;
pub fn get_address_name(addr: IpAddr) -> Result<String, IoError> {
let addr = SocketAddr{ip: addr, port: 0};
let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() };
let len = addr_to_sockaddr(addr, &mut storage);
let mut hostbuf = [0 as c_char; NI_MAXHOST];
let res = unsafe {
getnameinfo(&storage as *const _ as *const libc::sockaddr, len,
hostbuf.as_mut_ptr(), NI_MAXHOST as libc::size_t,
ptr::null_mut(), 0,
0)
};
if res != 0 {
return Err(last_gai_error(res));
}
unsafe {
Ok(str::from_utf8(ffi::c_str_to_bytes(&hostbuf.as_ptr()))
.unwrap().to_string())
}
}
////////////////////////////////////////////////////////////////////////////////
// Timeout helpers
//
// The read/write functions below are the helpers for reading/writing a socket
// with a possible deadline specified. This is generally viewed as a timed out
// I/O operation.
//
// From the application's perspective, timeouts apply to the I/O object, not to
// the underlying file descriptor (it's one timeout per object). This means that
// we can't use the SO_RCVTIMEO and corresponding send timeout option.
//
// The next idea to implement timeouts would be to use nonblocking I/O. An
// invocation of select() would wait (with a timeout) for a socket to be ready.
// Once it's ready, we can perform the operation. Note that the operation *must*
// be nonblocking, even though select() says the socket is ready. This is
// because some other thread could have come and stolen our data (handles can be
// cloned).
//
// To implement nonblocking I/O, the first option we have is to use the
// O_NONBLOCK flag. Remember though that this is a global setting, affecting all
// I/O objects, so this was initially viewed as unwise.
//
// It turns out that there's this nifty MSG_DONTWAIT flag which can be passed to
// send/recv, but the niftiness wears off once you realize it only works well on
// Linux [1] [2]. This means that it's pretty easy to get a nonblocking
// operation on Linux (no flag fiddling, no affecting other objects), but not on
// other platforms.
//
// To work around this constraint on other platforms, we end up using the
// original strategy of flipping the O_NONBLOCK flag. As mentioned before, this
// could cause other objects' blocking operations to suddenly become
// nonblocking. To get around this, a "blocking operation" which returns EAGAIN
// falls back to using the same code path as nonblocking operations, but with an
// infinite timeout (select + send/recv). This helps emulate blocking
// reads/writes despite the underlying descriptor being nonblocking, as well as
// optimizing the fast path of just hitting one syscall in the good case.
//
// As a final caveat, this implementation uses a mutex so only one thread is
// doing a nonblocking operation at a time. This is the operation that comes
// after the select() (at which point we think the socket is ready). This is
// done for sanity to ensure that the state of the O_NONBLOCK flag is what we
// expect (wouldn't want someone turning it on when it should be off!). All
// operations performed in the lock are *nonblocking* to avoid holding the mutex
// forever.
//
// So, in summary, Linux uses MSG_DONTWAIT and doesn't need mutexes, everyone
// else uses O_NONBLOCK and mutexes with some trickery to make sure blocking
// reads/writes are still blocking.
//
// Fun, fun!
//
// [1] http://twistedmatrix.com/pipermail/twisted-commits/2012-April/034692.html
// [2] http://stackoverflow.com/questions/19819198/does-send-msg-dontwait
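//
// A minimal sketch of the resulting read path (illustrative only; `fd`,
// the deadline and the closures come from the callers below, and `do_read`
// is a stand-in for the caller-supplied recv closure):
//
//     try!(await(&[fd], Some(deadline), Readable));
//     let _guard = lock();                 // flips on O_NONBLOCK off-Linux
//     match retry(|| do_read(/* nonblocking: */ true)) {
//         -1 if wouldblock() => {}         // data was stolen; select again
//         -1 => return Err(last_net_error()),
//         n => { /* got n bytes */ }
//     }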
pub fn read<T, L, R>(fd: sock_t, deadline: u64, mut lock: L, mut read: R) -> IoResult<uint> where
L: FnMut() -> T,
R: FnMut(bool) -> libc::c_int,
{
let mut ret = -1;
if deadline == 0 {
ret = retry(|| read(false));
}
if deadline != 0 || (ret == -1 && wouldblock()) {
let deadline = match deadline {
0 => None,
n => Some(n),
};
loop {
// With a timeout, first we wait for the socket to become
// readable using select(), specifying the relevant timeout for
// our previously set deadline.
try!(await(&[fd], deadline, Readable));
// At this point, we're still within the timeout, and we've
// determined that the socket is readable (as returned by
// select). We must still read the socket in *nonblocking* mode
// because some other thread could come steal our data. If we
// fail to read some data, we retry (hence the outer loop) and
// wait for the socket to become readable again.
let _guard = lock();
match retry(|| read(deadline.is_some())) {
-1 if wouldblock() => {}
-1 => return Err(last_net_error()),
n => { ret = n; break }
}
}
}
match ret {
0 => Err(sys_common::eof()),
n if n < 0 => Err(last_net_error()),
n => Ok(n as uint)
}
}
pub fn write<T, L, W>(fd: sock_t,
deadline: u64,
buf: &[u8],
write_everything: bool,
mut lock: L,
mut write: W) -> IoResult<uint> where
L: FnMut() -> T,
W: FnMut(bool, *const u8, uint) -> i64,
{
let mut ret = -1;
let mut written = 0;
if deadline == 0 {
if write_everything {
ret = keep_going(buf, |inner, len| {
written = buf.len() - len;
write(false, inner, len)
});
} else {
ret = retry(|| { write(false, buf.as_ptr(), buf.len()) });
if ret > 0 { written = ret as uint; }
}
}
if deadline != 0 || (ret == -1 && wouldblock()) {
let deadline = match deadline {
0 => None,
n => Some(n),
};
while written < buf.len() && (write_everything || written == 0) {
// As with read(), first wait for the socket to be ready for
// the I/O operation.
match await(&[fd], deadline, Writable) {
Err(ref e) if e.kind == old_io::EndOfFile && written > 0 => {
assert!(deadline.is_some());
return Err(short_write(written, "short write"))
}
Err(e) => return Err(e),
Ok(()) => {}
}
// Also as with read(), we use MSG_DONTWAIT to guard ourselves
// against unforeseen circumstances.
let _guard = lock();
let ptr = buf[written..].as_ptr();
let len = buf.len() - written;
match retry(|| write(deadline.is_some(), ptr, len)) {
-1 if wouldblock() => {}
-1 => return Err(last_net_error()),
n => { written += n as uint; }
}
}
ret = 0;
}
if ret < 0 {
Err(last_net_error())
} else {
Ok(written)
}
}
// See http://developerweb.net/viewtopic.php?id=3196 for where this is
// derived from.
pub fn connect_timeout(fd: sock_t,
addrp: *const libc::sockaddr,
len: libc::socklen_t,
timeout_ms: u64) -> IoResult<()> {
#[cfg(unix)] use libc::EINPROGRESS as INPROGRESS;
#[cfg(windows)] use libc::WSAEINPROGRESS as INPROGRESS;
#[cfg(unix)] use libc::EWOULDBLOCK as WOULDBLOCK;
#[cfg(windows)] use libc::WSAEWOULDBLOCK as WOULDBLOCK;
// Make sure the call to connect() doesn't block
try!(set_nonblocking(fd, true));
let ret = match unsafe { libc::connect(fd, addrp, len) } {
// If the connection is in progress, then we need to wait for it to
// finish (with a timeout). The current strategy for doing this is
// to use select() with a timeout.
-1 if os::errno() as int == INPROGRESS as int ||
os::errno() as int == WOULDBLOCK as int => {
let mut set: c::fd_set = unsafe { mem::zeroed() };
c::fd_set(&mut set, fd);
match await(fd, &mut set, timeout_ms) {
0 => Err(timeout("connection timed out")),
-1 => Err(last_net_error()),
_ => {
let err: libc::c_int = try!(
getsockopt(fd, libc::SOL_SOCKET, libc::SO_ERROR));
if err == 0 {
Ok(())
} else {
Err(decode_error_detailed(err))
}
}
}
}
-1 => Err(last_net_error()),
_ => Ok(()),
};
// be sure to turn blocking I/O back on
try!(set_nonblocking(fd, false));
return ret;
#[cfg(unix)]
fn await(fd: sock_t, set: &mut c::fd_set, timeout: u64) -> libc::c_int {
let start = timer::now();
retry(|| unsafe {
// Recalculate the timeout each iteration (it is generally
// undefined what the value of the 'tv' is after select
// returns EINTR).
let mut tv = ms_to_timeval(timeout - (timer::now() - start));
c::select(fd + 1, ptr::null_mut(), set as *mut _,
ptr::null_mut(), &mut tv)
})
}
#[cfg(windows)]
fn await(_fd: sock_t, set: &mut c::fd_set, timeout: u64) -> libc::c_int {
let mut tv = ms_to_timeval(timeout);
unsafe { c::select(1, ptr::null_mut(), set, ptr::null_mut(), &mut tv) }
}
}
pub fn await(fds: &[sock_t], deadline: Option<u64>,
status: SocketStatus) -> IoResult<()> {
let mut set: c::fd_set = unsafe { mem::zeroed() };
let mut max = 0;
for &fd in fds {
c::fd_set(&mut set, fd);
max = cmp::max(max, fd + 1);
}
if cfg!(windows) {
max = fds.len() as sock_t;
}
let (read, write) = match status {
Readable => (&mut set as *mut _, ptr::null_mut()),
Writable => (ptr::null_mut(), &mut set as *mut _),
};
let mut tv: libc::timeval = unsafe { mem::zeroed() };
match retry(|| {
let now = timer::now();
let tvp = match deadline {
None => ptr::null_mut(),
Some(deadline) => {
// If we're past the deadline, then pass a 0 timeout to
// select() so we can poll the status
let ms = if deadline < now {0} else {deadline - now};
tv = ms_to_timeval(ms);
&mut tv as *mut _
}
};
let r = unsafe {
c::select(max as libc::c_int, read, write, ptr::null_mut(), tvp)
};
r
}) {
-1 => Err(last_net_error()),
0 => Err(timeout("timed out")),
_ => Ok(()),
}
}
////////////////////////////////////////////////////////////////////////////////
// Basic socket representation
////////////////////////////////////////////////////////////////////////////////
struct Inner {
fd: sock_t,
// Unused on Linux, where this lock is not necessary.
#[allow(dead_code)]
lock: Mutex<()>,
}
impl Inner {
fn new(fd: sock_t) -> Inner {
Inner { fd: fd, lock: Mutex::new(()) }
}
}
impl Drop for Inner {
fn drop(&mut self) { unsafe { close_sock(self.fd); } }
}
pub struct Guard<'a> {
pub fd: sock_t,
pub guard: MutexGuard<'a, ()>,
}
#[unsafe_destructor]
impl<'a> Drop for Guard<'a> {
fn drop(&mut self) {
assert!(set_nonblocking(self.fd, false).is_ok());
}
}
////////////////////////////////////////////////////////////////////////////////
// TCP streams
////////////////////////////////////////////////////////////////////////////////
pub struct TcpStream {
inner: Arc<Inner>,
read_deadline: u64,
write_deadline: u64,
}
impl TcpStream {
pub fn connect(addr: SocketAddr, timeout: Option<u64>) -> IoResult<TcpStream> {
sys::init_net();
let fd = try!(socket(addr, libc::SOCK_STREAM));
let ret = TcpStream::new(fd);
let mut storage = unsafe { mem::zeroed() };
let len = addr_to_sockaddr(addr, &mut storage);
let addrp = &storage as *const _ as *const libc::sockaddr;
match timeout {
Some(timeout) => {
try!(connect_timeout(fd, addrp, len, timeout));
Ok(ret)
},
None => {
match retry(|| unsafe { libc::connect(fd, addrp, len) }) {
-1 => Err(last_error()),
_ => Ok(ret),
}
}
}
}
pub fn new(fd: sock_t) -> TcpStream {
TcpStream {
inner: Arc::new(Inner::new(fd)),
read_deadline: 0,
write_deadline: 0,
}
}
pub fn fd(&self) -> sock_t { self.inner.fd }
pub fn set_nodelay(&mut self, nodelay: bool) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_TCP, libc::TCP_NODELAY,
nodelay as libc::c_int)
}
pub fn set_keepalive(&mut self, seconds: Option<uint>) -> IoResult<()> {
let ret = setsockopt(self.fd(), libc::SOL_SOCKET, libc::SO_KEEPALIVE,
seconds.is_some() as libc::c_int);
match seconds {
Some(n) => ret.and_then(|()| self.set_tcp_keepalive(n)),
None => ret,
}
}
#[cfg(any(target_os = "macos", target_os = "ios"))]
fn set_tcp_keepalive(&mut self, seconds: uint) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_TCP, libc::TCP_KEEPALIVE,
seconds as libc::c_int)
}
#[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
fn set_tcp_keepalive(&mut self, seconds: uint) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_TCP, libc::TCP_KEEPIDLE,
seconds as libc::c_int)
}
#[cfg(target_os = "openbsd")]
fn set_tcp_keepalive(&mut self, seconds: uint) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_TCP, libc::SO_KEEPALIVE,
seconds as libc::c_int)
}
#[cfg(not(any(target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd")))]
fn set_tcp_keepalive(&mut self, _seconds: uint) -> IoResult<()> {
Ok(())
}
#[cfg(target_os = "linux")]
fn lock_nonblocking(&self) {}
#[cfg(not(target_os = "linux"))]
fn lock_nonblocking<'a>(&'a self) -> Guard<'a> {
let ret = Guard {
fd: self.fd(),
guard: self.inner.lock.lock().unwrap(),
};
assert!(set_nonblocking(self.fd(), true).is_ok());
ret
}
pub fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let fd = self.fd();
let dolock = || self.lock_nonblocking();
let doread = |nb| unsafe {
let flags = if nb {c::MSG_DONTWAIT} else {0};
libc::recv(fd,
buf.as_mut_ptr() as *mut libc::c_void,
buf.len() as wrlen,
flags) as libc::c_int
};
read(fd, self.read_deadline, dolock, doread)
}
pub fn write(&mut self, buf: &[u8]) -> IoResult<()> {
let fd = self.fd();
let dolock = || self.lock_nonblocking();
let dowrite = |nb: bool, buf: *const u8, len: uint| unsafe {
let flags = if nb {c::MSG_DONTWAIT} else {0};
libc::send(fd,
buf as *const _,
len as wrlen,
flags) as i64
};
write(fd, self.write_deadline, buf, true, dolock, dowrite).map(|_| ())
}
pub fn peer_name(&mut self) -> IoResult<SocketAddr> {
sockname(self.fd(), libc::getpeername)
}
pub fn close_write(&mut self) -> IoResult<()> {
super::mkerr_libc(unsafe { libc::shutdown(self.fd(), libc::SHUT_WR) })
}
pub fn close_read(&mut self) -> IoResult<()> {
super::mkerr_libc(unsafe { libc::shutdown(self.fd(), libc::SHUT_RD) })
}
pub fn set_timeout(&mut self, timeout: Option<u64>) {
let deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
self.read_deadline = deadline;
self.write_deadline = deadline;
}
pub fn set_read_timeout(&mut self, timeout: Option<u64>) {
self.read_deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
}
pub fn set_write_timeout(&mut self, timeout: Option<u64>) {
self.write_deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
}
pub fn socket_name(&mut self) -> IoResult<SocketAddr> {
sockname(self.fd(), libc::getsockname)
}
}
impl Clone for TcpStream {
fn clone(&self) -> TcpStream {
TcpStream {
inner: self.inner.clone(),
read_deadline: 0,
write_deadline: 0,
}
}
}
////////////////////////////////////////////////////////////////////////////////
// UDP
////////////////////////////////////////////////////////////////////////////////
pub struct UdpSocket {
inner: Arc<Inner>,
read_deadline: u64,
write_deadline: u64,
}
impl UdpSocket {
pub fn bind(addr: SocketAddr) -> IoResult<UdpSocket> {
sys::init_net();
let fd = try!(socket(addr, libc::SOCK_DGRAM));
let ret = UdpSocket {
inner: Arc::new(Inner::new(fd)),
read_deadline: 0,
write_deadline: 0,
};
let mut storage = unsafe { mem::zeroed() };
let len = addr_to_sockaddr(addr, &mut storage);
let addrp = &storage as *const _ as *const libc::sockaddr;
match unsafe { libc::bind(fd, addrp, len) } {
-1 => Err(last_error()),
_ => Ok(ret),
}
}
pub fn fd(&self) -> sock_t { self.inner.fd }
pub fn set_broadcast(&mut self, on: bool) -> IoResult<()> {
setsockopt(self.fd(), libc::SOL_SOCKET, libc::SO_BROADCAST,
on as libc::c_int)
}
pub fn set_multicast_loop(&mut self, on: bool) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_IP, libc::IP_MULTICAST_LOOP,
on as libc::c_int)
}
pub fn set_membership(&mut self, addr: IpAddr, opt: libc::c_int) -> IoResult<()> {
match ip_to_inaddr(addr) {
In4Addr(addr) => {
let mreq = libc::ip_mreq {
imr_multiaddr: addr,
// interface == INADDR_ANY
imr_interface: libc::in_addr { s_addr: 0x0 },
};
setsockopt(self.fd(), libc::IPPROTO_IP, opt, mreq)
}
In6Addr(addr) => {
let mreq = libc::ip6_mreq {
ipv6mr_multiaddr: addr,
ipv6mr_interface: 0,
};
setsockopt(self.fd(), libc::IPPROTO_IPV6, opt, mreq)
}
}
}
#[cfg(target_os = "linux")]
fn lock_nonblocking(&self) {}
#[cfg(not(target_os = "linux"))]
fn lock_nonblocking<'a>(&'a self) -> Guard<'a> {
let ret = Guard {
fd: self.fd(),
guard: self.inner.lock.lock().unwrap(),
};
assert!(set_nonblocking(self.fd(), true).is_ok());
ret
}
pub fn socket_name(&mut self) -> IoResult<SocketAddr> {
sockname(self.fd(), libc::getsockname)
}
pub fn recv_from(&mut self, buf: &mut [u8]) -> IoResult<(uint, SocketAddr)> {
let fd = self.fd();
let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() };
let storagep = &mut storage as *mut _ as *mut libc::sockaddr;
let mut addrlen: libc::socklen_t =
mem::size_of::<libc::sockaddr_storage>() as libc::socklen_t;
let dolock = || self.lock_nonblocking();
let n = try!(read(fd, self.read_deadline, dolock, |nb| unsafe {
let flags = if nb {c::MSG_DONTWAIT} else {0};
libc::recvfrom(fd,
buf.as_mut_ptr() as *mut libc::c_void,
buf.len() as msglen_t,
flags,
storagep,
&mut addrlen) as libc::c_int
}));
sockaddr_to_addr(&storage, addrlen as uint).and_then(|addr| {
Ok((n as uint, addr))
})
}
pub fn send_to(&mut self, buf: &[u8], dst: SocketAddr) -> IoResult<()> {
let mut storage = unsafe { mem::zeroed() };
let dstlen = addr_to_sockaddr(dst, &mut storage);
let dstp = &storage as *const _ as *const libc::sockaddr;
let fd = self.fd();
let dolock = || self.lock_nonblocking();
let dowrite = |nb, buf: *const u8, len: uint| unsafe {
let flags = if nb {c::MSG_DONTWAIT} else {0};
libc::sendto(fd,
buf as *const libc::c_void,
len as msglen_t,
flags,
dstp,
dstlen) as i64
};
let n = try!(write(fd, self.write_deadline, buf, false, dolock, dowrite));
if n != buf.len() {
Err(short_write(n, "couldn't send entire packet at once"))
} else {
Ok(())
}
}
pub fn join_multicast(&mut self, multi: IpAddr) -> IoResult<()> {
match multi {
Ipv4Addr(..) => {
self.set_membership(multi, libc::IP_ADD_MEMBERSHIP)
}
Ipv6Addr(..) => {
self.set_membership(multi, libc::IPV6_ADD_MEMBERSHIP)
}
}
}
pub fn leave_multicast(&mut self, multi: IpAddr) -> IoResult<()> {
match multi {
Ipv4Addr(..) => {
self.set_membership(multi, libc::IP_DROP_MEMBERSHIP)
}
Ipv6Addr(..) => {
self.set_membership(multi, libc::IPV6_DROP_MEMBERSHIP)
}
}
}
pub fn multicast_time_to_live(&mut self, ttl: int) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_IP, libc::IP_MULTICAST_TTL,
ttl as libc::c_int)
}
pub fn time_to_live(&mut self, ttl: int) -> IoResult<()> {
setsockopt(self.fd(), libc::IPPROTO_IP, libc::IP_TTL, ttl as libc::c_int)
}
pub fn set_timeout(&mut self, timeout: Option<u64>) {
let deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
self.read_deadline = deadline;
self.write_deadline = deadline;
}
pub fn set_read_timeout(&mut self, timeout: Option<u64>) {
self.read_deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
}
pub fn set_write_timeout(&mut self, timeout: Option<u64>) {
self.write_deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
}
}
impl Clone for UdpSocket {
fn clone(&self) -> UdpSocket {
UdpSocket {
inner: self.inner.clone(),
read_deadline: 0,
write_deadline: 0,
}
}
}<|fim▁end|> | while !rp.is_null() { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by<|fim▁hole|>// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
#![warn(missing_docs)]
#![cfg_attr(feature="dev", feature(plugin))]
#![cfg_attr(feature="dev", plugin(clippy))]
// Clippy settings
// Most of the time much more readable
#![cfg_attr(feature="dev", allow(needless_range_loop))]
// Shorter than if-else
#![cfg_attr(feature="dev", allow(match_bool))]
// We use that to be more explicit about handled cases
#![cfg_attr(feature="dev", allow(match_same_arms))]
// Keeps consistency (all lines with `.clone()`).
#![cfg_attr(feature="dev", allow(clone_on_copy))]
// Some false positives when doing pattern matching.
#![cfg_attr(feature="dev", allow(needless_borrow))]
// TODO [todr] a lot of warnings to be fixed
#![cfg_attr(feature="dev", allow(assign_op_pattern))]
//! Ethcore-util library
//!
//! ### Rust version:
//! - nightly
//!
//! ### Supported platforms:
//! - OSX
//! - Linux
//!
//! ### Building:
//!
//! - Ubuntu 14.04 and later:
//!
//! ```bash
//! # install rocksdb
//! add-apt-repository "deb http://ppa.launchpad.net/giskou/librocksdb/ubuntu trusty main"
//! apt-get update
//! apt-get install -y --force-yes librocksdb
//!
//! # install multirust
//! curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes
//!
//! # install nightly and make it default
//! multirust update nightly && multirust default nightly
//!
//! # export rust LIBRARY_PATH
//! export LIBRARY_PATH=/usr/local/lib
//!
//! # download and build parity
//! git clone https://github.com/ethcore/parity
//! cd parity
//! cargo build --release
//! ```
//!
//! - OSX:
//!
//! ```bash
//! # install rocksdb && multirust
//! brew update
//! brew install rocksdb
//! brew install multirust
//!
//! # install nightly and make it default
//! multirust update nightly && multirust default nightly
//!
//! # export rust LIBRARY_PATH
//! export LIBRARY_PATH=/usr/local/lib
//!
//! # download and build parity
//! git clone https://github.com/ethcore/parity
//! cd parity
//! cargo build --release
//! ```
extern crate rustc_serialize;
extern crate rand;
extern crate rocksdb;
extern crate env_logger;
extern crate crypto as rcrypto;
extern crate secp256k1;
extern crate arrayvec;
extern crate elastic_array;
extern crate time;
extern crate ethcore_devtools as devtools;
extern crate libc;
extern crate target_info;
extern crate ethcore_bigint as bigint;
extern crate parking_lot;
extern crate ansi_term;
extern crate tiny_keccak;
extern crate rlp;
extern crate regex;
#[macro_use]
extern crate heapsize;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate itertools;
#[macro_use]
extern crate log as rlog;
pub extern crate using_queue;
pub extern crate table;
pub mod bloom;
pub mod standard;
#[macro_use]
pub mod from_json;
#[macro_use]
pub mod common;
pub mod error;
pub mod bytes;
pub mod misc;
pub mod vector;
pub mod sha3;
pub mod hashdb;
pub mod memorydb;
pub mod migration;
pub mod overlaydb;
pub mod journaldb;
pub mod kvdb;
pub mod triehash;
pub mod trie;
pub mod nibbleslice;
pub mod nibblevec;
pub mod semantic_version;
pub mod log;
pub mod path;
pub mod snappy;
mod timer;
pub use common::*;
pub use misc::*;
pub use hashdb::*;
pub use memorydb::*;
pub use overlaydb::*;
pub use journaldb::JournalDB;
pub use triehash::*;
pub use trie::{Trie, TrieMut, TrieDB, TrieDBMut, TrieFactory, TrieError, SecTrieDB, SecTrieDBMut};
pub use nibbleslice::*;
pub use semantic_version::*;
pub use log::*;
pub use kvdb::*;
pub use timer::*;
/// 160-bit integer representing account address
pub type Address = H160;
/// Secret
pub type Secret = H256;<|fim▁end|> | // the Free Software Foundation, either version 3 of the License, or |
<|file_name|>cobp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# encoding=utf8
"""
Copyright (C) 2018 MuadDib
----------------------------------------------------------------------------
"THE BEER-WARE LICENSE" (Revision 42):
@tantrumdev wrote this file. As long as you retain this notice you can do
whatever you want with this stuff. Just Ask first when not released through
the tools and parser GIT. If we meet some day, and you think this stuff is
worth it, you can buy him a beer in return. - Muad'Dib
----------------------------------------------------------------------------
Changelog:
2018.7.2:
- Added Clear Cache function
- Minor update on fetch cache returns
2018.6.21:
- Added caching to primary menus (Cache time is 3 hours)
Usage Examples:
<dir>
<title>HD Videos</title>
<cobp>tag/hd-porn/newest</cobp>
</dir>
<dir>
<title>Most Popular</title>
<cobp>most-viewed</cobp>
</dir>
<dir>
<title>Most Recent</title>
<cobp>most-recent</cobp>
</dir>
<dir>
<title>Amateur</title>
<cobp>category/amateur</cobp>
</dir>
<dir>
<title>Anal</title>
<cobp>category/anal</cobp>
</dir>
<dir>
<title>Asian</title>
<cobp>category/asian</cobp>
</dir>
"""
import __builtin__
import base64,time
import json,re,requests,os,traceback,urlparse
import koding
import xbmc,xbmcaddon,xbmcgui
from koding import route
from resources.lib.plugin import Plugin
from resources.lib.util import dom_parser
from resources.lib.util.context import get_context_items
from resources.lib.util.xml import JenItem, JenList, display_list
from unidecode import unidecode
CACHE_TIME = 10800 # change to wanted cache time in seconds
addon_fanart = xbmcaddon.Addon().getAddonInfo('fanart')
addon_icon = xbmcaddon.Addon().getAddonInfo('icon')
next_icon = os.path.join(xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('path')), 'resources', 'media', 'next.png')
User_Agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36'
class COBP(Plugin):
name = "cobp"
def process_item(self, item_xml):
if "<cobp>" in item_xml:
item = JenItem(item_xml)
if "http" in item.get("cobp", ""):
result_item = {
'label': item["title"],
'icon': item.get("thumbnail", addon_icon),
'fanart': item.get("fanart", addon_fanart),
'mode': "PlayVideo",
'url': item.get("cobp", ""),
'folder': False,
'imdb': "0",
'content': "files",
'season': "0",
'episode': "0",
'info': {},
'year': "0",
'context': get_context_items(item),
"summary": item.get("summary", None)
}
elif "category/" in item.get("cobp", ""):
result_item = {
'label': item["title"],
'icon': item.get("thumbnail", addon_icon),
'fanart': item.get("fanart", addon_fanart),
'mode': "COBP",
'url': item.get("cobp", ""),
'folder': True,
'imdb': "0",
'content': "files",
'season': "0",
'episode': "0",
'info': {},
'year': "0",
'context': get_context_items(item),
"summary": item.get("summary", None)
}
elif "tag/" in item.get("cobp", ""):
result_item = {
'label': item["title"],
'icon': item.get("thumbnail", addon_icon),
'fanart': item.get("fanart", addon_fanart),
'mode': "COBP",
'url': item.get("cobp", ""),
'folder': True,
'imdb': "0",
'content': "files",
'season': "0",
'episode': "0",
'info': {},
'year': "0",
'context': get_context_items(item),
"summary": item.get("summary", None)
}
elif "most-" in item.get("cobp", ""):
result_item = {
'label': item["title"],
'icon': item.get("thumbnail", addon_icon),
'fanart': item.get("fanart", addon_fanart),
'mode': "COBP",
'url': item.get("cobp", ""),
'folder': True,
'imdb': "0",
'content': "files",
'season': "0",
'episode': "0",
'info': {},
'year': "0",
'context': get_context_items(item),
"summary": item.get("summary", None)
}
result_item["properties"] = {
'fanart_image': result_item["fanart"]
}
result_item['fanart_small'] = result_item["fanart"]
return result_item
def clear_cache(self):
dialog = xbmcgui.Dialog()
if dialog.yesno(xbmcaddon.Addon().getAddonInfo('name'), "Clear COBP Plugin Cache?"):
koding.Remove_Table("cobp_com_plugin")
<|fim▁hole|> xml = fetch_from_db(url)
if not xml:
xml = ""
try:
headers = {'User-Agent': User_Agent}
html = requests.get(url,headers=headers).content
vid_divs = dom_parser.parseDOM(html, 'div', attrs={'class':'video-item col-sm-5 col-md-4 col-xs-10'})
count = 0
for vid_section in vid_divs:
thumb_div = dom_parser.parseDOM(vid_section, 'div', attrs={'class':'video-thumb'})[0]
thumbnail = re.compile('<img src="(.+?)"',re.DOTALL).findall(str(thumb_div))[0]
vid_page_url = re.compile('href="(.+?)"',re.DOTALL).findall(str(thumb_div))[0]
title_div = dom_parser.parseDOM(vid_section, 'div', attrs={'class':'title'})[0]
title = remove_non_ascii(re.compile('title="(.+?)"',re.DOTALL).findall(str(title_div))[0])
count += 1
xml += "<item>"\
" <title>%s</title>"\
" <meta>"\
" <summary>%s</summary>"\
" </meta>"\
" <cobp>%s</cobp>"\
" <thumbnail>%s</thumbnail>"\
"</item>" % (title,title,vid_page_url,thumbnail)
try:
pagination = dom_parser.parseDOM(html, 'li', attrs={'class':'next'})[0]
next_page = dom_parser.parseDOM(pagination, 'a', ret='href')[0]
xml += "<dir>"\
" <title>Next Page</title>"\
" <meta>"\
" <summary>Click here for more porn bitches!</summary>"\
" </meta>"\
" <cobp>%s</cobp>"\
" <thumbnail>%s</thumbnail>"\
"</dir>" % (next_page,next_icon)
except:
pass
save_to_db(xml, url)
except:
pass
jenlist = JenList(xml)
display_list(jenlist.get_list(), jenlist.get_content_type())
@route(mode='PlayVideo', args=["url"])
def play_source(url):
try:
headers = {'User-Agent': User_Agent}
vid_html = requests.get(url,headers=headers).content
sources = dom_parser.parseDOM(vid_html, 'source', ret='src')
        vid_url = sources[-1]
xbmc.executebuiltin("PlayMedia(%s)" % vid_url)
    except Exception:
return
def save_to_db(item, url):
if not item or not url:
return False
try:
koding.reset_db()
koding.Remove_From_Table(
"cobp_com_plugin",
{
"url": url
})
koding.Add_To_Table("cobp_com_plugin",
{
"url": url,
"item": base64.b64encode(item),
"created": time.time()
})
    except Exception:
return False
def fetch_from_db(url):
koding.reset_db()
cobp_plugin_spec = {
"columns": {
"url": "TEXT",
"item": "TEXT",
"created": "TEXT"
},
"constraints": {
"unique": "url"
}
}
koding.Create_Table("cobp_com_plugin", cobp_plugin_spec)
match = koding.Get_From_Table(
"cobp_com_plugin", {"url": url})
if match:
match = match[0]
if not match["item"]:
return None
created_time = match["created"]
if created_time and float(created_time) + CACHE_TIME >= time.time():
match_item = match["item"]
try:
result = base64.b64decode(match_item)
            except Exception:
return None
return result
else:
return None
else:
return None
def remove_non_ascii(text):
return unidecode(text)<|fim▁end|> | @route(mode='COBP', args=["url"])
def get_stream(url):
url = urlparse.urljoin('http://collectionofbestporn.com/', url)
|
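The save_to_db/fetch_from_db pair above implements a small time-to-live cache on the koding table helpers: payloads are base64-encoded alongside a creation timestamp and served back only while fresh. A minimal standalone sketch of the same pattern, assuming plain sqlite3 and an illustrative CACHE_TIME (the plugin's real constant is defined outside this excerpt):

import base64
import sqlite3
import time

CACHE_TIME = 3600  # seconds; illustrative value only

conn = sqlite3.connect("plugin_cache.db")
conn.execute(
    "CREATE TABLE IF NOT EXISTS cache (url TEXT UNIQUE, item TEXT, created REAL)")

def save_to_cache(url, item):
    # store the payload base64-encoded with a timestamp, replacing any old row
    payload = base64.b64encode(item.encode()).decode()
    conn.execute("INSERT OR REPLACE INTO cache VALUES (?, ?, ?)",
                 (url, payload, time.time()))
    conn.commit()

def fetch_from_cache(url):
    # return the decoded payload only while it is younger than CACHE_TIME
    row = conn.execute(
        "SELECT item, created FROM cache WHERE url = ?", (url,)).fetchone()
    if row and float(row[1]) + CACHE_TIME >= time.time():
        return base64.b64decode(row[0]).decode()
    return None  # missing or expired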
<|file_name|>inheritance.py<|end_file_name|><|fim▁begin|>class Hayvan:
    def __init__(self, isim, renk):
        self.isim = isim
        self.renk = renk

    def yuru(self):
        print(self.isim + " started walking")

    def ye(self):
        print(self.isim + " started eating")
class Fare(Hayvan):<|fim▁hole|>
        print(self.isim + " walked quickly")
my_fare = Fare("black Eurasian rat", "blue")
my_fare.yuru()
my_fare.ye()<|fim▁end|> | def __init__(self,isim,renk):
super().__init__(isim,renk)
def yuru(self): |
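The Fare subclass above overrides yuru() while reusing the base-class initializer through super(). A self-contained English variant of the same idea, showing that an override can still call up to the base implementation:

class Animal:
    def __init__(self, name):
        self.name = name

    def walk(self):
        print(self.name + " started walking")

class Mouse(Animal):
    def walk(self):
        super().walk()                 # base behaviour first
        print(self.name + " sped up")  # then the specialised part

Mouse("field mouse").walk()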
<|file_name|>compiler.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The go-okcoin Authors
// This file is part of go-okcoin.<|fim▁hole|>//
// go-okcoin is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// go-okcoin is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with go-okcoin. If not, see <http://www.gnu.org/licenses/>.
package compiler
import (
"errors"
"fmt"
"github.com/okcoin/go-okcoin/core/asm"
)
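// Compile assembles src with the core asm package and returns the
// compiled binary as a string; fn only identifies the source in
// diagnostics.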
func Compile(fn string, src []byte, debug bool) (string, error) {
compiler := asm.NewCompiler(debug)
compiler.Feed(asm.Lex(fn, src, debug))
bin, compileErrors := compiler.Compile()
if len(compileErrors) > 0 {
// report errors
for _, err := range compileErrors {
fmt.Printf("%s:%v\n", fn, err)
}
return "", errors.New("compiling failed")
}
return bin, nil
}<|fim▁end|> | |
<|file_name|>test_database.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 Phil Birkelbach
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import unittest
import io
import time
import fixgw.database as database
# This is a poorly formatted example of a database configuration file.
# it should test leading/trailing spaces blank lines etc.
minimal_config = """
variables:
a: 8 #Generic Analogs
entries:
- key: ANLGa
description: Generic Analog %a
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 2000
"""
minimal_list = []
for x in range(8):
minimal_list.append("ANLG{}".format(x+1))
variable_config = """
variables:
e: 4 # Engines
c: 6 # Cylinders
t: 20 # Fuel Tanks
entries:
- key: EGTec
description: Exhaust Gas Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: FUELQt
description: Fuel Quantity Tank %t
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
"""
variable_list = []
for e in range(4):
for c in range(6):
variable_list.append("EGT{}{}".format(e+1,c+1))
for t in range(20):
variable_list.append("FUELQ{}".format(t+1))
variable_list.sort()
general_config = """
variables:
e: 1 # Engines
c: 6 # Cylinders
a: 8 # Generic Analogs
b: 16 # Generic Buttons
r: 1 # Encoders
t: 2 # Fuel Tanks
entries:
- key: ANLGa
description: Generic Analog %a
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 2000
- key: BTNb
description: Generic Button %b
type: bool
tol: 0
- key: ENCr
description: Generic Encoder %r
type: int
min: -32768
max: 32767
units: Pulses
initial: 0
tol: 0
- key: IAS
description: Indicated Airspeed
type: float
min: 0.0
max: 1000.0
units: knots
initial: 0.0
tol: 2000
aux: [Min,Max,V1,V2,Vne,Vfe,Vmc,Va,Vno,Vs,Vs0,Vx,Vy]
- key: IASW
description: Indicated Airspeed Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: TAS
description: True Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: CAS
    description: Calibrated Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: GS
description: Ground Speed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: ALT
description: Indicated Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: TALT
description: True Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: DALT
description: Density Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: BARO
description: Altimeter Setting
type: float
min: 0.0
max: 35.0
units: inHg
initial: 29.92
tol: 2000
- key: AIRPRESS
description: Air Pressure
type: float
min: 0.0
max: 200000.0
units: Pa
initial: 101325.0
tol: 2000
- key: VS
description: Vertical Speed
type: float
min: -30000.0
max: 30000.0
units: ft/min
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: HEAD
description: Current Aircraft Magnetic Heading
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACK
description: Current Aircraft Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACKM
description: Current Aircraft Magnetic Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: COURSE
description: Selected Course
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: CDI
description: Course Deviation Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: GSI
description: Glideslope Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: XTRACK
description: Cross Track Error
type: float
min: 0.0
max: 100.0
units: nM
initial: 0.0
tol: 2000
- key: OAT
description: Outside Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn]
- key: CAT
description: Cabin Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: OATW
description: Outside Air Temperature Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ROLL
description: Roll Angle
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: PITCH
description: Pitch Angle
type: float
min: -90.0
max: 90.0
units: deg
initial: 0.0
tol: 200
- key: ORISYSW
description: Orientation System Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: GYROW
description: Gyroscope sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ACCELW
description: Acceleration sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: MAGW
description: Magnetic sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: PITCHSET
description: Pitch angle setting
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: YAW
description: Yaw Angle
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: AOA
description: Angle of attack
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
aux:
- Min
- Max
- 0g
- Warn
- Stall
- key: CTLPTCH
description: Pitch Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLROLL
description: Roll Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLYAW
description: Yaw Control (Rudder)
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLCOLL
description: Collective Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLATP
description: AntiTorque Pedal Ctrl
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLFLAP
description: Flap Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLLBRK
description: Left Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLRBRK
description: Right Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: ANORM
description: Normal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALAT
description: Lateral Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALONG
description: Longitudinal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: THRe
description: Throttle Control Engine %e
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: MIXe
description: Mixture Control Engine %e
type: float
min: 0.0
max: 1.0<|fim▁hole|> tol: 1000
- key: OILPe
description: Oil Pressure Engine %e
type: float
min: 0.0
max: 200.0
units: psi
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: OILTe
description: Oil Temperature Engine %e
type: float
min: 0.0
max: 150.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: H2OTe
description: Coolant Temperature Engine %e
type: float
min: 0.0
max: 200.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELPe
description: Fuel Pressure Engine %e
type: float
min: 0.0
max: 200.0
units: psi
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELFe
description: Fuel Flow Engine %e
type: float
min: 0.0
max: 100.0
units: gal/hr
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: MAPe
description: Manifold Pressure Engine %e
type: float
min: 0.0
max: 60.0
units: inHg
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: VOLT
description: System Voltage
type: float
min: 0.0
max: 18.0
units: volt
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: CURRNT
description: Bus Current
type: float
min: 0.0
max: 60.0
units: amps
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: EGTec
description: Exhaust Gas Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: EGTAVGe
description: Average Exhaust Gas Temp Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 0
aux: [Min,Max]
- key: EGTSPANe
description: Exhaust Gas Temp Span Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 0
aux: [Min,Max]
- key: CHTec
description: Cylinder Head Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: CHTMAXe
description: Maximum Cylinder Head Temp Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELQt
description: Fuel Quantity Tank %t
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
- key: FUELQT
description: Total Fuel Quantity
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
- key: TACHe
description: Engine RPM Engine %e
type: int
min: 0
max: 10000
units: RPM
initial: 0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: PROPe
description: Propeller RPM Engine %e
type: int
min: 0
max: 10000
units: RPM
initial: 0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: LAT
description: Latitude
type: float
min: -90.0
max: 90.0
units: deg
initial: 0.0
tol: 2000
- key: LONG
description: Longitude
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 2000
- key: TIMEZ
description: Zulu Time String
type: str
tol: 2000
- key: TIMEZH
description: Zulu Time Hour
type: int
min: 0
max: 23
initial: 0
tol: 2000
- key: TIMEZM
description: Zulu Time Minute
type: int
min: 0
max: 59
initial: 0
tol: 2000
- key: TIMEZS
description: Zulu Time Second
type: int
min: 0
max: 59
initial: 0
tol: 2000
- key: TIMEL
description: Local Time String
type: str
tol: 0
- key: TZONE
description: Time Zone
type: float
min: -12.0
max: 12.0
initial: 0.0
- key: FTIME
description: Flight Time
type: float
min: 0.0
max: 1000.0
initial: 0.0
- key: DIM
description: Panel Dimmer Level
type: int
min: 0
max: 100
initial: 100
# Using this to test strings
- key: DUMMY
description:
type: str
"""
class TestDatabase(unittest.TestCase):
def setUp(self):
pass
def test_Minimal_Database_Build(self):
"""Test minimal database build"""
sf = io.StringIO(minimal_config)
database.init(sf)
l = database.listkeys()
l.sort()
self.assertEqual(l, minimal_list)
def test_Variable_Expansion(self):
"""Test database variable expansion"""
sf = io.StringIO(variable_config)
database.init(sf)
l = database.listkeys()
l.sort()
self.assertEqual(l, variable_list)
for e in range(4):
for c in range(6):
key = "EGT{}{}".format(e+1,c+1)
item = database.get_raw_item(key)
s = "Exhaust Gas Temp Engine {}, Cylinder {}".format(e+1,c+1)
self.assertEqual(item.description, s)
def test_aux_data_creation(self):
"""Test database auxillary data creation"""
sf = io.StringIO(general_config)
database.init(sf)
tests = ["Min", "Max", "0g", "Warn", "Stall"]
tests.sort()
i = database.get_raw_item("AOA")
l = i.get_aux_list()
l.sort()
self.assertEqual(l, tests)
def test_aux_data_read_write(self):
"""Test database auxillary data reading and writing"""
sf = io.StringIO(general_config)
database.init(sf)
tests = [("Min", -160.0),
("Max", -130.0),
("0g", 10.0),
("Warn", 23.4),
("Stall", 45.6)]
for test in tests:
x = database.write("AOA." + test[0], test[1])
x = database.read("AOA." + test[0])
self.assertEqual(x, test[1])
def test_database_bounds(self):
"""Test database bounds checking"""
sf = io.StringIO(general_config)
database.init(sf)
tests = [(0.0, 0.0),
(-180.0, -180.0),
(-180.1, -180.0),
                 (0.0, 0.0),
(180.0, 180.0),
(180.1, 180.0)]
for test in tests:
database.write("ROLL", test[0])
x = database.read("ROLL")
self.assertEqual(x[0], test[1])
def test_database_aux_data_bounds(self):
"""Test database aux data bounds checking"""
sf = io.StringIO(general_config)
database.init(sf)
tests = [(0.0, 0.0),
(-180.0, -180.0),
(-180.1, -180.0),
                 (0.0, 0.0),
(180.0, 180.0),
(180.1, 180.0)]
for test in tests:
database.write("AOA.Warn", test[0])
x = database.read("AOA.Warn")
self.assertEqual(x, test[1])
def test_database_callbacks(self):
"""Test database callback routines"""
sf = io.StringIO(general_config)
database.init(sf)
rval = None
def test_cb(key, val, udata): # Use a closure for our callback
nonlocal rval
rval = (key, val)
database.callback_add("test", "PITCH", test_cb, None)
database.write("PITCH", -11.4)
self.assertEqual(rval, ("PITCH", (-11.4, False, False, False, False, False)))
database.write("PITCH", 10.2)
self.assertEqual(rval, ("PITCH", (10.2, False, False, False, False, False)))
i = database.get_raw_item("PITCH")
i.fail = True
self.assertEqual(rval, ("PITCH", (10.2, False, False, False, True, False)))
i.annunciate = True
self.assertEqual(rval, ("PITCH", (10.2, True, False, False, True, False)))
i.bad = True
self.assertEqual(rval, ("PITCH", (10.2, True, False, True, True, False)))
time.sleep(0.250)
database.update() # force the update
self.assertEqual(rval, ("PITCH", (10.2, True, True, True, True, False)))
def test_timeout_lifetime(self):
"""Test item timeout lifetime"""
sf = io.StringIO(general_config)
database.init(sf)
database.write("PITCH", -11.4)
x = database.read("PITCH")
self.assertEqual(x, (-11.4, False, False, False, False, False))
time.sleep(0.15)
x = database.read("PITCH")
self.assertEqual(x, (-11.4, False, False, False, False, False))
time.sleep(0.05)
x = database.read("PITCH")
self.assertEqual(x, (-11.4, False, True, False, False, False))
database.write("PITCH", -11.4)
x = database.read("PITCH")
self.assertEqual(x, (-11.4, False, False, False, False, False))
def test_description_units(self):
"""Test description and units"""
sf = io.StringIO(general_config)
database.init(sf)
i = database.get_raw_item("ROLL")
self.assertEqual(i.description, "Roll Angle")
self.assertEqual(i.units, "deg")
def test_missing_description_units(self):
"""Test missing description and units"""
sf = io.StringIO(general_config)
database.init(sf)
i = database.get_raw_item("DUMMY")
self.assertEqual(i.description, '')
self.assertEqual(i.units, '')
def test_quality_bits(self):
"""Test quality bits"""
sf = io.StringIO(general_config)
database.init(sf)
i = database.get_raw_item("OILP1")
database.write("OILP1", 15.4)
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, False, False, False))
i.annunciate = True
x = database.read("OILP1")
self.assertEqual(x, (15.4, True, False, False, False, False))
i.annunciate = False
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, False, False, False))
i.fail = True
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, False, True, False))
i.fail = False
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, False, False, False))
i.bad = True
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, True, False, False))
i.bad = False
x = database.read("OILP1")
self.assertEqual(x, (15.4, False, False, False, False, False))
def test_string_datatype(self):
"""test writing a string to an item"""
sf = io.StringIO(general_config)
database.init(sf)
database.write("DUMMY", "test string")
x = database.read("DUMMY")
self.assertEqual(x[0], "test string")
def test_wrong_datatype(self):
"""test using wrong datatype for item"""
sf = io.StringIO(general_config)
database.init(sf)
database.write("DUMMY", 1234)
x = database.read("DUMMY")
self.assertEqual(x[0], "1234")
database.write("PITCH", "23.4")
x = database.read("PITCH")
self.assertEqual(x[0], 23.4)
def test_bool_write(self):
"""test using wrong datatype for item"""
sf = io.StringIO(general_config)
database.init(sf)
# Test actual booleans
database.write("BTN1", True)
x = database.read("BTN1")
self.assertEqual(x[0], True)
database.write("BTN1", False)
x = database.read("BTN1")
self.assertEqual(x[0], False)
# Test strings
database.write("BTN1", "True")
x = database.read("BTN1")
self.assertEqual(x[0], True)
database.write("BTN1", "False")
x = database.read("BTN1")
self.assertEqual(x[0], False)
database.write("BTN1", "1")
x = database.read("BTN1")
self.assertEqual(x[0], True)
database.write("BTN1", "0")
x = database.read("BTN1")
self.assertEqual(x[0], False)
database.write("BTN1", "Yes")
x = database.read("BTN1")
self.assertEqual(x[0], True)
database.write("BTN1", "No")
x = database.read("BTN1")
self.assertEqual(x[0], False)
# Test integers
database.write("BTN1", 1)
x = database.read("BTN1")
self.assertEqual(x[0], True)
database.write("BTN1", 0)
x = database.read("BTN1")
self.assertEqual(x[0], False)
def test_similar_aux_items(self):
"""it would be easy for a single aux array to be pointed to
by different database items."""
sf = io.StringIO(variable_config)
database.init(sf)
database.write("EGT11.Max", 700)
database.write("EGT12.Max", 800)
x = database.read("EGT11.Max")
y = database.read("EGT12.Max")
self.assertNotEqual(y, x)
if __name__ == '__main__':
unittest.main()
# TODO: Test that a blank in TOL will result in no timeout.
# TODO: Test that we can set the "OLD" flag if the timeout is zero<|fim▁end|> | units: '%/100'
initial: 0.0 |
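The variable-expansion tests above expect keys such as EGTec to fan out across the declared variables (e engines x c cylinders). The real fixgw.database implementation is not shown in this excerpt, so the following standalone helper is only a guess at the approach:

from itertools import product

def expand_key(key, variables):
    """Expand trailing lowercase placeholders, e.g. 'EGTec' with
    {'e': 2, 'c': 3} -> EGT11, EGT12, ..., EGT23. Hypothetical sketch."""
    # Placeholders are the trailing letters drawn from the variables dict.
    base = key.rstrip("".join(variables))
    placeholders = key[len(base):]
    if not placeholders:
        return [key]
    ranges = [range(1, variables[p] + 1) for p in placeholders]
    return [base + "".join(str(i) for i in combo) for combo in product(*ranges)]

print(expand_key("EGTec", {"e": 2, "c": 3}))
# ['EGT11', 'EGT12', 'EGT13', 'EGT21', 'EGT22', 'EGT23']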
<|file_name|>client.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package gosrc
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
)
<|fim▁hole|> errFn func(*http.Response) error
header http.Header
client *http.Client
}
func (c *httpClient) err(resp *http.Response) error {
if resp.StatusCode == 404 {
return NotFoundError{Message: "Resource not found: " + resp.Request.URL.String()}
}
if c.errFn != nil {
return c.errFn(resp)
}
return &RemoteError{resp.Request.URL.Host, fmt.Errorf("%d: (%s)", resp.StatusCode, resp.Request.URL.String())}
}
// get issues a GET to the specified URL.
func (c *httpClient) get(url string) (*http.Response, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, err
}
for k, vs := range c.header {
req.Header[k] = vs
}
resp, err := c.client.Do(req)
if err != nil {
return nil, &RemoteError{req.URL.Host, err}
}
return resp, err
}
// getNoFollow issues a GET to the specified URL without following redirects.
func (c *httpClient) getNoFollow(url string) (*http.Response, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, err
}
for k, vs := range c.header {
req.Header[k] = vs
}
t := c.client.Transport
if t == nil {
t = http.DefaultTransport
}
resp, err := t.RoundTrip(req)
if err != nil {
return nil, &RemoteError{req.URL.Host, err}
}
return resp, err
}
func (c *httpClient) getBytes(url string) ([]byte, error) {
resp, err := c.get(url)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return nil, c.err(resp)
}
p, err := ioutil.ReadAll(resp.Body)
return p, err
}
func (c *httpClient) getReader(url string) (io.ReadCloser, error) {
resp, err := c.get(url)
if err != nil {
return nil, err
}
if resp.StatusCode != 200 {
err = c.err(resp)
resp.Body.Close()
return nil, err
}
return resp.Body, nil
}
func (c *httpClient) getJSON(url string, v interface{}) (*http.Response, error) {
resp, err := c.get(url)
if err != nil {
return resp, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return resp, c.err(resp)
}
err = json.NewDecoder(resp.Body).Decode(v)
if _, ok := err.(*json.SyntaxError); ok {
err = NotFoundError{Message: "JSON syntax error at " + url}
}
return resp, err
}
func (c *httpClient) getFiles(urls []string, files []*File) error {
ch := make(chan error, len(files))
for i := range files {
go func(i int) {
resp, err := c.get(urls[i])
if err != nil {
ch <- err
return
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
var err error
if c.errFn != nil {
err = c.errFn(resp)
} else {
err = &RemoteError{resp.Request.URL.Host, fmt.Errorf("get %s -> %d", urls[i], resp.StatusCode)}
}
ch <- err
return
}
files[i].Data, err = ioutil.ReadAll(resp.Body)
if err != nil {
ch <- &RemoteError{resp.Request.URL.Host, err}
return
}
ch <- nil
}(i)
}
for range files {
if err := <-ch; err != nil {
return err
}
}
return nil
}<|fim▁end|> | type httpClient struct { |