prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>feature-gate-c-unwind.rs<|end_file_name|><|fim▁begin|>// Test that the "C-unwind" ABI is feature-gated, and cannot be used when the
// `c_unwind` feature gate is not used.
<|fim▁hole|>extern "C-unwind" fn f() {}
//~^ ERROR C-unwind ABI is experimental and subject to change [E0658]
fn main() {
f();
}<|fim▁end|>
| |
<|file_name|>misc_crypto.rs<|end_file_name|><|fim▁begin|>// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! Miscellaneous cryptographic test data that isn't worth maintaining custom
//! scripts for.
/// A plaintext string.
pub const PLAIN_TEXT: &[u8] = b"I'm setting the alarm clock for July.";
/// The SHA-256 hash of `PLAIN_TEXT`.
///
/// Generate with:
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -binary \
/// | xxd -i -c 8
/// ```
#[rustfmt::skip]
pub const PLAIN_SHA256: &[u8] = &[
0xc9, 0xfd, 0xba, 0xae, 0x28, 0xe7, 0x49, 0x5c,
0xe1, 0x13, 0xbc, 0x87, 0xc8, 0x20, 0x6c, 0xba,
0xc2, 0xd1, 0x0c, 0x28, 0x17, 0xf0, 0x06, 0x11,
0xd0, 0xc6, 0x19, 0x2f, 0x47, 0x64, 0xdb, 0xba,
];
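// Illustrative check (a sketch, not part of this module): assuming the
// `sha2` crate, the constant above could be verified in a unit test:
//
//     use sha2::{Digest, Sha256};
//     assert_eq!(Sha256::digest(PLAIN_TEXT).as_slice(), PLAIN_SHA256);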
/// The SHA-256 HMAC of `PLAIN_TEXT`, using itself as the key.
///
/// Generate with:
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -binary \
/// -hmac "I'm setting the alarm clock for July." \
/// | xxd -i -c 8
/// ```
#[rustfmt::skip]
pub const PLAIN_HMAC256: &[u8] = &[
0x95, 0xcb, 0xa4, 0x97, 0x4d, 0x09, 0xa3, 0x9f,
0x2d, 0x97, 0xd0, 0x32, 0xa4, 0x0a, 0x3a, 0xd4,<|fim▁hole|>
/// An RSA signature for `PLAIN_TEXT`, generated thus:
///
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
/// | openssl dgst -sha256 -keyform DER \
/// -sign testutil/src/data/keys/key1.rsa.pk8 \
/// | xxd -i -c 8
/// ```
///
/// The signature is in PKCS v1.5 format.
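///
/// A matching verification sketch (the public-key and signature file paths
/// are assumptions, not part of this repository):
///
/// ```text
/// echo -n "I'm setting the alarm clock for July." \
///   | openssl dgst -sha256 -verify key1.pub.pem -signature sig.bin
/// ```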
#[rustfmt::skip]
pub const KEY1_SHA256_SIG: &[u8] = &[
0x4d, 0xd8, 0x99, 0xbf, 0x42, 0xc0, 0xef, 0xf4,
0xd6, 0x5f, 0xb6, 0xa4, 0x9c, 0xeb, 0x63, 0xc3,
0x06, 0x00, 0xc3, 0xaa, 0x7e, 0xcb, 0x78, 0x8e,
0x13, 0xc6, 0xbb, 0xbc, 0x5a, 0x05, 0x34, 0xb8,
0xe8, 0xa9, 0xef, 0x43, 0xa8, 0x2d, 0x63, 0xe8,
0x64, 0xc4, 0x5d, 0x32, 0xaa, 0xed, 0x15, 0xf8,
0xf6, 0x1a, 0xeb, 0x95, 0xc3, 0x4d, 0x09, 0x91,
0x3b, 0xdd, 0x69, 0x94, 0x4f, 0xd6, 0x16, 0xca,
0x50, 0x88, 0x2d, 0xcf, 0xe7, 0x94, 0x43, 0x9c,
0xd8, 0xbd, 0x68, 0xdd, 0xdb, 0x48, 0xab, 0x60,
0xd5, 0xca, 0x34, 0xab, 0x18, 0x69, 0xb9, 0x34,
0xca, 0x5a, 0x3d, 0xdd, 0x65, 0xde, 0x51, 0x8d,
0x54, 0x67, 0x2b, 0xd1, 0x4e, 0xae, 0x8d, 0xcd,
0xa5, 0xaa, 0x62, 0x5d, 0xa0, 0x30, 0x97, 0xd9,
0x91, 0x38, 0xd4, 0x81, 0x83, 0x7c, 0xf9, 0xc5,
0xbe, 0xc5, 0xef, 0xfc, 0x34, 0x21, 0xce, 0x27,
0x81, 0xf2, 0x79, 0x51, 0x3a, 0x3b, 0x02, 0x2d,
0xe6, 0x1d, 0x0f, 0x38, 0x77, 0x63, 0xbd, 0x30,
0xce, 0x39, 0x63, 0x8a, 0x63, 0x7e, 0x1e, 0x0b,
0xb5, 0x39, 0xd5, 0xa7, 0x42, 0xb0, 0x1d, 0x69,
0x02, 0x81, 0x9a, 0x65, 0x4d, 0x51, 0xfd, 0x0b,
0xc5, 0x57, 0x20, 0xae, 0x2e, 0xf8, 0x62, 0x6b,
0xce, 0x35, 0xb6, 0xd4, 0x9b, 0x0a, 0x5e, 0x26,
0xfa, 0x10, 0x54, 0x5a, 0x95, 0x57, 0xe2, 0xd8,
0xf3, 0xa4, 0x1a, 0x11, 0x07, 0x40, 0xec, 0x3d,
0x84, 0x99, 0x56, 0xe1, 0x63, 0x7f, 0xec, 0x35,
0x5d, 0xf2, 0x3d, 0x21, 0xb2, 0x74, 0x42, 0x02,
0xad, 0xcb, 0x42, 0x7e, 0x45, 0x40, 0xef, 0x93,
0x23, 0xdd, 0x7d, 0xce, 0xcc, 0x6c, 0x63, 0x45,
0x9e, 0x26, 0x7b, 0x7c, 0x9a, 0xea, 0x07, 0x15,
0x33, 0x36, 0xcc, 0x3c, 0x96, 0x46, 0xbf, 0x79,
0x07, 0x3c, 0x3c, 0x9d, 0x8c, 0x72, 0x0c, 0x79,
];<|fim▁end|>
|
0x04, 0xe7, 0x1b, 0x4f, 0x74, 0x35, 0xb0, 0xf5,
0x99, 0xe6, 0xc5, 0x9c, 0x01, 0x52, 0x40, 0x51,
];
|
<|file_name|>jabbersearch.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="pl" sourcelanguage="en">
<context>
<name>JabberSearch</name>
<message>
<source>Jabber Search</source>
<translation>Przeszukaj Jabber</translation>
</message>
<message>
<source>Search</source>
<translation>Szukaj</translation>
</message>
<message>
<source>First Name</source>
<translation>Imię</translation>
</message>
<message>
<source>Last Name</source>
<translation>Nazwisko</translation>
</message>
<message><|fim▁hole|> <source>Email Address</source>
<translation>Adres e-mail</translation>
</message>
<message>
<source>Allows to search in the Jabber network</source>
<translation>Pozwala na przeszukiwanie sieci Jabber</translation>
</message>
<message>
<source>Supports the searching of the information</source>
<translation>Obsługa pobierania informacji</translation>
</message>
</context>
<context>
<name>SearchDialog</name>
<message>
<source>Search in %1</source>
<translation>Szukaj w %1</translation>
</message>
<message>
<source>Waiting for host response ...</source>
<translation>Oczekiwanie na odpowiedź...</translation>
</message>
<message>
<source>Disco info</source>
<translation>Informacje</translation>
</message>
<message>
<source>Add Contact</source>
<translation>Dodaj kontakt</translation>
</message>
<message>
<source>vCard</source>
<translation>Wizytówka</translation>
</message>
<message>
<source>Requested operation failed: %1</source>
<translation>Żądana operacja zakończyła się błędem: %1</translation>
</message>
<message>
<source>Error: Can't send request to host.</source>
<translation>Błąd: Nie można wysłać żądania do serwera.</translation>
</message>
</context>
<context>
<name>SearchDialogClass</name>
<message>
<source>First:</source>
<translation>Imię:</translation>
</message>
<message>
<source>Last:</source>
<translation>Nazwisko:</translation>
</message>
<message>
<source>Nick:</source>
<translation>Nick:</translation>
</message>
<message>
<source>Email:</source>
<translation>E-mail:</translation>
</message>
<message>
<source>JID</source>
<translation>JID</translation>
</message>
<message>
<source>First</source>
<translation>Imię</translation>
</message>
<message>
<source>Last</source>
<translation>Nazwisko</translation>
</message>
<message>
<source>Nick</source>
<translation>Nick</translation>
</message>
<message>
<source>EMail</source>
<translation>E-mail</translation>
</message>
</context>
</TS><|fim▁end|>
|
<source>Nickname</source>
<translation>Nick</translation>
</message>
<message>
|
<|file_name|>5.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
using namespace std;
int n;<|fim▁hole|>int eval(int i, int k, int *c, int *t);
int main() {
cin >> n;
int *c = new int[2 * n]; // save each node's children in two slots, e.g. 1:{6,5}, 2:{0,0} ... 6:{4,0}
fill_n(c, 2 * n, 0);
int q = 0;
for (int i = 0; i < n - 1; i++) {
cin >> q;
if (c[(q - 1) * 2 + 1]) {
c[(q - 1) * 2] = i + 2;
} else {
c[(q - 1) * 2 + 1] = i + 2;
}
}
int *t = new int[n + 1]; // n + 1 so the trailing 0 acts as a sentinel for print()
fill_n(t, n + 1, 0);
t[0] = 1;
eval(0, 0, c, t);
print(t);
int p;
for (int i = 0; i < n; i++) {
p = lastNode(i, c)[0] + 1;
int start = 0, end = 0;
for (int k = 0; k < n; k++) {
if (t[k] == i + 1) {
start = k + 1;
}
if (t[k] == p) {
end = k + 1;
break;
}
}
cout << i + 1 << ": " << start << " " << end << endl;
}
cin.get();
cin.ignore();
return 0;
}
int* lastNode(int s, int *c) {
static int ln[2]; // static storage so the returned pointer stays valid; callers copy the values out immediately
ln[0] = s;
ln[1] = 1;
if (!c[2 * s + 1]) return ln;
int k = 0; // key
int d = 1; // depth
int rk, rd, lk, ld;
int *w = lastNode(c[2 * s + 1] - 1, c);
rk = w[0];
rd = w[1];
if (c[2 * s]) {
w = lastNode(c[2 * s] - 1, c);
lk = w[0];
ld = w[1];
k = rd >= ld ? rk : lk;
d += rd + ld;
} else {
k = rk;
d += rd;
}
ln[0] = k;
ln[1] = d;
return ln;
}
int eval(int i, int k, int *c, int *t) {
if (i >= n) return 0;
int lc = 0; // number of sub tree nodes
if (c[2 * k]) {
t[i + 1] = c[2 * k];
lc = eval(i + 1, c[2 * k] - 1, c, t);
}
if (c[2 * k + 1]) {
i += lc;
t[i + 1] = c[2 * k + 1];
lc += eval(i + 1, c[2 * k + 1] - 1, c, t);
} else {
t[i] = k + 1;
}
return lc + 1;
}
void print(int* s) {
for (int i = 0; s[i]; i++) {
cout << s[i] << " ";
}
cout << endl;
}<|fim▁end|>
|
void print(int* s);
int* lastNode(int s, int *c);
|
<|file_name|>AbstractConsistentSessionTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.repair.consistent;
import java.net.UnknownHostException;
import java.util.Set;
import java.util.UUID;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.junit.Ignore;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.streaming.PreviewKind;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.UUIDGen;
@Ignore
public abstract class AbstractConsistentSessionTest
{
protected static final InetAddressAndPort COORDINATOR;
protected static final InetAddressAndPort PARTICIPANT1;
protected static final InetAddressAndPort PARTICIPANT2;
protected static final InetAddressAndPort PARTICIPANT3;
static
{
try
{
COORDINATOR = InetAddressAndPort.getByName("10.0.0.1");
PARTICIPANT1 = InetAddressAndPort.getByName("10.0.0.1");
PARTICIPANT2 = InetAddressAndPort.getByName("10.0.0.2");
PARTICIPANT3 = InetAddressAndPort.getByName("10.0.0.3");
}
catch (UnknownHostException e)
{
throw new AssertionError(e);
}
DatabaseDescriptor.daemonInitialization();
}
protected static final Set<InetAddressAndPort> PARTICIPANTS = ImmutableSet.of(PARTICIPANT1, PARTICIPANT2, PARTICIPANT3);
protected static Token t(int v)<|fim▁hole|> return DatabaseDescriptor.getPartitioner().getToken(ByteBufferUtil.bytes(v));
}
protected static final Range<Token> RANGE1 = new Range<>(t(1), t(2));
protected static final Range<Token> RANGE2 = new Range<>(t(2), t(3));
protected static final Range<Token> RANGE3 = new Range<>(t(4), t(5));
protected static UUID registerSession(ColumnFamilyStore cfs)
{
UUID sessionId = UUIDGen.getTimeUUID();
ActiveRepairService.instance.registerParentRepairSession(sessionId,
COORDINATOR,
Lists.newArrayList(cfs),
Sets.newHashSet(RANGE1, RANGE2, RANGE3),
true,
System.currentTimeMillis(),
true,
PreviewKind.NONE);
return sessionId;
}
}<|fim▁end|>
|
{
|
<|file_name|>Ptobjects.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from flask import Flask, request
from flask.ext.restful import Resource, fields, marshal_with, reqparse, abort
from flask.globals import g
from jormungandr import i_manager, timezone
from jormungandr.interfaces.v1.fields import disruption_marshaller
from jormungandr.interfaces.v1.make_links import add_id_links
from jormungandr.interfaces.v1.fields import NonNullList, NonNullNested, PbField, error, pt_object, feed_publisher
from jormungandr.interfaces.v1.ResourceUri import ResourceUri
from jormungandr.interfaces.argument import ArgumentDoc
from jormungandr.interfaces.parsers import depth_argument, option_value, default_count_arg_type, date_time_format
from copy import deepcopy
import datetime
pt_objects = {
"pt_objects": NonNullList(NonNullNested(pt_object), attribute='places'),
"disruptions": fields.List(NonNullNested(disruption_marshaller), attribute="impacts"),
"error": PbField(error, attribute='error'),
"feed_publishers": fields.List(NonNullNested(feed_publisher))
}
pt_object_type_values = ["network", "commercial_mode", "line", "line_group", "route", "stop_area"]
class Ptobjects(ResourceUri):
def __init__(self, *args, **kwargs):
ResourceUri.__init__(self, *args, **kwargs)
self.parsers = {}
self.parsers["get"] = reqparse.RequestParser(
argument_class=ArgumentDoc)
self.parsers["get"].add_argument("q", type=unicode, required=True,
description="The data to search")
self.parsers["get"].add_argument("type[]", type=option_value(pt_object_type_values),
action="append",default=pt_object_type_values,
description="The type of data to\
search")
self.parsers["get"].add_argument("count", type=default_count_arg_type, default=10,
description="The maximum number of\
ptobjects returned")
self.parsers["get"].add_argument("search_type", type=int, default=0,
description="Type of search:\
firstletter or type error")
self.parsers["get"].add_argument("admin_uri[]", type=unicode,
action="append",
description="If filled, will\
restrain the search within the\
given admin uris")
self.parsers["get"].add_argument("depth", type=depth_argument,
default=1,
description="The depth of objects")
self.parsers["get"].add_argument("_current_datetime", type=date_time_format, default=datetime.datetime.utcnow(),
description="The datetime used to consider the state of the pt object"
" Default is the current date and it is used for debug."
" Note: it will mainly change the disruptions that concern "
"the object The timezone should be specified in the format,"
" else we consider it as UTC")
@marshal_with(pt_objects)<|fim▁hole|> timezone.set_request_timezone(self.region)
args = self.parsers["get"].parse_args()
self._register_interpreted_parameters(args)
if len(args['q']) == 0:
abort(400, message="Search word absent")
response = i_manager.dispatch(args, "pt_objects",
instance_name=self.region)
return response, 200<|fim▁end|>
|
def get(self, region=None, lon=None, lat=None):
self.region = i_manager.get_region(region, lon, lat)
|
<|file_name|>pyunit_PUBDEV_8346_modelselection_result_frame.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import division
import sys
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.model_selection import H2OModelSelectionEstimator as modelSelection
# Test the modelselection algorithm for regression only. Make sure the result frame contains the correct information,
# and that we can instantiate the best model from its model ID and perform scoring with it.
def test_gaussian_result_frame_model_id():
d = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))<|fim▁hole|> my_x = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL","DPROS"]
maxr_model = modelSelection(seed=12345, max_predictor_number=7, mode="maxr")
maxr_model.train(training_frame=d, x=my_x, y=my_y)
allsubsets_model = modelSelection(seed=12345, max_predictor_number=7, mode="allsubsets")
allsubsets_model.train(training_frame=d, x=my_x, y=my_y)
result_frame_allsubsets = allsubsets_model.result()
numRows = result_frame_allsubsets.nrows
best_r2_allsubsets = allsubsets_model.get_best_R2_values()
result_frame_maxr = maxr_model.result()
best_r2_maxr = maxr_model.get_best_R2_values()
for ind in list(range(numRows)):
# r2 from attributes
best_r2_value_allsubsets = best_r2_allsubsets[ind]
one_model_allsubsets = h2o.get_model(result_frame_allsubsets["model_id"][ind, 0])
pred_allsubsets = one_model_allsubsets.predict(d)
print("last element of predictor frame: {0}".format(pred_allsubsets[pred_allsubsets.nrows-1,pred_allsubsets.ncols-1]))
assert pred_allsubsets.nrows == d.nrows, "expected dataset rows: {0}, actual dataset rows: " \
"{1}".format(d.nrows, pred_allsubsets.nrows)
best_r2_value_maxr = best_r2_maxr[ind]
one_model_maxr = h2o.get_model(result_frame_maxr["model_id"][ind, 0])
pred_maxr = one_model_maxr.predict(d)
pyunit_utils.compare_frames_local(pred_maxr, pred_allsubsets, prob=1, tol=1e-6) # compare allsubsets and maxr results
# r2 from result frame
frame_r2_allsubsets = result_frame_allsubsets["best_r2_value"][ind,0]
# r2 from model
model_r2_allsubsets = one_model_allsubsets.r2()
# make sure all r2 are equal
assert abs(best_r2_value_allsubsets-frame_r2_allsubsets) < 1e-6, "expected best r2: {0}, actual best r2: " \
"{1}".format(best_r2_value_allsubsets, frame_r2_allsubsets)
assert abs(frame_r2_allsubsets-model_r2_allsubsets) < 1e-6, "expected best r2: {0}, actual best r2: " \
"{1}".format(model_r2_allsubsets, frame_r2_allsubsets)
assert abs(best_r2_value_maxr-model_r2_allsubsets) < 1e-6, "expected best r2: {0}, maxr best r2: {1}" \
"".format(best_r2_value_maxr, model_r2_allsubsets)
if __name__ == "__main__":
pyunit_utils.standalone_test(test_gaussian_result_frame_model_id)
else:
test_gaussian_result_frame_model_id()<|fim▁end|>
|
my_y = "GLEASON"
|
<|file_name|>UntilTokenImpl.java<|end_file_name|><|fim▁begin|>/**
*/
package org.eclipse.xtext.impl;<|fim▁hole|>
import org.eclipse.xtext.UntilToken;
import org.eclipse.xtext.XtextPackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Until Token</b></em>'.
* <!-- end-user-doc -->
*
* @generated
*/
public class UntilTokenImpl extends AbstractNegatedTokenImpl implements UntilToken {
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected UntilTokenImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return XtextPackage.Literals.UNTIL_TOKEN;
}
} //UntilTokenImpl<|fim▁end|>
|
import org.eclipse.emf.ecore.EClass;
|
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import matplotlib
matplotlib.use('Agg')
import numpy as np # noqa
import pandas as pd # noqa
import pandas_ml as pdml # noqa
import pandas_ml.util.testing as tm # noqa
import sklearn.datasets as datasets # noqa
import xgboost as xgb # noqa
class TestXGBoost(tm.TestCase):
def test_objectmapper(self):
df = pdml.ModelFrame([])
self.assertIs(df.xgboost.XGBRegressor, xgb.XGBRegressor)
self.assertIs(df.xgboost.XGBClassifier, xgb.XGBClassifier)
def test_XGBClassifier(self):
<|fim▁hole|> for model in models:
mod1 = getattr(df.xgboost, model)()
mod2 = getattr(xgb, model)()
df.fit(mod1)
mod2.fit(iris.data, iris.target)
result = df.predict(mod1)
expected = mod2.predict(iris.data)
self.assertIsInstance(result, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(result.values, expected)
def test_XGBRegressor(self):
# http://scikit-learn.org/stable/auto_examples/plot_kernel_ridge_regression.html
X = 5 * np.random.rand(1000, 1)
y = np.sin(X).ravel()
# Add noise to targets
y[::5] += 3 * (0.5 - np.random.rand(X.shape[0] // 5))
df = pdml.ModelFrame(data=X, target=y)
models = ['XGBRegressor']
for model in models:
mod1 = getattr(df.xgboost, model)()
mod2 = getattr(xgb, model)()
df.fit(mod1)
mod2.fit(X, y)
result = df.predict(mod1)
expected = mod2.predict(X)
self.assertIsInstance(result, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(result.values, expected)
self.assertIsInstance(df.predicted, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(df.predicted.values, expected)
def test_grid_search(self):
tuned_parameters = [{'max_depth': [3, 4],
'n_estimators': [50, 100]}]
df = pdml.ModelFrame(datasets.load_digits())
cv = df.grid_search.GridSearchCV(df.xgb.XGBClassifier(), tuned_parameters, cv=5)
with tm.RNGContext(1):
df.fit(cv)
result = df.grid_search.describe(cv)
expected = pd.DataFrame({'mean': [0.89705064, 0.91764051, 0.91263216, 0.91930996],
'std': [0.03244061, 0.03259985, 0.02764891, 0.0266436],
'max_depth': [3, 3, 4, 4],
'n_estimators': [50, 100, 50, 100]},
columns=['mean', 'std', 'max_depth', 'n_estimators'])
self.assertIsInstance(result, pdml.ModelFrame)
tm.assert_frame_equal(result, expected)
def test_plotting(self):
iris = datasets.load_iris()
df = pdml.ModelFrame(iris)
df.fit(df.svm.SVC())
# raises if df.estimator is not XGBModel
with self.assertRaises(ValueError):
df.xgb.plot_importance()
with self.assertRaises(ValueError):
df.xgb.to_graphviz()
with self.assertRaises(ValueError):
df.xgb.plot_tree()
df.fit(df.xgb.XGBClassifier())
from matplotlib.axes import Axes
from graphviz import Digraph
try:
ax = df.xgb.plot_importance()
except ImportError:
import nose
# matplotlib.use doesn't work on Travis
# PYTHON=3.4 PANDAS=0.17.1 SKLEARN=0.16.1
raise nose.SkipTest()
self.assertIsInstance(ax, Axes)
assert ax.get_title() == 'Feature importance'
assert ax.get_xlabel() == 'F score'
assert ax.get_ylabel() == 'Features'
assert len(ax.patches) == 4
g = df.xgb.to_graphviz(num_trees=0)
self.assertIsInstance(g, Digraph)
ax = df.xgb.plot_tree(num_trees=0)
self.assertIsInstance(ax, Axes)
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)<|fim▁end|>
|
iris = datasets.load_iris()
df = pdml.ModelFrame(iris)
models = ['XGBClassifier']
|
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use std::ptr;
use std::io;
use std::fs::{create_dir, remove_dir_all, read_dir, remove_file, remove_dir};
use std::fs::{metadata};
use std::path::{Path, PathBuf};
use std::path::Component::Normal;
use std::io::Error as IoError;
use std::io::ErrorKind::{AlreadyExists, NotFound};
use std::ffi::CString;
use std::env::current_dir;
use nix::sys::signal::Signal;
use nix::sys::signal::{SIGQUIT, SIGSEGV, SIGBUS, SIGHUP, SIGILL, SIGABRT};
use nix::sys::signal::{SIGFPE, SIGUSR1, SIGUSR2};
use libc::{c_int, c_char, timeval, c_void, mode_t, uid_t, gid_t};
use libc::{chmod, chdir, chown};
use signal::trap::Trap;
use range::Range;
use super::id_map::IdMap;
pub type Time = f64;
pub type SigNum = i32;
// TODO(tailhook) signal::Trap might use nix signals instead of i32
pub const ABNORMAL_TERM_SIGNALS: &'static [Signal] = &[
SIGQUIT, SIGSEGV, SIGBUS, SIGHUP,
SIGILL, SIGABRT, SIGFPE, SIGUSR1,
SIGUSR2,
];
pub struct FsUidGuard(bool);
extern {
fn chroot(dir: *const c_char) -> c_int;
fn pivot_root(new_root: *const c_char, put_old: *const c_char) -> c_int;
fn gettimeofday(tp: *mut timeval, tzp: *mut c_void) -> c_int;
// TODO(tailhook) move to libc and nix
fn setfsuid(uid: uid_t) -> c_int;
fn setfsgid(gid: gid_t) -> c_int;
}
pub fn temporary_change_root<T, F>(path: &Path, mut fun: F)
-> Result<T, String>
where F: FnMut() -> Result<T, String>
{
// The point is: if we get a fatal signal in the chroot, we have 2 issues:
//
// 1. Process can't actually restart (the binary path is wrong)
// 2. Even if it finds the binary, it will be angry restarting in chroot
//
let _trap = Trap::trap(ABNORMAL_TERM_SIGNALS);
let cwd = current_dir().map_err(|e| {
format!("Can't determine current dir: {}. \
This usually happens if the directory \
you're in is already deleted", e)
})?;
if unsafe { chdir(CString::new("/").unwrap().as_ptr()) } != 0 {
return Err(format!("Error chdir to root: {}",
IoError::last_os_error()));
}
if unsafe { chroot(cpath(&path).as_ptr()) } != 0 {
return Err(format!("Error chroot to {:?}: {}",
path, IoError::last_os_error()));
}
let res = fun();
if unsafe { chroot(CString::new(".").unwrap().as_ptr()) } != 0 {
return Err(format!("Error chroot back: {}",
IoError::last_os_error()));
}
if unsafe { chdir(cpath(&cwd).as_ptr()) } != 0 {
return Err(format!("Error chdir to workdir back: {}",
IoError::last_os_error()));
}
return res;
}
pub fn in_mapping(mapping: &Vec<IdMap>, value: u32) -> bool {
for mp in mapping.iter() {
if value >= mp.inside && value < mp.inside + mp.count {
return true;
}
}
return false;
}
pub fn check_mapping(ranges: &Vec<Range>, map: &Vec<IdMap>) -> bool {<|fim▁hole|> for rng in ranges.iter() {
if rng.start <= item.outside &&
rng.end >= item.outside + item.count - 1
{
continue 'map;
}
}
return false;
}
return true;
}
pub fn change_root(new_root: &Path, put_old: &Path) -> Result<(), String>
{
if unsafe { pivot_root(
cpath(new_root).as_ptr(),
cpath(put_old).as_ptr()) } != 0
{
return Err(format!("Error pivot_root to {}: {}", new_root.display(),
IoError::last_os_error()));
}
if unsafe { chdir(CString::new("/").unwrap().as_ptr()) } != 0
{
return Err(format!("Error chdir to root: {}",
IoError::last_os_error()));
}
return Ok(());
}
pub fn ensure_dir(dir: &Path) -> Result<(), String> {
if let Ok(dmeta) = metadata(dir) {
if !dmeta.is_dir() {
return Err(format!(concat!("Can't create dir {:?}, ",
"path already exists but not a directory"), dir));
}
return Ok(());
}
match create_dir(dir) {
Ok(()) => return Ok(()),
Err(ref e) if e.kind() == AlreadyExists => {
let dmeta = metadata(dir);
if dmeta.is_ok() && dmeta.unwrap().is_dir() {
return Ok(());
} else {
return Err(format!(concat!("Can't create dir {:?}, ",
"path already exists but not a directory"),
dir));
}
}
Err(ref e) => {
return Err(format!(concat!("Can't create dir {:?}: {} ",
"path already exists but not a directory"), dir, e));
}
}
}
pub fn clean_dir(dir: &Path, remove_dir_itself: bool) -> Result<(), String> {
if let Err(e) = metadata(dir) {
if e.kind() == NotFound {
return Ok(());
} else {
return Err(format!("Can't stat dir {:?}: {}", dir, e));
}
}
// We temporarily change root, so that symlinks inside the dir
// would do no harm. But note that dir itself can be a symlink
try!(temporary_change_root(dir, || {
let dirlist = try!(read_dir("/")
.map_err(|e| format!("Can't read directory {:?}: {}", dir, e)))
.filter_map(|x| x.ok())
.collect::<Vec<_>>();
for entry in dirlist.into_iter() {
match metadata(entry.path()) {
Ok(ref meta) if meta.is_dir() => {
try!(remove_dir_all(entry.path())
.map_err(|e| format!("Can't remove directory {:?}{:?}: {}",
dir, entry.path(), e)));
}
Ok(_) => {
try!(remove_file(entry.path())
.map_err(|e| format!("Can't remove file {:?}{:?}: {}",
dir, entry.path(), e)));
}
Err(_) => {
return Err(format!("Can't stat file {:?}", entry.path()));
}
}
}
Ok(())
}));
if remove_dir_itself {
try!(remove_dir(dir)
.map_err(|e| format!("Can't remove dir {:?}: {}", dir, e)));
}
return Ok(());
}
pub fn join<S1, S2, I>(mut iter: I, sep: S2) -> String
where S1:AsRef<str>, S2:AsRef<str>, I:Iterator<Item=S1>
{
let mut buf = String::new();
match iter.next() {
Some(x) => buf.push_str(x.as_ref()),
None => {}
}
for i in iter {
buf.push_str(sep.as_ref());
buf.push_str(i.as_ref());
}
return buf;
}
pub fn get_time() -> Time {
let mut tv = timeval { tv_sec: 0, tv_usec: 0 };
unsafe { gettimeofday(&mut tv, ptr::null_mut()) };
return tv.tv_sec as f64 + tv.tv_usec as f64 * 0.000001;
}
pub fn set_file_owner(path: &Path, owner: uid_t, group: gid_t)
-> Result<(), IoError>
{
let cpath = cpath(path);
let rc = unsafe { chown(cpath.as_ptr(), owner, group) };
if rc < 0 {
return Err(IoError::last_os_error());
}
return Ok(());
}
pub fn set_file_mode(path: &Path, mode: mode_t) -> Result<(), IoError> {
let cpath = cpath(path);
let rc = unsafe { chmod(cpath.as_ptr(), mode) };
if rc < 0 {
return Err(IoError::last_os_error());
}
return Ok(());
}
pub fn cpath<P:AsRef<Path>>(path: P) -> CString {
CString::new(path.as_ref().to_str().unwrap()).unwrap()
}
pub fn relative(child: &Path, base: &Path) -> PathBuf {
assert!(child.starts_with(base));
let mut res = PathBuf::new();
for cmp in child.components().skip(base.components().count()) {
if let Normal(ref chunk) = cmp {
res.push(chunk);
} else {
panic!("Bad path for relative ({:?} from {:?} against {:?})",
cmp, child, base);
}
}
return res
}
impl FsUidGuard {
pub fn set(uid: u32, gid: u32) -> FsUidGuard {
if uid != 0 || gid != 0 {
unsafe { setfsuid(uid) };
if unsafe { setfsuid(uid) } != uid as i32 {
error!("Can't set fs gid to open socket: {}. Ignoring.",
io::Error::last_os_error());
}
unsafe { setfsgid(gid) };
if unsafe { setfsgid(gid) } != gid as i32 {
error!("Can't set fs uid to open socket: {}. Ignoring.",
io::Error::last_os_error());
}
FsUidGuard(true)
} else {
FsUidGuard(false)
}
}
}
impl Drop for FsUidGuard {
fn drop(&mut self) {
if self.0 {
unsafe { setfsuid(0) };
if unsafe { setfsuid(0) } != 0 {
let err = io::Error::last_os_error();
error!("Can't return fs uid back to zero: {}. Aborting.", err);
panic!("Can't return fs uid back to zero: {}. Aborting.", err);
}
unsafe { setfsgid(0) };
if unsafe { setfsgid(0) } != 0 {
let err = io::Error::last_os_error();
error!("Can't return fs gid back to zero: {}. Aborting.", err);
panic!("Can't return fs gid back to zero: {}. Aborting.", err);
}
}
}
}<|fim▁end|>
|
// TODO(tailhook) do more comprehensive algo
'map: for item in map.iter() {
|
<|file_name|>oLanguage.oPaginate.js<|end_file_name|><|fim▁begin|>// DATA_TEMPLATE: js_data
oTest.fnStart( "oLanguage.oPaginate" );
/* Note that the paging language information only has relevance in full numbers */
$(document).ready( function () {
/* Check the default */
var oTable = $('#example').dataTable( {
"aaData": gaaData,
"sPaginationType": "full_numbers"
} );
var oSettings = oTable.fnSettings();
oTest.fnTest(
"oLanguage.oPaginate defaults",
null,
function () {
var bReturn =
oSettings.oLanguage.oPaginate.sFirst == "First" &&
oSettings.oLanguage.oPaginate.sPrevious == "Previous" &&
oSettings.oLanguage.oPaginate.sNext == "Next" &&
oSettings.oLanguage.oPaginate.sLast == "Last";
return bReturn;
}
);
oTest.fnTest(
"oLanguage.oPaginate defaults are in the DOM",
null,
function () {
var bReturn =
$('#example_paginate .first').html() == "First" &&
$('#example_paginate .previous').html() == "Previous" &&
$('#example_paginate .next').html() == "Next" &&
$('#example_paginate .last').html() == "Last";
return bReturn;
}
);
oTest.fnTest(
"oLanguage.oPaginate can be defined",
function () {
oSession.fnRestore();
oTable = $('#example').dataTable( {
"aaData": gaaData,
"sPaginationType": "full_numbers",
"oLanguage": {
"oPaginate": {
"sFirst": "unit1",
"sPrevious": "test2",
"sNext": "unit3",
"sLast": "test4"
}
}
} );
oSettings = oTable.fnSettings();
},
function () {
var bReturn =
oSettings.oLanguage.oPaginate.sFirst == "unit1" &&
oSettings.oLanguage.oPaginate.sPrevious == "test2" &&
oSettings.oLanguage.oPaginate.sNext == "unit3" &&
oSettings.oLanguage.oPaginate.sLast == "test4";
return bReturn;
}
);
oTest.fnTest(
"oLanguage.oPaginate definitions are in the DOM",
null,
function () {
var bReturn =
$('#example_paginate .first').html() == "unit1" &&
$('#example_paginate .previous').html() == "test2" &&
<|fim▁hole|> return bReturn;
}
);
oTest.fnComplete();
} );<|fim▁end|>
|
$('#example_paginate .next').html() == "unit3" &&
$('#example_paginate .last').html() == "test4";
|
<|file_name|>hover-child.js<|end_file_name|><|fim▁begin|>import $ from 'jquery';
let hoverChildDirective = {
bind: (el, binding) => {
$(el)
.on('mouseenter', function(event) {
$(el).children('.icon').addClass(binding.value);
})
.on('mouseleave', function(event) {
$(el).children('.icon').removeClass(binding.value);
});
}
};
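// Illustrative registration (a sketch assuming a Vue 2 app; the directive
// name chosen here is hypothetical):
//   import Vue from 'vue';
//   Vue.directive('hover-child', hoverChildDirective);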
<|fim▁hole|><|fim▁end|>
|
export { hoverChildDirective };
|
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { Route, IndexRoute } from 'react-router';
import App from './App';
import IncredibleOffersContainer from './IncredibleOffers/IncredibleOfferContainer';
<|fim▁hole|>
export default (
<Route path="/" component={App}>
<IndexRoute component={IncredibleOffersContainer} />
<Route path="/special-offers" component={IncredibleOffersContainer} />
<Route path="/special-offers/:filter" component={IncredibleOffersContainer} />
</Route>
);<|fim▁end|>
| |
<|file_name|>test_filter_remapped_reads.py<|end_file_name|><|fim▁begin|>import sys
import os
import subprocess
import filter_remapped_reads
import util
#
# filter_remapped_reads.py
# INPUT FILES:
# to_remap_bam - input BAM file containing original set of reads
# that need to be remapped after having their alleles flipped
#
# remap_bam - input BAM file containing remapped reads. Read names in this
# file should be delimited with the '.' character and
# contain the following fields:
# <orig_name>.<coordinate>.<read_number>.<total_read_number>
#
# For single-end reads <coordinate> is the left end of the read
# (e.g. 16052611)
# For paired-end reads the coordinate is the start of the
# the left read and start of the right read:
# (e.g. 16052611-16052734)
#
#
#
# OUTPUT FILES:
# keep_bam - output BAM file containing reads that are retained
# after filtering
#
#
#
# TODO: need to verify that interleaved read pairs are handled appropriately
# TODO: need to test single end reads
#
#
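# Illustrative sketch of the read-name layout described above (the helper is
# hypothetical and not used by these tests):
#
#   def parse_remapped_name(read_name):
#       # rsplit from the right so '.' characters inside <orig_name> survive
#       orig_name, coord, read_num, total = read_name.rsplit(".", 3)
#       return orig_name, coord, int(read_num), int(total)
#
#   parse_remapped_name("SRR1658224.34085432.16052611-16052734.1.2")
#   # -> ("SRR1658224.34085432", "16052611-16052734", 1, 2)
#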
def write_sam_header(f):
f.write("@HD VN:1.0 SO:coordinate\n")
f.write("@SQ SN:chr22 LN:51304566\n")
f.write('@PG ID:bowtie2 PN:bowtie2 VN:2.2.6 CL:"/iblm/netapp/home/gmcvicker/anaconda2/bin/bowtie2-align-s --wrapper basic-0 -x /iblm/netapp/data1/external/GRC37/combined/bowtie2_index/hg37 -1 /tmp/16686.inpipe1 -2 /tmp/16686.inpipe2\n')
def write_to_remap_bam_pe(data_dir="test_data", bam_filename="test_data/test.to.remap.bam"):
sam_lines = ["SRR1658224.34085432 163 chr22 16052611 12 101M = 16052734 224 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"SRR1658224.34085432 83 chr22 16052734 12 101M = 16052611 -224 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
"SRR1658224.34975561 99 chr22 16071944 12 101M = 16072163 320 ATTTATTTATTTATTTATTATTGGGACAGAGTCTCACTCTGTCCCCCAGACTGGAGTCCAGTGACATGATCTCAGCTCACTGCAACCTCTGCCTCGTGGGT CCCFFFFFHHHHHJJJJJJJJJJJJIJJJJIEHIJJJJJJJIIJJJJJIJJJJJJJJJJIJHIJIJJJJIJJJJJHHHHHHFFFFFECEEEEDDDDDDBBD AS:i:-5 XS:i:-22 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:89C11 YS:i:0 YT:Z:CP",
"SRR1658224.34975561 147 chr22 16072163 12 101M = 16071944 -320 GTCTCAAACTTCTGACCTCAGGTGATCCACCCACCTCGACCTCCCAAAGTGCTGGGATTACAGGCACTAGGTCCCTAAATTAGAGCCATATTCTTTAATGT DDBCDEDCDCCDCC?DDDDDDDBACBDA<FFB:6HIIJIIJIIJJJJJJJJJJJJIJJIHJJJJJIJJJJJJJJJJJJJJJJJJJJJJHHHGGFFFFFCCC AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-5 YT:Z:CP",
"SRR1658224.7462188 163 chr22 16235410 17 101M = 16235625 316 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CC@FFFFFHHHHHJJJJJJJJJJJJJJJJIJBGIJJJJJJJJJJJJJIJIFIJJJJJJJJJHHHHGFFFFFFEEEEDEEDDDDDEED@CFFFEDDD?ABB? AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-5 YT:Z:CP",
"SRR1658224.7462188 83 chr22 16235625 17 101M = 16235410 -316 TTCAAAAGATGGTATATGCATTAATATTTTCATACAACTTCCAGCTTTTGTTTTTCTTCATTTAATTTTATTTATTTATTTATTTTTGAGATGGAGTCTCG CBDDDDECEEDEFFFDFFFHHHHHHHJJIIJJIHIHFHGHJJJJJJJGJJJJJIJJJIIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJHHHHHFFFDFCCC AS:i:-5 XS:i:-39 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:15G85 YS:i:0 YT:Z:CP",
"SRR1658224.31153145 163 chr22 16235410 17 101M = 16235625 316 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CCCFFFFFHHHHHJJJJJJJJJJJJJJJJIJFHIJJJJJJJJJJJIJIJJFHIJJJJJJJJHHHHHFFFFFFEDEEEEEDDDDDEED@DEEEEDDDDDDB2 AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-2 YT:Z:CP",
"SRR1658224.31153145 83 chr22 16235625 17 101M = 16235410 -316 TTCAAAAGATGGTATGTGCATTAATATTTTCATACAACTTCCAGTTTTTGTTTTTCTTCATTTAATTTTATTTATTTATTTATTTTTGAGATGGAGTCTCG DDDDDDDDEEEEEEFFFFFFHHHHGHHJJIJJJIIJIJIHJHF@(JJJJJJJJJJJJIIIIJJJJJJJIJJJJJJJJJJJJJJJJJJJHHHHHFFFDFCCC AS:i:-2 XS:i:-36 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:44C56 YS:i:0 YT:Z:CP",
"SRR1658224.25014179 163 chr22 16236979 31 101M = 16237137 259 ATGTTTTTTAAGATTTAATATTACTTTTTCCAACATCTTTTTATCCTCAAGTTTTTTATATTCCTGTTGTATTTTTTTATAGATAATAACTCCTGTTGAAT CCCFFFFFHHHHFIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJHGIJJJJJJJJIJJJJJJJHHHHHHHDCDDECDEEDDEDDDDDDDDDDCDC AS:i:0 XS:i:-28 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:0 YT:Z:CP",
"SRR1658224.25014179 83 chr22 16237137 31 101M = 16236979 -259 TCATCGAACTACATTAATAAAATAATATAGCTTGATAATGAAGTAGGCTGAGAATAATCTCATACAAAACCAATAACAAATTTTGAAATACATTTACTTGC CEFFFFFHHHHHHHHJJJJJJJJJIHJIJIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJIIJJJIHJJJJJJIJJJJJJJJJJJJHHHHHFDDFFCCC AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:0 YT:Z:CP",
"readpair1 163 chr22 100 12 101M = 200 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair2 163 chr22 150 12 101M = 250 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair1 83 chr22 200 12 101M = 100 -201 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
"readpair2 163 chr22 250 12 101M = 150 -201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP"]
if not os.path.exists(data_dir):
os.makedirs(data_dir)
# write temporary file in SAM format, before converting to BAM
sam_filename = data_dir + "/tmp.sam"
f = open(sam_filename, "w")
write_sam_header(f)
for line in sam_lines:
f.write(line + "\n")
f.close()
subprocess.check_call("samtools view -b %s > %s" % (sam_filename, bam_filename), shell=True)
def write_remap_bam_pe(data_dir="test_data", bam_filename="test_data/test.remap.bam"):
sam_lines = [
# Read pair expected to map 2 times and maps to correct location 2 times
"SRR1658224.34085432.16052611-16052734.1.2 163 chr22 16052611 12 101M = 16052734 224 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"SRR1658224.34085432.16052611-16052734.1.2 83 chr22 16052734 12 101M = 16052611 -224 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
"SRR1658224.34085432.16052611-16052734.2.2 163 chr22 16052611 12 101M = 16052734 224 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"SRR1658224.34085432.16052611-16052734.2.2 83 chr22 16052734 12 101M = 16052611 -224 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
# Read pair expected to map 2 times, but only maps 1 time
"SRR1658224.34975561.16071944-16072163.2.2 99 chr22 16071944 12 101M = 16072163 320 ATTTATTTATTTATTTATTATTGGGACAGAGTCTCACTCTGTCCCCCAGACTGGAGTCCAGTGACATGATCTCAGCTCACTGCAACCTCTGCCTCGTGGGT CCCFFFFFHHHHHJJJJJJJJJJJJIJJJJIEHIJJJJJJJIIJJJJJIJJJJJJJJJJIJHIJIJJJJIJJJJJHHHHHHFFFFFECEEEEDDDDDDBBD AS:i:-5 XS:i:-22 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:89C11 YS:i:0 YT:Z:CP",
"SRR1658224.34975561.16071944-16072163.2.2 147 chr22 16072163 12 101M = 16071944 -320 GTCTCAAACTTCTGACCTCAGGTGATCCACCCACCTCGACCTCCCAAAGTGCTGGGATTACAGGCACTAGGTCCCTAAATTAGAGCCATATTCTTTAATGT DDBCDEDCDCCDCC?DDDDDDDBACBDA<FFB:6HIIJIIJIIJJJJJJJJJJJJIJJIHJJJJJIJJJJJJJJJJJJJJJJJJJJJJHHHGGFFFFFCCC AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-5 YT:Z:CP",
# Read pair expected to map 2 times, but only 1/2 of 2nd pair maps back to same location
"SRR1658224.7462188.16235410-16235625.1.2 163 chr22 16235410 17 101M = 16235625 316 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CC@FFFFFHHHHHJJJJJJJJJJJJJJJJIJBGIJJJJJJJJJJJJJIJIFIJJJJJJJJJHHHHGFFFFFFEEEEDEEDDDDDEED@CFFFEDDD?ABB? AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-5 YT:Z:CP",
"SRR1658224.7462188.16235410-16235625.1.2 83 chr22 16235625 17 101M = 16235410 -316 TTCAAAAGATGGTATATGCATTAATATTTTCATACAACTTCCAGCTTTTGTTTTTCTTCATTTAATTTTATTTATTTATTTATTTTTGAGATGGAGTCTCG CBDDDDECEEDEFFFDFFFHHHHHHHJJIIJJIHIHFHGHJJJJJJJGJJJJJIJJJIIJJJJJJJJJJJJJJJJJJJJJJJJJJJJJHHHHHFFFDFCCC AS:i:-5 XS:i:-39 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:15G85 YS:i:0 YT:Z:CP",
"SRR1658224.7462188.16235410-16235625.2.2 163 chr22 16235410 17 101M * 0 0 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CC@FFFFFHHHHHJJJJJJJJJJJJJJJJIJBGIJJJJJJJJJJJJJIJIFIJJJJJJJJJHHHHGFFFFFFEEEEDEEDDDDDEED@CFFFEDDD?ABB? AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-5 YT:Z:CP",
# Read pair expected to map 2 times, but 1 pair maps to wrong location
"SRR1658224.31153145.16235410-16235625.1.2 163 chr22 16235410 17 101M = 16235625 316 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CCCFFFFFHHHHHJJJJJJJJJJJJJJJJIJFHIJJJJJJJJJJJIJIJJFHIJJJJJJJJHHHHHFFFFFFEDEEEEEDDDDDEED@DEEEEDDDDDDB2 AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-2 YT:Z:CP",
"SRR1658224.31153145.16235410-16235625.1.2 83 chr22 16235625 17 101M = 16235410 -316 TTCAAAAGATGGTATGTGCATTAATATTTTCATACAACTTCCAGTTTTTGTTTTTCTTCATTTAATTTTATTTATTTATTTATTTTTGAGATGGAGTCTCG DDDDDDDDEEEEEEFFFFFFHHHHGHHJJIJJJIIJIJIHJHF@(JJJJJJJJJJJJIIIIJJJJJJJIJJJJJJJJJJJJJJJJJJJHHHHHFFFDFCCC AS:i:-2 XS:i:-36 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:44C56 YS:i:0 YT:Z:CP",
"SRR1658224.31153145.16235410-16235625.2.2 163 chr22 18235410 17 101M = 16235625 316 AGATAATTGTCTTATTTTTTTAAAAAAAGAGTAACTTTATATTATGGAATTCATAATATTTGAGACTATAATGCATGACATAAATAGTATAAAGGAGAGAG CCCFFFFFHHHHHJJJJJJJJJJJJJJJJIJFHIJJJJJJJJJJJIJIJJFHIJJJJJJJJHHHHHFFFFFFEDEEEEEDDDDDEED@DEEEEDDDDDDB2 AS:i:0 XS:i:0 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-2 YT:Z:CP",
"SRR1658224.31153145.16235410-16235625.2.2 83 chr22 18235625 17 101M = 16235410 -316 TTCAAAAGATGGTATGTGCATTAATATTTTCATACAACTTCCAGTTTTTGTTTTTCTTCATTTAATTTTATTTATTTATTTATTTTTGAGATGGAGTCTCG DDDDDDDDEEEEEEFFFFFFHHHHGHHJJIJJJIIJIJIHJHF@(JJJJJJJJJJJJIIIIJJJJJJJIJJJJJJJJJJJJJJJJJJJHHHHHFFFDFCCC AS:i:-2 XS:i:-36 XN:i:0 XM:i:1 XO:i:0 XG:i:0 NM:i:1 MD:Z:44C56 YS:i:0 YT:Z:CP",
# Read pair expected to map 2 times, but does not map at all
# "SRR1658224.25014179"
# Read pairs expected to map 1 times, with read-pairs interleaved
"readpair1.100-200.1.2 163 chr22 100 12 101M = 200 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair2.150-250.1.2 163 chr22 150 12 101M = 250 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair1.100-200.1.2 83 chr22 200 12 101M = 100 -201 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
"readpair2.150-250.1.2 163 chr22 250 12 101M = 150 -201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair1.100-200.2.2 163 chr22 100 12 101M = 200 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair2.150-250.2.2 163 chr22 150 12 101M = 250 201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP",
"readpair1.100-200.2.2 83 chr22 200 12 101M = 100 -201 TCCTGACAGCATGTGCCCAAGGTGGTCAGGATACAGCTTGCTTCTATATATTTTAGGGAGAAAATACATCAGCCTGTAAACAAAAAATTAAATTCTAAGGT DDDDDDDDDDDDDDEDEEEFFFFHHFHHIIFIIJJJJIJJJJJJJJJJIIJJJIIIJIJIJJJJIFIIIJJIJJJJJJJIIJJJJJJJHHHHHFFFFFCCC AS:i:0 XS:i:-12 XN:i:0 XM:i:0 XO:i:0 XG:i:0 NM:i:0 MD:Z:101 YS:i:-11 YT:Z:CP",
"readpair2.150-250.2.2 163 chr22 250 12 101M = 150 -201 TGGAGACATAAAATGAGGCATATCTGACCTCCACTTCCAAAAACATCTGAGATAGGTCTCAGTTAATTAAGAAAGTTTGTTCTGCCTAGTTTAAGGACATG CCCFFFFFHHHHHJJJJJJJJJJJJJJJIJJJJJJJJJJJJJJJJJIJJIHIJJJJEHIJJJHJJJJJJJJJJJJ=DHHHHHFFFFFFEEEEEEDDCDDDC AS:i:-11 XS:i:-17 XN:i:0 XM:i:2 XO:i:0 XG:i:0 NM:i:2 MD:Z:7G44C48 YS:i:0 YT:Z:CP"
]
if not os.path.exists(data_dir):
os.makedirs(data_dir)
# write temporary file in SAM format, before converting to BAM
sam_filename = data_dir + "/tmp.sam"
f = open(sam_filename, "w")
write_sam_header(f)
for line in sam_lines:
f.write(line + "\n")
f.close()
# write to temp bam file
tmp_bam_filename = data_dir + "/tmp.bam"
subprocess.check_call("samtools view -b %s > %s" % (sam_filename, tmp_bam_filename), shell=True)
# sort the temp bam file
util.sort_bam(tmp_bam_filename, data_dir + "/tmp")
# remove temp bam
os.remove(tmp_bam_filename)
# rename sorted bam to output bam filename
os.rename(data_dir + "/tmp.sort.bam", bam_filename)
def read_bam(bam):
"""
Read a bam file into a list where each element of the list is a line from
the bam file (with the newline stripped). The header is discarded.
"""
res = subprocess.check_output('samtools view %s' % bam, shell=True)
return res.strip().split('\n')
def test_filter_remapped_reads_pe():
test_dir = "test_data"
to_remap_bam_filename = "test_data/test.to.remap.bam"
remap_bam_filename = "test_data/test.remap.bam"
keep_bam_filename = "test_data/keep.bam"
# write test input data
write_to_remap_bam_pe(data_dir=test_dir, bam_filename=to_remap_bam_filename)
write_remap_bam_pe(data_dir=test_dir, bam_filename=remap_bam_filename)
# run filter remapped reads
filter_remapped_reads.main(to_remap_bam_filename, remap_bam_filename, keep_bam_filename)
# read in filtered reads
lines = read_bam(keep_bam_filename)
# read lines from keep BAM file
read_dict = {}
for line in lines:
words = line.split()
read_name = words[0]
if read_name in read_dict:
read_dict[read_name].append(words)
else:
read_dict[read_name] = [words]
# verify that filtered reads look correct
# we expect a read pair with this identifier:
read_name = "SRR1658224.34085432"
assert read_name in read_dict
reads = read_dict[read_name]
assert len(reads) == 2
pos1 = int(reads[0][3])
pos2 = int(reads[1][3])
assert pos1 == 16052611
assert pos2 == 16052734
# expect these read pairs to be filtered out (not present)
# only one version of read pair maps (expect 2)
assert "SRR1658224.34975561" not in read_dict
# 1/2 of second read pair missing
assert "SRR1658224.7462188" not in read_dict
# 1 pair maps to wrong location
assert "SRR1658224.31153145" not in read_dict
# neither pair maps
assert "SRR1658224.25014179" not in read_dict
# expect these (interleaved) read pairs to be kept
read_name = "readpair1"<|fim▁hole|> pos2 = int(reads[1][3])
assert pos1 == 100
assert pos2 == 200
read_name = "readpair2"
assert read_name in read_dict
reads = read_dict[read_name]
assert len(reads) == 2
pos1 = int(reads[0][3])
pos2 = int(reads[1][3])
assert pos1 == 150
assert pos2 == 250<|fim▁end|>
|
assert read_name in read_dict
reads = read_dict[read_name]
assert len(reads) == 2
pos1 = int(reads[0][3])
|
<|file_name|>pubsub2storage.cpp<|end_file_name|><|fim▁begin|>/*
* pubsub2storage: service which transfers between pubsub and storage
*
* Copyright (C) 1998 - 2020. Dinand Vanvelzen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*-------------------------------------------------------------------------//
// //
// INCLUDES //
// //
//-------------------------------------------------------------------------*/
#include <string.h>
#ifndef GUCEF_CORE_DVOSWRAP_H
#include "DVOSWRAP.h"
#define GUCEF_CORE_DVOSWRAP_H
#endif /* GUCEF_CORE_DVOSWRAP_H */
#ifndef GUCEF_CORE_CTASKMANAGER_H
#include "gucefCORE_CTaskManager.h"
#define GUCEF_CORE_CTASKMANAGER_H
#endif /* GUCEF_CORE_CTASKMANAGER_H */
#ifndef GUCEF_COMCORE_CCOMCOREGLOBAL_H
#include "gucefCOMCORE_CComCoreGlobal.h"
#define GUCEF_COMCORE_CCOMCOREGLOBAL_H
#endif /* GUCEF_COMCORE_CCOMCOREGLOBAL_H ? */
#ifndef GUCEF_COMCORE_CBASICPUBSUBMSG_H
#include "gucefCOMCORE_CBasicPubSubMsg.h"
#define GUCEF_COMCORE_CBASICPUBSUBMSG_H
#endif /* GUCEF_COMCORE_CBASICPUBSUBMSG_H ? */
#ifndef GUCEF_WEB_CDUMMYHTTPSERVERRESOURCE_H
#include "gucefWEB_CDummyHTTPServerResource.h"
#define GUCEF_WEB_CDUMMYHTTPSERVERRESOURCE_H
#endif /* GUCEF_WEB_CDUMMYHTTPSERVERRESOURCE_H ? */
#ifndef GUCEF_VFS_CVFSGLOBAL_H
#include "gucefVFS_CVfsGlobal.h"
#define GUCEF_VFS_CVFSGLOBAL_H
#endif /* GUCEF_VFS_CVFSGLOBAL_H ? */
#ifndef GUCEF_VFS_CVFS_H
#include "gucefVFS_CVFS.h"
#define GUCEF_VFS_CVFS_H
#endif /* GUCEF_VFS_CVFS_H ? */
#include "pubsub2storage.h"
#if ( GUCEF_PLATFORM == GUCEF_PLATFORM_MSWIN )
#include <winsock2.h>
#endif
#ifndef GUCEF_CORE_METRICSMACROS_H
#include "gucefCORE_MetricsMacros.h"
#define GUCEF_CORE_METRICSMACROS_H
#endif /* GUCEF_CORE_METRICSMACROS_H ? */
/*-------------------------------------------------------------------------//
// //
// GLOBAL VARS //
// //
//-------------------------------------------------------------------------*/
#define GUCEF_DEFAULT_TICKET_REFILLS_ON_BUSY_CYCLE 10000
#define GUCEF_DEFAULT_PUBSUB_RECONNECT_DELAY_IN_MS 100
#define GUCEF_DEFAULT_MINIMAL_PUBSUB_BLOCK_STORAGE_SIZE_IN_BYTES (1024*1024*50)// 50MB
#define GUCEF_DEFAULT_MAXIMAL_PUBSUB_BLOCK_STORE_GROW_DELAY_IN_MS (1000*60*5) // 5mins
#define GUCEF_DEFAULT_DECODE_GROWTH_RATIO_EXPECTATION 6.0f
/*-------------------------------------------------------------------------//
// //
// IMPLEMENTATION //
// //
//-------------------------------------------------------------------------*/
ChannelSettings::ChannelSettings( void )
: CORE::CIConfigurable()
, pubsubClientConfig()
, pubsubBinarySerializerOptions()
, desiredMinimalSerializedBlockSize( GUCEF_DEFAULT_MINIMAL_PUBSUB_BLOCK_STORAGE_SIZE_IN_BYTES )
, desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs( GUCEF_DEFAULT_MAXIMAL_PUBSUB_BLOCK_STORE_GROW_DELAY_IN_MS )
, vfsStorageRootPath()
, vfsFileExtention()
, encodeCodecFamily()
, encodeCodecName()
, decodeCodecFamily()
, decodeCodecName()
, channelId( -1 )
, ticketRefillOnBusyCycle( GUCEF_DEFAULT_TICKET_REFILLS_ON_BUSY_CYCLE )
, performPubSubInDedicatedThread( true )
, applyThreadCpuAffinity( false )
, cpuAffinityForDedicatedPubSubThread( 0 )
, cpuAffinityForMainChannelThread( 0 )
, collectMetrics( true )
, mode( CHANNELMODE_PUBSUB_TO_STORAGE )
, subscribeWithoutBookmarkIfNoneIsPersisted( true )
, autoPushAfterStartupIfStorageToPubSub( true )
, youngestStoragePubSubMsgFileToLoad( CORE::CDateTime::FutureMax )
, oldestStoragePubSubMsgFileToLoad( CORE::CDateTime::PastMax )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
ChannelSettings::ChannelSettings( const ChannelSettings& src )
: pubsubClientConfig( src.pubsubClientConfig )
, pubsubBinarySerializerOptions( src.pubsubBinarySerializerOptions )
, desiredMinimalSerializedBlockSize( src.desiredMinimalSerializedBlockSize )
, desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs( src.desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs )
, vfsStorageRootPath( src.vfsStorageRootPath )
, vfsFileExtention( src.vfsFileExtention )
, encodeCodecFamily( src.encodeCodecFamily )
, encodeCodecName( src.encodeCodecName )
, decodeCodecFamily( src.decodeCodecFamily )
, decodeCodecName( src.decodeCodecName )
, channelId( src.channelId )
, ticketRefillOnBusyCycle( src.ticketRefillOnBusyCycle )
, performPubSubInDedicatedThread( src.performPubSubInDedicatedThread )
, applyThreadCpuAffinity( src.applyThreadCpuAffinity )
, cpuAffinityForDedicatedPubSubThread( src.cpuAffinityForDedicatedPubSubThread )
, cpuAffinityForMainChannelThread( src.cpuAffinityForMainChannelThread )
, collectMetrics( src.collectMetrics )
, mode( src.mode )
, subscribeWithoutBookmarkIfNoneIsPersisted( src.subscribeWithoutBookmarkIfNoneIsPersisted )
, autoPushAfterStartupIfStorageToPubSub( src.autoPushAfterStartupIfStorageToPubSub )
, youngestStoragePubSubMsgFileToLoad( src.youngestStoragePubSubMsgFileToLoad )
, oldestStoragePubSubMsgFileToLoad( src.oldestStoragePubSubMsgFileToLoad )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
ChannelSettings&
ChannelSettings::operator=( const ChannelSettings& src )
{GUCEF_TRACE;
if ( this != &src )
{
pubsubClientConfig = src.pubsubClientConfig;
pubsubBinarySerializerOptions = src.pubsubBinarySerializerOptions;
desiredMinimalSerializedBlockSize = src.desiredMinimalSerializedBlockSize;
desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs = src.desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs;
vfsStorageRootPath = src.vfsStorageRootPath;
vfsFileExtention = src.vfsFileExtention;
encodeCodecFamily = src.encodeCodecFamily;
encodeCodecName = src.encodeCodecName;
decodeCodecFamily = src.decodeCodecFamily;
decodeCodecName = src.decodeCodecName;
channelId = src.channelId;
ticketRefillOnBusyCycle = src.ticketRefillOnBusyCycle;
performPubSubInDedicatedThread = src.performPubSubInDedicatedThread;
applyThreadCpuAffinity = src.applyThreadCpuAffinity;
cpuAffinityForDedicatedPubSubThread = src.cpuAffinityForDedicatedPubSubThread;
cpuAffinityForMainChannelThread = src.cpuAffinityForMainChannelThread;
collectMetrics = src.collectMetrics;
mode = src.mode;
subscribeWithoutBookmarkIfNoneIsPersisted = src.subscribeWithoutBookmarkIfNoneIsPersisted;
autoPushAfterStartupIfStorageToPubSub = src.autoPushAfterStartupIfStorageToPubSub;
youngestStoragePubSubMsgFileToLoad = src.youngestStoragePubSubMsgFileToLoad;
oldestStoragePubSubMsgFileToLoad = src.oldestStoragePubSubMsgFileToLoad;
}
return *this;
}
/*-------------------------------------------------------------------------*/
bool
ChannelSettings::SaveConfig( CORE::CDataNode& tree ) const
{GUCEF_TRACE;
tree.SetAttribute( "desiredMinimalSerializedBlockSize", desiredMinimalSerializedBlockSize );
tree.SetAttribute( "desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs", desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs );
tree.SetAttribute( "vfsStorageRootPath", vfsStorageRootPath );
tree.SetAttribute( "vfsFileExtention", vfsFileExtention );
tree.SetAttribute( "encodeCodecFamily", encodeCodecFamily );
tree.SetAttribute( "encodeCodecName", encodeCodecName );
tree.SetAttribute( "decodeCodecFamily", decodeCodecFamily );
tree.SetAttribute( "decodeCodecName", decodeCodecName );
tree.SetAttribute( "channelId", channelId );
tree.SetAttribute( "ticketRefillOnBusyCycle", ticketRefillOnBusyCycle );
tree.SetAttribute( "performPubSubInDedicatedThread", performPubSubInDedicatedThread );
tree.SetAttribute( "applyThreadCpuAffinity", applyThreadCpuAffinity );
tree.SetAttribute( "cpuAffinityForDedicatedPubSubThread", cpuAffinityForDedicatedPubSubThread );
tree.SetAttribute( "cpuAffinityForMainChannelThread", cpuAffinityForMainChannelThread );
tree.SetAttribute( "collectMetrics", collectMetrics );
tree.SetAttribute( "mode", mode );
tree.SetAttribute( "subscribeWithoutBookmarkIfNoneIsPersisted", subscribeWithoutBookmarkIfNoneIsPersisted );
tree.SetAttribute( "autoPushAfterStartupIfStorageToPubSub", autoPushAfterStartupIfStorageToPubSub );
tree.SetAttribute( "youngestStoragePubSubMsgFileToLoad", youngestStoragePubSubMsgFileToLoad.ToIso8601DateTimeString( true, true ) );
tree.SetAttribute( "oldestStoragePubSubMsgFileToLoad", oldestStoragePubSubMsgFileToLoad.ToIso8601DateTimeString( true, true ) );
CORE::CDataNode* psClientConfig = tree.Structure( "PubSubClientConfig", '/' );
if ( !pubsubClientConfig.SaveConfig( *psClientConfig ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "ChannelSettings:SaveConfig: config is malformed, failed to save a mandatory PubSubClientConfig section" );
return false;
}
return true;
}
/*-------------------------------------------------------------------------*/
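/**
 *  Loads the channel settings from the given config tree. Every setting may be
 *  provided either as an attribute or as a child value node; any setting that
 *  is absent keeps its current (default) value. A PubSubClientConfig section
 *  carrying a pubsubClientType plus at least one topic is mandatory.
 *
 *  A minimal sketch of a config fragment, assuming an XML rendering of the
 *  CDataNode tree (names match the keys used below; the exact on-disk layout
 *  depends on the codec used and is illustrative only):
 *
 *      <ChannelSettings channelId="1" mode="1" performPubSubInDedicatedThread="true" >
 *          <PubSubClientConfig pubsubClientType="SomeClientType" >
 *              <PubSubClientTopicConfig topicName="SomeTopic" />
 *          </PubSubClientConfig>
 *      </ChannelSettings>
 */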
bool
ChannelSettings::LoadConfig( const CORE::CDataNode& tree )
{GUCEF_TRACE;
const CORE::CDataNode* psClientConfig = tree.Find( "PubSubClientConfig" );
if ( GUCEF_NULL != psClientConfig )
{
if ( !pubsubClientConfig.LoadConfig( *psClientConfig ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "ChannelSettings:LoadConfig: config is unacceptable, failed to load mandatory PubSubClientConfig section" );
return false;
}
        // There is no sane default for pubsubClientType since it depends on the clients loaded into the application,
        // as such this is a mandatory setting to provide
if ( pubsubClientConfig.pubsubClientType.IsNULLOrEmpty() )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "ChannelSettings:LoadConfig: config is malformed, \"pubsubClientType\" was not provided" );
return false;
}
        // We are fully config driven with no programmatically defined topics
        // As such the config must have yielded at least 1 topic
if ( pubsubClientConfig.topics.empty() )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "ChannelSettings:LoadConfig: config is malformed, having at least one topic configured for the client section is mandatory" );
return false;
}
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "ChannelSettings:LoadConfig: config is malformed, a PubSubClientConfig section is mandatory" );
return false;
}
desiredMinimalSerializedBlockSize = tree.GetAttributeValueOrChildValueByName( "desiredMinimalSerializedBlockSize" ).AsUInt32( desiredMinimalSerializedBlockSize, true );
desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs = tree.GetAttributeValueOrChildValueByName( "desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs" ).AsUInt32( desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs, true );
vfsStorageRootPath = tree.GetAttributeValueOrChildValueByName( "vfsStorageRootPath" ).AsString( vfsStorageRootPath, true );
vfsFileExtention = tree.GetAttributeValueOrChildValueByName( "vfsFileExtention" ).AsString( vfsFileExtention, true );
encodeCodecFamily = tree.GetAttributeValueOrChildValueByName( "encodeCodecFamily" ).AsString( encodeCodecFamily, true );
encodeCodecName = tree.GetAttributeValueOrChildValueByName( "encodeCodecName" ).AsString( encodeCodecName, true );
decodeCodecFamily = tree.GetAttributeValueOrChildValueByName( "decodeCodecFamily" ).AsString( decodeCodecFamily, true );
decodeCodecName = tree.GetAttributeValueOrChildValueByName( "decodeCodecName" ).AsString( decodeCodecName, true );
channelId = tree.GetAttributeValueOrChildValueByName( "channelId" ).AsInt32( channelId, true );
ticketRefillOnBusyCycle = tree.GetAttributeValueOrChildValueByName( "ticketRefillOnBusyCycle" ).AsUInt32( ticketRefillOnBusyCycle, true );
performPubSubInDedicatedThread = tree.GetAttributeValueOrChildValueByName( "performPubSubInDedicatedThread" ).AsBool( performPubSubInDedicatedThread, true );
applyThreadCpuAffinity = tree.GetAttributeValueOrChildValueByName( "applyThreadCpuAffinity" ).AsBool( applyThreadCpuAffinity, true );
cpuAffinityForDedicatedPubSubThread = tree.GetAttributeValueOrChildValueByName( "cpuAffinityForDedicatedPubSubThread" ).AsUInt32( cpuAffinityForDedicatedPubSubThread, true );
cpuAffinityForMainChannelThread = tree.GetAttributeValueOrChildValueByName( "cpuAffinityForMainChannelThread" ).AsUInt32( cpuAffinityForMainChannelThread, true );
collectMetrics = tree.GetAttributeValueOrChildValueByName( "collectMetrics" ).AsBool( collectMetrics, true );
mode = (TChannelMode) tree.GetAttributeValueOrChildValueByName( "mode" ).AsInt32( mode, true );
subscribeWithoutBookmarkIfNoneIsPersisted = tree.GetAttributeValueOrChildValueByName( "subscribeWithoutBookmarkIfNoneIsPersisted" ).AsBool( subscribeWithoutBookmarkIfNoneIsPersisted, true );
autoPushAfterStartupIfStorageToPubSub = tree.GetAttributeValueOrChildValueByName( "autoPushAfterStartupIfStorageToPubSub" ).AsBool( autoPushAfterStartupIfStorageToPubSub, true );
youngestStoragePubSubMsgFileToLoad.FromIso8601DateTimeString( tree.GetAttributeValueOrChildValueByName( "youngestStoragePubSubMsgFileToLoad" ).AsString( youngestStoragePubSubMsgFileToLoad.ToIso8601DateTimeString( true, true ), true ) );
oldestStoragePubSubMsgFileToLoad.FromIso8601DateTimeString( tree.GetAttributeValueOrChildValueByName( "oldestStoragePubSubMsgFileToLoad" ).AsString( oldestStoragePubSubMsgFileToLoad.ToIso8601DateTimeString( true, true ), true ) );
return true;
}
/*-------------------------------------------------------------------------*/
const CORE::CString&
ChannelSettings::GetClassTypeName( void ) const
{GUCEF_TRACE;
static CORE::CString classTypeName = "pubsub2storage::ChannelSettings";
return classTypeName;
}
/*-------------------------------------------------------------------------*/
COMCORE::CPubSubClientTopicConfig*
ChannelSettings::GetTopicConfig( const CORE::CString& topicName )
{GUCEF_TRACE;
    TTopicConfigVector::iterator i = pubsubClientConfig.topics.begin();
    while ( i != pubsubClientConfig.topics.end() )
    {
        if ( topicName == (*i).topicName )
            return &(*i);
        ++i;
    }
return GUCEF_NULL;
}
/*-------------------------------------------------------------------------*/
CPubSubClientChannel::CPubSubClientChannel( CIPubSubBookmarkPersistance* persistance )
: CORE::CTaskConsumer()
, m_pubsubClient()
, m_topics()
, m_channelSettings()
, m_mailbox()
, m_bulkMail()
, m_metricsTimer( GUCEF_NULL )
, m_pubsubClientReconnectTimer( GUCEF_NULL )
, m_buffers( 2 )
, m_msgReceiveBuffer( GUCEF_NULL )
, m_lastWriteBlockCompletion()
, m_msgOffsetIndex()
, m_persistance( persistance )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CPubSubClientChannel::~CPubSubClientChannel()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CPubSubClientChannel::TopicLink::TopicLink( void )
: topic( GUCEF_NULL )
, currentPublishActionIds()
, inFlightMsgs()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CPubSubClientChannel::TopicLink::TopicLink( COMCORE::CPubSubClientTopic* t )
: topic( t )
, currentPublishActionIds()
, inFlightMsgs()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
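/**
 *  Links each publish action id to the message it was generated for so that
 *  the message can be looked up again once the backend reports the outcome of
 *  that publish action. The two vectors are expected to be index-aligned; we
 *  defensively iterate only up to the shorter of the two. The overload taking
 *  a reference vector clones the messages since only references were received.
 */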
void
CPubSubClientChannel::TopicLink::AddInFlightMsgs( const COMCORE::CPubSubClientTopic::TPublishActionIdVector& publishActionIds ,
const COMCORE::CPubSubClientTopic::TIPubSubMsgSPtrVector& msgs )
{GUCEF_TRACE;
size_t max = SMALLEST( publishActionIds.size(), msgs.size() );
for ( size_t i=0; i<max; ++i )
inFlightMsgs[ publishActionIds[ i ] ] = msgs[ i ];
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::TopicLink::AddInFlightMsgs( const COMCORE::CPubSubClientTopic::TPublishActionIdVector& publishActionIds ,
const COMCORE::CPubSubClientTopic::TPubSubMsgsRefVector& msgs )
{GUCEF_TRACE;
size_t max = SMALLEST( publishActionIds.size(), msgs.size() );
for ( size_t i=0; i<max; ++i )
{
COMCORE::CIPubSubMsg::TNoLockSharedPtr ptr( static_cast< COMCORE::CIPubSubMsg* >( msgs[ i ]->Clone() ) );
inFlightMsgs[ publishActionIds[ i ] ] = ptr;
}
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::RegisterEventHandlers( void )
{GUCEF_TRACE;
TEventCallback callback( this, &CPubSubClientChannel::OnMetricsTimerCycle );
SubscribeTo( m_metricsTimer ,
CORE::CTimer::TimerUpdateEvent ,
callback );
if ( GUCEF_NULL != m_pubsubClientReconnectTimer )
{
TEventCallback callback( this, &CPubSubClientChannel::OnPubSubClientReconnectTimerCycle );
SubscribeTo( m_pubsubClientReconnectTimer ,
CORE::CTimer::TimerUpdateEvent ,
callback );
}
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::RegisterTopicEventHandlers( COMCORE::CPubSubClientTopic& topic )
{GUCEF_TRACE;
if ( m_channelSettings.mode == TChannelMode::CHANNELMODE_PUBSUB_TO_STORAGE )
{
TEventCallback callback( this, &CPubSubClientChannel::OnPubSubTopicMsgsReceived );
SubscribeTo( &topic ,
COMCORE::CPubSubClientTopic::MsgsRecievedEvent ,
callback );
}
}
/*-------------------------------------------------------------------------*/
CORE::CString
CPubSubClientChannel::GetType( void ) const
{GUCEF_TRACE;
return "PubSubClientChannel";
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::OnMetricsTimerCycle( CORE::CNotifier* notifier ,
const CORE::CEvent& eventId ,
CORE::CICloneable* eventData )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::OnPubSubClientReconnectTimerCycle( CORE::CNotifier* notifier ,
const CORE::CEvent& eventId ,
CORE::CICloneable* eventData )
{GUCEF_TRACE;
// stop the timer, reconnect time itself should not count towards the reconnect interval
m_pubsubClientReconnectTimer->SetEnabled( false );
    // Since the client does not support reconnects we will destructively reconnect,
    // meaning we wipe out any previous client as we cannot rely on the client implementation
    // properly handling the state transition
if ( DisconnectPubSubClient( true ) )
{
if ( ConnectPubSubClient() )
return; // no need to resume the timer
}
    // no joy, start the timer again
    m_pubsubClientReconnectTimer->SetEnabled( true );
}
/*-------------------------------------------------------------------------*/
CORE::CDynamicBufferSwap&
CPubSubClientChannel::GetSerializedMsgBuffers( void )
{GUCEF_TRACE;
return m_buffers;
}
/*-------------------------------------------------------------------------*/
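/**
 *  Handler for batches of messages received from the pub-sub backend.
 *  Messages are appended in serialized binary form to the current writer
 *  buffer, building up a message container with the following rough layout:
 *
 *      [ header ][ msg 0 ][ msg 1 ] ... [ msg N ][ footer: msg offset index ]
 *
 *  The footer is only written once the container is considered complete, which
 *  happens when either desiredMinimalSerializedBlockSize worth of data has
 *  been gathered or desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs has
 *  elapsed since the previous block was completed.
 */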
void
CPubSubClientChannel::OnPubSubTopicMsgsReceived( CORE::CNotifier* notifier ,
const CORE::CEvent& eventId ,
CORE::CICloneable* eventData )
{GUCEF_TRACE;
if ( GUCEF_NULL == eventData )
return;
try
{
const COMCORE::CPubSubClientTopic::TMsgsRecievedEventData& receiveAction = ( *static_cast< COMCORE::CPubSubClientTopic::TMsgsRecievedEventData* >( eventData ) );
if ( !receiveAction.empty() )
{
COMCORE::CPubSubClientTopic::TPubSubMsgsRefVector::const_iterator i = receiveAction.begin();
const CORE::CDateTime& firstMsgDt = (*i)->GetMsgDateTime();
bool firstBlock = m_lastWriteBlockCompletion == CORE::CDateTime::Empty;
if ( firstBlock )
{
m_lastWriteBlockCompletion = CORE::CDateTime::NowUTCDateTime();
}
if ( GUCEF_NULL == m_msgReceiveBuffer )
{
m_msgReceiveBuffer = m_buffers.GetNextWriterBuffer( firstMsgDt, true, GUCEF_MT_INFINITE_LOCK_TIMEOUT );
CORE::UInt32 newBytesWritten = 0;
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::SerializeHeader( m_channelSettings.pubsubBinarySerializerOptions, 0, *m_msgReceiveBuffer, newBytesWritten ) )
{
// We carry on best effort but this is really bad
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnPubSubTopicMsgsReceived: Failed to write container header at start of new pub-sub msg container" );
}
m_msgOffsetIndex.clear();
}
CORE::UInt32 bufferOffset = m_msgReceiveBuffer->GetDataSize();
while ( i != receiveAction.end() )
{
CORE::UInt32 ticks = CORE::GUCEFGetTickCount();
CORE::UInt32 msgBytesWritten = 0;
if ( COMCORE::CPubSubMsgBinarySerializer::Serialize( m_channelSettings.pubsubBinarySerializerOptions, *(*i), bufferOffset, *m_msgReceiveBuffer, msgBytesWritten ) )
{
m_msgOffsetIndex.push_back( bufferOffset );
ticks = CORE::GUCEFGetTickCount() - ticks;
bufferOffset += msgBytesWritten;
m_msgReceiveBuffer->SetDataSize( bufferOffset );
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_BELOW_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnPubSubTopicMsgsReceived: Serialized a message with serialized size " + CORE::ToString( msgBytesWritten ) + ". This took " + CORE::ToString( ticks ) + "ms" );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_BELOW_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnPubSubTopicMsgsReceived: Failed to serialize a message" );
}
++i;
}
// Check to see if we have gathered enough data or enough time has passed to consider the current container complete
if ( m_msgReceiveBuffer->GetDataSize() >= m_channelSettings.desiredMinimalSerializedBlockSize ||
( !firstBlock && m_lastWriteBlockCompletion.GetTimeDifferenceInMillisecondsToNow() >= m_channelSettings.desiredMaxTimeToWaitToGrowSerializedBlockSizeInMs ) )
            {
                // Let's wrap things up...
                CORE::UInt32 newBytesWritten = 0;
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::SerializeFooter( m_msgOffsetIndex, bufferOffset, *m_msgReceiveBuffer, newBytesWritten ) )
{
// We carry on best effort but this is really bad
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnPubSubTopicMsgsReceived: Failed to write container footer at end of current pub-sub msg container" );
}
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnPubSubTopicMsgsReceived: Completed a serialized msg data block of size " + CORE::ToString( bufferOffset ) );
m_msgReceiveBuffer = GUCEF_NULL;
m_lastWriteBlockCompletion = CORE::CDateTime::NowUTCDateTime();
}
}
}
catch ( const std::exception& e )
{
GUCEF_EXCEPTION_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) + "):OnPubSubTopicMsgsReceived: exception: " + CORE::CString( e.what() ) );
}
}
/*-------------------------------------------------------------------------*/
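/**
 *  Disconnects the pub-sub client if we have one. The client object itself is
 *  only released when destruction is explicitly requested or when the backend
 *  does not support auto-reconnect, in which case a brand new client will be
 *  created on the next (re)connect attempt.
 */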
bool
CPubSubClientChannel::DisconnectPubSubClient( bool destroyClient )
{GUCEF_TRACE;
if ( m_pubsubClient.IsNULL() )
return true;
if ( !m_pubsubClient->Disconnect() )
{
        GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
            "):DisconnectPubSubClient: Failed to disconnect the pub-sub client" );
return false;
}
COMCORE::CPubSubClientFeatures clientFeatures;
m_pubsubClient->GetSupportedFeatures( clientFeatures );
if ( destroyClient || !clientFeatures.supportsAutoReconnect )
{
m_pubsubClient.Unlink();
}
return true;
}
/*-------------------------------------------------------------------------*/
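/**
 *  (Re)creates the pub-sub client as needed, connects it, creates topic access
 *  for every configured topic and subscribes where subscribe support is
 *  desired. The subscribe method depends on the backend's advertised features:
 *      - no bookmarking concept: plain Subscribe() and hope for the best
 *      - server-side managed bookmarks: plain Subscribe(), the backend resumes
 *      - client-side bookmarks: fetch the last persisted bookmark and
 *        subscribe starting from it, optionally falling back to a plain
 *        Subscribe() when none is available, as controlled by the setting
 *        subscribeWithoutBookmarkIfNoneIsPersisted
 */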
bool
CPubSubClientChannel::ConnectPubSubClient( void )
{GUCEF_TRACE;
if ( !DisconnectPubSubClient() )
return false;
if ( m_pubsubClient.IsNULL() )
{
// Create and configure the pub-sub client
m_channelSettings.pubsubClientConfig.pulseGenerator = GetPulseGenerator();
m_pubsubClient = COMCORE::CComCoreGlobal::Instance()->GetPubSubClientFactory().Create( m_channelSettings.pubsubClientConfig.pubsubClientType, m_channelSettings.pubsubClientConfig );
if ( m_pubsubClient.IsNULL() )
{
            GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
                "):ConnectPubSubClient: Failed to create a pub-sub client of type \"" + m_channelSettings.pubsubClientConfig.pubsubClientType + "\". Cannot proceed" );
return false;
}
}
COMCORE::CPubSubClientFeatures clientFeatures;
m_pubsubClient->GetSupportedFeatures( clientFeatures );
if ( !clientFeatures.supportsAutoReconnect )
{
        if ( GUCEF_NULL == m_pubsubClientReconnectTimer )
            m_pubsubClientReconnectTimer = new CORE::CTimer( *GetPulseGenerator(), m_channelSettings.pubsubClientConfig.reconnectDelayInMs );
}
SubscribeTo( m_pubsubClient.GetPointerAlways() );
if ( !m_pubsubClient->Connect() )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Failed to connect the pub-sub client" );
return false;
}
// Create and configure the pub-sub client's topics
m_topics.clear();
m_topics.reserve( m_channelSettings.pubsubClientConfig.topics.size() );
ChannelSettings::TTopicConfigVector::iterator i = m_channelSettings.pubsubClientConfig.topics.begin();
while ( i != m_channelSettings.pubsubClientConfig.topics.end() )
{
COMCORE::CPubSubClientTopic* topic = m_pubsubClient->CreateTopicAccess( (*i) );
if ( GUCEF_NULL == topic )
{
if ( !(*i).isOptional )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Failed to create a pub-sub client topic access for topic \"" + (*i).topicName + "\". Cannot proceed" );
return false;
}
            else
            {
                GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
                    "):ConnectPubSubClient: Unable to create a pub-sub client topic access for optional topic \"" + (*i).topicName + "\". Proceeding" );

                // Skip this optional topic entirely: we must not register event
                // handlers on, or retain a link to, a GUCEF_NULL topic pointer
                ++i;
                continue;
            }
        }
        RegisterTopicEventHandlers( *topic );
        m_topics.push_back( TopicLink( topic ) );
        ++i;
}
TopicVector::iterator t = m_topics.begin();
while ( t != m_topics.end() )
{
TopicLink& topicLink = (*t);
COMCORE::CPubSubClientTopic* topic = topicLink.topic;
if ( topic->InitializeConnectivity() )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Successfully requested connectivity initialization for topic \"" + topic->GetTopicName() + "\". Proceeding" );
            // We use the 'desired' feature to also drive whether we actually subscribe at this point,
            // which saves us an extra setting
COMCORE::CPubSubClientTopicConfig* topicConfig = m_channelSettings.GetTopicConfig( topic->GetTopicName() );
if ( GUCEF_NULL != topicConfig && topicConfig->needSubscribeSupport )
{
// The method of subscription depends on the supported feature set
bool subscribeSuccess = false;
if ( !clientFeatures.supportsBookmarkingConcept ) // We have no control bookmark wise with this backend, just subscribe and hope for the best
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Bookmarking concept is not supported by the backend, we will attempt to subscribe as-is" );
subscribeSuccess = topic->Subscribe();
}
else
if ( clientFeatures.supportsServerSideBookmarkPersistance ) // first preference is always backend managed bookmarking if available
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Bookmarking concept is natively supported and managed by the backend independently and we will attempt to subscribe as such" );
subscribeSuccess = topic->Subscribe();
}
else
{
                    // bookmarks are supported but they rely on client-side persistence
                    // we will need to obtain said bookmark
COMCORE::CPubSubBookmark bookmark;
if ( !m_persistance->GetPersistedBookmark( m_channelSettings.channelId, topic->GetTopicName(), bookmark ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Bookmarking concept is supported by the backend via a client-side message index marker but we failed at obtaining the last used message index" );
if ( m_channelSettings.subscribeWithoutBookmarkIfNoneIsPersisted )
{
subscribeSuccess = topic->Subscribe();
if ( !subscribeSuccess )
{
                                GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
                                    "):ConnectPubSubClient: Also unable to subscribe using the default bookmark as a fallback" );
return false;
}
}
else
return false;
}
else
{
                        GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
                            "):ConnectPubSubClient: Bookmarking concept is supported by the backend via a client-side bookmark. Bookmark type=" + CORE::ToString( bookmark.GetBookmarkType() ) + ". Bookmark=" + bookmark.GetBookmarkData().AsString() );
subscribeSuccess = topic->SubscribeStartingAtBookmark( bookmark );
}
}
if ( !subscribeSuccess )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):ConnectPubSubClient: Failed to subscribe to topic: " + topic->GetTopicName() );
return false;
}
}
}
++t;
}
return true;
}
/*-------------------------------------------------------------------------*/
bool
CPubSubClientChannel::OnTaskStart( CORE::CICloneable* taskData )
{GUCEF_TRACE;
m_metricsTimer = new CORE::CTimer( *GetPulseGenerator(), 1000 );
m_metricsTimer->SetEnabled( m_channelSettings.pubsubClientConfig.desiredFeatures.supportsMetrics );
m_buffers.SetMinimalBufferSize( m_channelSettings.desiredMinimalSerializedBlockSize );
m_msgOffsetIndex.clear();
m_msgOffsetIndex.reserve( 1000 );
if ( m_channelSettings.performPubSubInDedicatedThread )
{
        // Set the minimum number of cycles we will go full speed if a single cycle was not enough to handle
        // all the processing. This will cause a bypass of CPU yielding if/when the situation arises.
        // In such a case the thread will run at max speed for at least the number of cycles set below.
GetPulseGenerator()->RequestPulsesPerImmediatePulseRequest( m_channelSettings.ticketRefillOnBusyCycle );
// Default smallest pulse delta at 25ms
GetPulseGenerator()->RequestPeriodicPulses( this, 25 );
if ( m_channelSettings.applyThreadCpuAffinity )
{
if ( SetCpuAffinityByCpuId( m_channelSettings.cpuAffinityForDedicatedPubSubThread ) )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnTaskStart: Successfully set a CPU affinity for logical CPU " + CORE::UInt32ToString( m_channelSettings.cpuAffinityForDedicatedPubSubThread ) );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnTaskStart: Failed to set a CPU affinity for logical CPU " + CORE::UInt32ToString( m_channelSettings.cpuAffinityForDedicatedPubSubThread ) +
". Proceeding without affinity");
}
}
}
if ( !ConnectPubSubClient() )
{
GUCEF_WARNING_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):OnTaskStart: Failed initial connection attempt on task start, will rely on auto-reconnect" );
}
RegisterEventHandlers();
return true;
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::OnStoredPubSubMsgTransmissionFailure( const CORE::CDateTime& firstMsgDt )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
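/**
 *  Takes the next fully written container buffer, if any, and publishes its
 *  messages. The container footer is deserialized first to obtain the message
 *  offset index, after which the messages are deserialized by linking into the
 *  buffer (no per-message copies) and published to every topic that supports
 *  publishing. Returns false when a deserialization or publish step failed.
 */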
bool
CPubSubClientChannel::TransmitNextPubSubMsgBuffer( void )
{GUCEF_TRACE;
CORE::CDateTime firstMsgDt;
m_msgReceiveBuffer = m_buffers.GetNextReaderBuffer( firstMsgDt, true, 25 );
if ( GUCEF_NULL == m_msgReceiveBuffer )
return true; // nothing to do
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: New buffer is available of " + CORE::ToString( m_msgReceiveBuffer->GetDataSize() ) + " bytes" );
CORE::UInt32 bytesRead = 0;
COMCORE::CPubSubMsgContainerBinarySerializer::TMsgOffsetIndex originalOffsetIndex;
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::DeserializeFooter( originalOffsetIndex, *m_msgReceiveBuffer, bytesRead ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: Failed to read container footer" );
OnStoredPubSubMsgTransmissionFailure( firstMsgDt );
return false;
}
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: Per footer the buffer contains " + CORE::ToString( originalOffsetIndex.size() ) + " messages to publish" );
// We now link logical message objects to the data in the buffer
bool isCorrupted = false;
COMCORE::CPubSubMsgContainerBinarySerializer::TBasicPubSubMsgVector msgs;
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::Deserialize( msgs, true, originalOffsetIndex, *m_msgReceiveBuffer, isCorrupted ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: Failed to deserialize messages from container. According to the footer the container had " +
CORE::ToString( originalOffsetIndex.size() ) + " entries. isCorrupted=" + CORE::BoolToString( isCorrupted ) );
OnStoredPubSubMsgTransmissionFailure( firstMsgDt );
return false;
}
// We now have the messages in a format that allows interpretation by the pub-sub backend
// We can now proceed with publishing all the messages to the relevant topics
CORE::UInt32 topicsToPublishOn = 0;
CORE::UInt32 topicsPublishedOn = 0;
bool publishSuccess = true;
TopicVector::iterator i = m_topics.begin();
while ( i != m_topics.end() )
{
TopicLink& topicLink = (*i);
COMCORE::CPubSubClientTopic* topic = topicLink.topic;
if ( GUCEF_NULL != topic )
{
if ( topic->IsPublishingSupported() )
{
++topicsToPublishOn;
if ( topic->Publish( topicLink.currentPublishActionIds, msgs, true ) )
{
++topicsPublishedOn;
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: Failed to publish messages to topic" );
OnStoredPubSubMsgTransmissionFailure( firstMsgDt );
publishSuccess = false;
}
}
}
++i;
}
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "PubSubClientChannel(" + CORE::PointerToString( this ) +
"):TransmitNextPubSubMsgBuffer: Successfully published messages to " + CORE::ToString( topicsPublishedOn ) + " topics, " +
CORE::ToString( topicsToPublishOn ) + " topics available for publishing" );
return publishSuccess;
}
/*-------------------------------------------------------------------------*/
bool
CPubSubClientChannel::OnTaskCycle( CORE::CICloneable* taskData )
{GUCEF_TRACE;
if ( m_channelSettings.mode == TChannelMode::CHANNELMODE_STORAGE_TO_PUBSUB )
{
TransmitNextPubSubMsgBuffer();
}
// We are never 'done' so return false
return false;
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::OnTaskEnding( CORE::CICloneable* taskdata ,
bool willBeForced )
{GUCEF_TRACE;
m_buffers.SignalEndOfWriting();
}
/*-------------------------------------------------------------------------*/
void
CPubSubClientChannel::OnTaskEnded( CORE::CICloneable* taskData ,
bool wasForced )
{GUCEF_TRACE;
delete m_metricsTimer;
m_metricsTimer = GUCEF_NULL;
delete m_pubsubClientReconnectTimer;
m_pubsubClientReconnectTimer = GUCEF_NULL;
CORE::CTaskConsumer::OnTaskEnded( taskData, wasForced );
}
/*-------------------------------------------------------------------------*/
bool
CPubSubClientChannel::LoadConfig( const ChannelSettings& channelSettings )
{GUCEF_TRACE;
m_channelSettings = channelSettings;
return true;
}
/*-------------------------------------------------------------------------*/
const ChannelSettings&
CPubSubClientChannel::GetChannelSettings( void ) const
{GUCEF_TRACE;
return m_channelSettings;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::CStorageChannel()
: CORE::CTaskConsumer()
, m_channelSettings()
, m_metricsTimer( GUCEF_NULL )
, m_metrics()
, m_pubsubClient( new CPubSubClientChannel( this ) )
, m_msgReceiveBuffer( GUCEF_NULL )
, m_vfsFilePostfix( ".vUNKNOWN.bin" )
, m_lastPersistedMsgId()
, m_lastPersistedMsgDt()
, m_encodeSizeRatio( -1 )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::CStorageChannel( const CStorageChannel& src )
: CORE::CTaskConsumer()
, m_channelSettings( src.m_channelSettings )
, m_metricsTimer( GUCEF_NULL )
, m_metrics()
    , m_pubsubClient( new CPubSubClientChannel( this ) )
    , m_msgReceiveBuffer( GUCEF_NULL )
    , m_vfsFilePostfix( src.m_vfsFilePostfix )
, m_lastPersistedMsgId()
, m_lastPersistedMsgDt()
, m_encodeSizeRatio( src.m_encodeSizeRatio )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::~CStorageChannel()
{GUCEF_TRACE;
delete m_metricsTimer;
m_metricsTimer = GUCEF_NULL;
}
/*-------------------------------------------------------------------------*/
void
CStorageChannel::RegisterEventHandlers( void )
{GUCEF_TRACE;
TEventCallback callback6( this, &CStorageChannel::OnMetricsTimerCycle );
SubscribeTo( m_metricsTimer ,
CORE::CTimer::TimerUpdateEvent ,
callback6 );
}
/*-------------------------------------------------------------------------*/
CStorageChannel::StorageToPubSubRequest::StorageToPubSubRequest( void )
: startDt()
, endDt()
, vfsPubSubMsgContainersToPush()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::StorageToPubSubRequest::StorageToPubSubRequest( const CORE::CDateTime& startDt, const CORE::CDateTime& endDt )
: startDt( startDt )
, endDt( endDt )
, vfsPubSubMsgContainersToPush()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::StorageToPubSubRequest::StorageToPubSubRequest( const StorageToPubSubRequest& src )
: startDt( src.startDt )
, endDt( src.endDt )
, vfsPubSubMsgContainersToPush( src.vfsPubSubMsgContainersToPush )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
const CORE::CString&
CStorageChannel::StorageToPubSubRequest::GetClassTypeName( void ) const
{GUCEF_TRACE;
static const CORE::CString classTypeName = "StorageToPubSubRequest";
return classTypeName;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::StorageToPubSubRequest::SaveConfig( CORE::CDataNode & tree ) const
{GUCEF_TRACE;
return false;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::StorageToPubSubRequest::LoadConfig( const CORE::CDataNode & treeroot )
{GUCEF_TRACE;
return false;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::AddStorageToPubSubRequest( const StorageToPubSubRequest& request )
{GUCEF_TRACE;
m_storageToPubSubRequests.push_back( request );
return true;
}
/*-------------------------------------------------------------------------*/
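/**
 *  Applies the channel settings, resolving variables in the VFS storage root
 *  path ( "{channelId}" is substituted with the channel's id ) and deriving
 *  defaults for the file extension and the decode codec from the encode codec.
 *  For example, with encodeCodecName "deflate" the default extension becomes
 *  "bin.gz" and the default decodeCodecName becomes "inflate".
 */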
bool
CStorageChannel::LoadConfig( const ChannelSettings& channelSettings )
{GUCEF_TRACE;
m_channelSettings = channelSettings;
m_channelSettings.vfsStorageRootPath = CORE::ResolveVars( m_channelSettings.vfsStorageRootPath ).ReplaceSubstr( "{channelId}", CORE::ToString( m_channelSettings.channelId ) );
if ( m_channelSettings.vfsFileExtention.IsNULLOrEmpty() )
{
if ( m_channelSettings.encodeCodecFamily.IsNULLOrEmpty() || m_channelSettings.encodeCodecName.IsNULLOrEmpty() )
m_channelSettings.vfsFileExtention = "bin";
else
{
if ( "deflate" == m_channelSettings.encodeCodecName )
m_channelSettings.vfsFileExtention = "bin.gz";
else
m_channelSettings.vfsFileExtention = "bin.encoded";
}
}
// the encoder and decoder almost always belong to the same codec family so we can make that the default
if ( m_channelSettings.decodeCodecFamily.IsNULLOrEmpty() )
{
m_channelSettings.decodeCodecFamily = m_channelSettings.encodeCodecFamily;
}
if ( m_channelSettings.decodeCodecName.IsNULLOrEmpty() )
{
if ( "deflate" == m_channelSettings.encodeCodecName )
m_channelSettings.decodeCodecName = "inflate";
}
m_vfsFilePostfix = ".v" + CORE::ToString( COMCORE::CPubSubMsgContainerBinarySerializer::CurrentFormatVersion ) + '.' + m_channelSettings.vfsFileExtention;
return m_pubsubClient->LoadConfig( channelSettings );
}
/*-------------------------------------------------------------------------*/
const ChannelSettings&
CStorageChannel::GetChannelSettings( void ) const
{GUCEF_TRACE;
return m_channelSettings;
}
/*-------------------------------------------------------------------------*/
CORE::CString
CStorageChannel::GetType( void ) const
{GUCEF_TRACE;
return "StorageChannel";
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::WaitForTaskToFinish( CORE::Int32 timeoutInMs )
{GUCEF_TRACE;
    // Overriding the base class implementation because this consumer can itself start another
    // consumer, depending on settings, transparently to the caller.
if ( CTaskConsumer::WaitForTaskToFinish( timeoutInMs ) )
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "StorageChannel:WaitForTaskToFinish: Successfully waited for channel " + CORE::Int32ToString( m_channelSettings.channelId ) + "'s task to stop" );
if ( m_channelSettings.performPubSubInDedicatedThread )
{
if ( m_pubsubClient->WaitForTaskToFinish( timeoutInMs ) )
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "StorageChannel:WaitForTaskToFinish: Successfully waited for channel " + CORE::Int32ToString( m_channelSettings.channelId ) + "'s dedicated pub sub task to stop" );
return true;
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "StorageChannel:WaitForTaskToFinish: Failed waiting for dedicated pub sub task to stop for channel " + CORE::Int32ToString( m_channelSettings.channelId ) );
}
}
return true;
}
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "StorageChannel:WaitForTaskToFinish: Failed waiting for task to stop for channel " + CORE::Int32ToString( m_channelSettings.channelId ) );
return false;
}
/*-------------------------------------------------------------------------*/
CStorageChannel::ChannelMetrics::ChannelMetrics( void )
//: udpBytesReceived( 0 )
//, udpPacketsReceived( 0 )
//, redisMessagesTransmitted( 0 )
//, redisPacketsInMsgsTransmitted( 0 )
//, redisPacketsInMsgsRatio( 0 )
//, redisTransmitQueueSize( 0 )
//, redisErrorReplies( 0 )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
void
CStorageChannel::OnMetricsTimerCycle( CORE::CNotifier* notifier ,
const CORE::CEvent& eventId ,
CORE::CICloneable* eventData )
{GUCEF_TRACE;
//m_metrics.udpBytesReceived = m_udpSocket->GetBytesReceived( true );
//m_metrics.udpPacketsReceived = m_udpSocket->GetNrOfDataReceivedEvents( true );
//m_metrics.redisTransmitQueueSize = m_redisWriter->GetRedisTransmitQueueSize();
//m_metrics.redisMessagesTransmitted = m_redisWriter->GetRedisMsgsTransmittedCounter( true );
//m_metrics.redisPacketsInMsgsTransmitted = m_redisWriter->GetRedisPacketsInMsgsTransmittedCounter( true );
//m_metrics.redisPacketsInMsgsRatio = m_redisWriter->GetRedisPacketsInMsgsRatio();
//m_metrics.redisErrorReplies = m_redisWriter->GetRedisErrorRepliesCounter( true );
}
/*-------------------------------------------------------------------------*/
const CStorageChannel::ChannelMetrics&
CStorageChannel::GetMetrics( void ) const
{GUCEF_TRACE;
return m_metrics;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::OnTaskStart( CORE::CICloneable* taskData )
{GUCEF_TRACE;
if ( m_channelSettings.performPubSubInDedicatedThread )
{
CORE::ThreadPoolPtr threadPool = CORE::CCoreGlobal::Instance()->GetTaskManager().GetThreadPool();
if ( !threadPool->StartTask( m_pubsubClient ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:OnTaskStart: Failed to start dedicated task (dedicated thread) for pub-sub. Falling back to a single thread" );
m_channelSettings.performPubSubInDedicatedThread = false;
}
else
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:OnTaskStart: Successfully requested the launch of a dedicated task (dedicated thread) for pub-sub" );
}
}
if ( !m_channelSettings.performPubSubInDedicatedThread )
{
if ( !m_pubsubClient->OnTaskStart( taskData ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:OnTaskStart: Failed startup of pub-sub client logic" );
return false;
}
}
if ( ( m_channelSettings.mode == TChannelMode::CHANNELMODE_STORAGE_TO_PUBSUB ) && ( m_channelSettings.autoPushAfterStartupIfStorageToPubSub ) )
{
AddStorageToPubSubRequest( StorageToPubSubRequest( m_channelSettings.oldestStoragePubSubMsgFileToLoad, m_channelSettings.youngestStoragePubSubMsgFileToLoad ) );
}
return true;
}
/*-------------------------------------------------------------------------*/
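/**
 *  Returns the VFS path of the most recently written container file, or an
 *  empty string if none exists. Because container filenames start with the
 *  ISO 8601 timestamp of their first message, the alphabetically sorted VFS
 *  index is also chronologically sorted and the last entry is the newest file.
 *  'lastOffset' denotes how many files to skip counting back from the newest,
 *  allowing callers to walk backwards past unusable files.
 */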
CORE::CString
CStorageChannel::GetPathToLastWrittenPubSubStorageFile( CORE::UInt32 lastOffset ) const
{GUCEF_TRACE;
VFS::CVFS& vfs = VFS::CVfsGlobal::Instance()->GetVfs();
CORE::CString fileFilter = '*' + m_vfsFilePostfix;
VFS::CVFS::TStringSet index;
vfs.GetList( index, m_channelSettings.vfsStorageRootPath, false, true, fileFilter, true, false );
    // The index is already alphabetically ordered and since we use the datetime as part of the filename
    // we can leverage that to get the last produced file
if ( !index.empty() )
{
VFS::CVFS::TStringSet::reverse_iterator f = index.rbegin();
CORE::UInt32 n=0;
while ( n<lastOffset && f != index.rend() )
{
++f; ++n;
}
if ( f != index.rend() )
{
const CORE::CString& lastFilename = (*f);
return lastFilename;
}
}
return CORE::CString::Empty;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::GetLastPersistedMsgAttributes( CORE::Int32 channelId ,
const CORE::CString& topicName ,
CORE::CVariant& msgId ,
CORE::CDateTime& msgDt )
{GUCEF_TRACE;
bool success = true;
CORE::UInt32 lastFileOffset = 0;
bool fileExistedButHasIssue = false;
do
{
success = GetLastPersistedMsgAttributesWithOffset( channelId ,
topicName ,
msgId ,
msgDt ,
lastFileOffset ,
fileExistedButHasIssue );
++lastFileOffset;
}
while ( !success && fileExistedButHasIssue );
return success;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::GetPersistedBookmark( CORE::Int32 channelId ,
const CORE::CString& topicName ,
COMCORE::CPubSubBookmark& bookmark )
{GUCEF_TRACE;
// @TODO: Update to use dedicated bookmark persistance
CORE::CVariant msgId;
CORE::CDateTime msgDt;
if ( GetLastPersistedMsgAttributes( channelId, topicName, msgId, msgDt ) )
{
if ( msgId.IsInitialized() )
{
bookmark.SetBookmarkData( msgId );
bookmark.SetBookmarkType( COMCORE::CPubSubBookmark::BOOKMARK_TYPE_MSG_ID );
}
else
{
CORE::CVariant dtStrVar = msgDt.ToIso8601DateTimeString( true, true );
bookmark.SetBookmarkData( dtStrVar );
bookmark.SetBookmarkType( COMCORE::CPubSubBookmark::BOOKMARK_TYPE_MSG_DATETIME );
}
return true;
}
bookmark.SetBookmarkType( COMCORE::CPubSubBookmark::BOOKMARK_TYPE_NOT_AVAILABLE );
return false;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::LoadStorageFile( const CORE::CString& vfsPath ,
CORE::CDynamicBuffer& targetBuffer )
{GUCEF_TRACE;
VFS::CVFS& vfs = VFS::CVfsGlobal::Instance()->GetVfs();
if ( !m_channelSettings.decodeCodecFamily.IsNULLOrEmpty() && !m_channelSettings.decodeCodecName.IsNULLOrEmpty() )
{
CORE::Float32 encodeRatio = m_encodeSizeRatio < 0 ? GUCEF_DEFAULT_DECODE_GROWTH_RATIO_EXPECTATION : m_encodeSizeRatio;
CORE::UInt32 estimatedApproxDecodedSize = (CORE::UInt32) ( vfs.GetFileSize( vfsPath ) * encodeRatio );
targetBuffer.SetBufferSize( estimatedApproxDecodedSize, false );
if ( !vfs.DecodeAsFile( targetBuffer, 0, vfsPath, m_channelSettings.decodeCodecFamily, m_channelSettings.decodeCodecName ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:LoadStorageFile: Cannot decode and load persisted file. CodeFamily:" + m_channelSettings.decodeCodecFamily +
" CodecName: " + m_channelSettings.decodeCodecName + ". VFS File: " + vfsPath );
return false;
}
        if ( targetBuffer.GetDataSize() > 0 )
            m_encodeSizeRatio = (CORE::Float32) ( targetBuffer.GetDataSize() / ( 1.0f * vfs.GetFileSize( vfsPath ) ) );
}
else
{
// Not using any encoding, load the file as-is
if ( !vfs.LoadFile( targetBuffer, vfsPath, "rb" ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:LoadStorageFile: Cannot load last persisted file. VFS File: " + vfsPath );
return false;
}
}
return true;
}
/*-------------------------------------------------------------------------*/
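/**
 *  Obtains the msg id and datetime of the last message persisted to storage,
 *  reading the container file 'lastFileOffset' files back from the newest.
 *  When the container turns out to be corrupt an index rebuild scan is
 *  attempted: the recovered offset index is serialized as a new footer, the
 *  amended container is written back to the VFS and the read is retried.
 *  'fileExistedButHasIssue' tells the caller whether retrying against an
 *  older file is worthwhile.
 */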
bool
CStorageChannel::GetLastPersistedMsgAttributesWithOffset( CORE::Int32 channelId ,
const CORE::CString& topicName ,
CORE::CVariant& msgId ,
CORE::CDateTime& msgDt ,
CORE::UInt32 lastFileOffset ,
bool& fileExistedButHasIssue )
{GUCEF_TRACE;
// @TODO: topic name segregation
if ( channelId != m_channelSettings.channelId )
{
fileExistedButHasIssue = false;
return false; // this should never happen
}
if ( m_lastPersistedMsgId.IsNULLOrEmpty() && m_lastPersistedMsgDt == CORE::CDateTime::Empty )
{
CORE::CString lastWrittenFilePath = GetPathToLastWrittenPubSubStorageFile( lastFileOffset );
if ( lastWrittenFilePath.IsNULLOrEmpty() )
{
fileExistedButHasIssue = false;
            GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Cannot obtain path to last written file with offset " + CORE::ToString( lastFileOffset ) );
return false;
}
CORE::CDynamicBuffer lastStorageFileContent;
if ( !LoadStorageFile( lastWrittenFilePath, lastStorageFileContent ) )
{
fileExistedButHasIssue = false;
            GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Unable to load file from storage. Loading using last offset " + CORE::ToString( lastFileOffset ) );
return false;
}
if ( 0 == lastStorageFileContent.GetDataSize() )
{
fileExistedButHasIssue = true;
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: last persisted file is empty. VFS File: " + lastWrittenFilePath );
return false;
}
bool isCorrupted = false;
COMCORE::CBasicPubSubMsg msg;
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::DeserializeMsgAtIndex( msg, true, lastStorageFileContent, 0, false, isCorrupted ) )
{
if ( isCorrupted )
{
// Attempt to recover what we can with an index rebuild
                // This could effectively shift the "last" message to the most recent non-corrupt persisted message, which becomes the new "last"
GUCEF_WARNING_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Failed to deserialize the last message, will attempt an index rebuild of the corrupt container" );
CORE::UInt32 bytesRead = 0;
COMCORE::CPubSubMsgContainerBinarySerializer::TMsgOffsetIndex newRecoveredIndex;
if ( COMCORE::CPubSubMsgContainerBinarySerializer::IndexRebuildScan( newRecoveredIndex, lastStorageFileContent, bytesRead ) )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Successfully performed an index rebuild of the corrupt container, discovered " + CORE::ToString( newRecoveredIndex.size() ) + " messages. Will attempt to add a new footer" );
CORE::UInt32 bytesWritten = 0;
if ( COMCORE::CPubSubMsgContainerBinarySerializer::SerializeFooter( newRecoveredIndex, lastStorageFileContent.GetDataSize()-1, lastStorageFileContent, bytesWritten ) )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Successfully serialized a new footer to the previously corrupt container. Will attempt to persist the amended container" );
VFS::CVFS& vfs = VFS::CVfsGlobal::Instance()->GetVfs();
if ( m_channelSettings.encodeCodecFamily.IsNULLOrEmpty() || m_channelSettings.encodeCodecName.IsNULLOrEmpty() )
{
if ( vfs.StoreAsFile( lastWrittenFilePath, lastStorageFileContent, 0, true ) )
{
                                GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Successfully stored the rebuilt pub-sub message container at: " + lastWrittenFilePath );
}
else
{
fileExistedButHasIssue = true;
                                GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:GetLastPersistedMsgAttributes: StoreAsFile() Failed for the rebuilt message container" );
return false;
}
}
else
{
if ( vfs.EncodeAsFile( lastStorageFileContent, 0, lastWrittenFilePath, true, m_channelSettings.encodeCodecFamily, m_channelSettings.encodeCodecName ) )
{
                                m_encodeSizeRatio = (CORE::Float32) ( lastStorageFileContent.GetDataSize() / ( 1.0f * vfs.GetFileSize( lastWrittenFilePath ) ) );
                                GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:GetLastPersistedMsgAttributes: Successfully encoded and stored the rebuilt pub-sub message container resource at: \"" + lastWrittenFilePath + "\" with an encoded size ratio of " + CORE::ToString( m_encodeSizeRatio ) );
}
else
{
fileExistedButHasIssue = true;
                                GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:GetLastPersistedMsgAttributes: EncodeAsFile() Failed for the rebuilt message container" );
return false;
}
}
}
}
                // Let's try again, hopefully it's fixed now, best effort...
if ( !COMCORE::CPubSubMsgContainerBinarySerializer::DeserializeMsgAtIndex( msg, true, lastStorageFileContent, 0, false, isCorrupted ) )
{
// This should not happen, something is seriously wrong here.
fileExistedButHasIssue = true;
                    GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:GetLastPersistedMsgAttributes: Failed to load last message even after a successful rebuild. isCorrupted=" + CORE::ToString( isCorrupted ) );
return false;
}
}
}
m_lastPersistedMsgId = msg.GetMsgId();
m_lastPersistedMsgDt = msg.GetMsgDateTime();
}
msgId = m_lastPersistedMsgId;
msgDt = m_lastPersistedMsgDt;
return true;
}
/*-------------------------------------------------------------------------*/
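/**
 *  Parses the first and last message datetimes out of a container file path.
 *  Filenames are expected to have the form
 *      <firstMsgDtIso8601>_<lastMsgDtIso8601><vfsFilePostfix>
 *  as produced by StoreNextReceivedPubSubBuffer(). An illustrative example
 *  (the exact timestamp formatting shown is an assumption):
 *      20210101T120000.000Z_20210101T120500.000Z.v1.bin.gz
 */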
bool
CStorageChannel::GetStartAndEndFromContainerFilename( const CORE::CString& fullPath ,
CORE::CDateTime& startDt ,
CORE::CDateTime& endDt ) const
{GUCEF_TRACE;
// first strip the extra stuff from the full path to get the string form timestamps
CORE::CString segment = CORE::ExtractFilename( fullPath );
segment = segment.CutChars( m_vfsFilePostfix.Length(), false, 0 );
CORE::CString startDtSegment = segment.SubstrToChar( '_', true );
CORE::CString endDtSegment = segment.SubstrToChar( '_', false );
// Try to parse what is left as a valid ISO 8601 DateTime
if ( startDt.FromIso8601DateTimeString( startDtSegment ) && endDt.FromIso8601DateTimeString( endDtSegment ) )
return true;
return false;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::GetPathsToPubSubStorageFiles( const CORE::CDateTime& startDt ,
const CORE::CDateTime& endDt ,
CORE::CString::StringSet& files ) const
{GUCEF_TRACE;
VFS::CVFS& vfs = VFS::CVfsGlobal::Instance()->GetVfs();
CORE::CString fileFilter = '*' + m_vfsFilePostfix;
VFS::CVFS::TStringSet index;
vfs.GetList( index, m_channelSettings.vfsStorageRootPath, false, true, fileFilter, true, false );
VFS::CVFS::TStringSet::iterator i = index.begin();
while ( i != index.end() )
{
CORE::CDateTime containerFileFirstMsgDt;
CORE::CDateTime containerFileLastMsgDt;
if ( GetStartAndEndFromContainerFilename( (*i), containerFileFirstMsgDt, containerFileLastMsgDt ) )
{
            // Check the container's first message dt against our time range
            // It is assumed here that the containers have messages chronologically ordered
if ( containerFileFirstMsgDt.IsWithinRange( startDt, endDt ) || containerFileLastMsgDt.IsWithinRange( startDt, endDt ) )
{
files.insert( (*i) );
}
}
++i;
}
return true;
}
/*-------------------------------------------------------------------------*/
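/**
 *  Persists the next fully written container buffer, if any, to the VFS.
 *  The target filename is built from the datetime of the first message in the
 *  batch plus the datetime of the last message ( see
 *  GetStartAndEndFromContainerFilename() for the inverse operation ) and the
 *  buffer is optionally run through the configured encode codec on the way out.
 */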
bool
CStorageChannel::StoreNextReceivedPubSubBuffer( void )
{GUCEF_TRACE;
CORE::CDynamicBufferSwap& buffers = m_pubsubClient->GetSerializedMsgBuffers();
CORE::CDateTime msgBatchDt;
m_msgReceiveBuffer = buffers.GetNextReaderBuffer( msgBatchDt, false, 25 );
if ( GUCEF_NULL != m_msgReceiveBuffer )
{
        // Get the timestamp of the last message in the buffer.
        // This is not as expensive an operation as it would appear because we just link to the bytes in the buffer, we don't copy them
bool isCorrupted = false;
COMCORE::CBasicPubSubMsg lastMsg;
COMCORE::CPubSubMsgContainerBinarySerializer::DeserializeMsgAtIndex( lastMsg, true, *m_msgReceiveBuffer, 0, false, isCorrupted );
CORE::CString vfsFilename = msgBatchDt.ToIso8601DateTimeString( false, true ) + '_' + lastMsg.GetMsgDateTime().ToIso8601DateTimeString( false, true ) + m_vfsFilePostfix;
CORE::CString vfsStoragePath = CORE::CombinePath( m_channelSettings.vfsStorageRootPath, vfsFilename );
VFS::CVFS& vfs = VFS::CVfsGlobal::Instance()->GetVfs();
if ( m_channelSettings.encodeCodecFamily.IsNULLOrEmpty() || m_channelSettings.encodeCodecName.IsNULLOrEmpty() )
{
if ( vfs.StoreAsFile( vfsStoragePath, *m_msgReceiveBuffer, 0, true ) )
{
                GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:OnTaskCycle: Successfully stored pub-sub message block at: " + vfsStoragePath );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:OnTaskCycle: StoreAsFile() Failed" );
}
}
else
{
if ( vfs.EncodeAsFile( *m_msgReceiveBuffer, 0, vfsStoragePath, true, m_channelSettings.encodeCodecFamily, m_channelSettings.encodeCodecName ) )
{
m_encodeSizeRatio = (CORE::Float32) ( m_msgReceiveBuffer->GetDataSize() / ( 1.0f * vfs.GetFileSize( vfsStoragePath ) ) );
                GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:OnTaskCycle: Successfully encoded and stored pub-sub message block at: \"" + vfsStoragePath + "\" with an encoded size ratio of " + CORE::ToString( m_encodeSizeRatio ) );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:OnTaskCycle: EncodeAsFile() Failed" );
}
}
}
return true;
}
/*-------------------------------------------------------------------------*/
void
CStorageChannel::OnUnableToFullFillStorageToPubSubRequest( const StorageToPubSubRequest& failedRequest )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
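/**
 *  Serves the oldest queued storage-to-pubsub request by loading every
 *  container that overlaps the requested time range into writer buffers for
 *  the pub-sub side to transmit. Containers that only partially overlap the
 *  range are not re-serialized: instead the in-memory copy gets its footer
 *  rewritten with a trimmed offset index so that readers only see the subset
 *  of messages that falls within the requested range.
 */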
bool
CStorageChannel::ProcessNextStorageToPubSubRequest( void )
{GUCEF_TRACE;
StorageToPubSubRequestDeque::iterator i = m_storageToPubSubRequests.begin();
if ( i != m_storageToPubSubRequests.end() )
{
StorageToPubSubRequest& queuedRequest = (*i);
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Request for messages in range " +
CORE::ToString( queuedRequest.startDt ) + " to " + CORE::ToString( queuedRequest.endDt ) );
if ( queuedRequest.vfsPubSubMsgContainersToPush.empty() )
{
if ( !GetPathsToPubSubStorageFiles( queuedRequest.startDt ,
queuedRequest.endDt ,
queuedRequest.vfsPubSubMsgContainersToPush ) )
{
GUCEF_WARNING_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Did not obtain any storage paths for time range " +
queuedRequest.startDt.ToIso8601DateTimeString( true, true ) + " to " + queuedRequest.endDt.ToIso8601DateTimeString( true, true ) );
OnUnableToFullFillStorageToPubSubRequest( queuedRequest );
                m_storageToPubSubRequests.pop_front();
return false;
}
}
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Available data in the request range spans " +
CORE::ToString( queuedRequest.vfsPubSubMsgContainersToPush.size() ) + " containers" );
size_t containersProcessed = 0;
CORE::CString::StringSet::iterator n = queuedRequest.vfsPubSubMsgContainersToPush.begin();
while ( n != queuedRequest.vfsPubSubMsgContainersToPush.end() )
{
bool needContainerSubsetOnly = false;
bool containerStartIsInRange = true;
bool containerEndIsInRange = true;
CORE::CDateTime containerFileFirstMsgDt;
CORE::CDateTime containerFileLastMsgDt;
if ( GetStartAndEndFromContainerFilename( (*n), containerFileFirstMsgDt, containerFileLastMsgDt ) )
{
containerStartIsInRange = containerFileFirstMsgDt.IsWithinRange( queuedRequest.startDt, queuedRequest.endDt );
containerEndIsInRange = containerFileLastMsgDt.IsWithinRange( queuedRequest.startDt, queuedRequest.endDt );
needContainerSubsetOnly = !( containerStartIsInRange && containerEndIsInRange );
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Parsed file path container start and end DateTimes. Start=" +
CORE::ToString( containerFileFirstMsgDt ) + ", End=" + CORE::ToString( containerFileLastMsgDt ) + ". containerStartIsInRange=" + CORE::ToString( containerStartIsInRange ) +
", containerEndIsInRange=" + CORE::ToString( containerEndIsInRange ) + ", needContainerSubsetOnly=" + CORE::ToString( needContainerSubsetOnly ) );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Failed to parse start and/or end DateTime from file path: " + (*n) );
}
if ( needContainerSubsetOnly )
{
if ( GUCEF_NULL == m_msgReceiveBuffer )
m_msgReceiveBuffer = m_pubsubClient->GetSerializedMsgBuffers().GetNextWriterBuffer( containerStartIsInRange ? containerFileFirstMsgDt : queuedRequest.startDt, true, GUCEF_MT_INFINITE_LOCK_TIMEOUT );
if ( GUCEF_NULL != m_msgReceiveBuffer )
{
if ( LoadStorageFile( (*n), *m_msgReceiveBuffer ) )
{
CORE::UInt32 bytesRead = 0;
COMCORE::CPubSubMsgContainerBinarySerializer::TMsgOffsetIndex originalOffsetIndex;
COMCORE::CPubSubMsgContainerBinarySerializer::DeserializeFooter( originalOffsetIndex, *m_msgReceiveBuffer, bytesRead );
                        // Since we loaded the entire container we now need to efficiently make sure only the subset gets processed
                        // We do that by editing the footer in the buffer to logically eliminate the entries we do not need
                        // To any reader of the footer it will then appear as if only the needed entries are in the container
CORE::UInt32 startIndexOffset = 0;
CORE::UInt32 endIndexOffset = 0;
bool isCorrupted = false;
COMCORE::CPubSubMsgContainerBinarySerializer::TBasicPubSubMsgVector msgs;
if ( COMCORE::CPubSubMsgContainerBinarySerializer::Deserialize( msgs, true, originalOffsetIndex, *m_msgReceiveBuffer, isCorrupted ) )
{
// Check to see how many we need to trim from the start
if ( !containerStartIsInRange )
{
COMCORE::CPubSubMsgContainerBinarySerializer::TBasicPubSubMsgVector::iterator m = msgs.begin();
while ( m != msgs.end() )
{
if ( (*m).GetMsgDateTime() >= queuedRequest.startDt )
break;
++m; ++startIndexOffset;
}
}
if ( !containerEndIsInRange )
{
COMCORE::CPubSubMsgContainerBinarySerializer::TBasicPubSubMsgVector::reverse_iterator m = msgs.rbegin();
while ( m != msgs.rend() )
{
if ( (*m).GetMsgDateTime() <= queuedRequest.endDt )
break;
++m; ++endIndexOffset;
}
}
CORE::UInt32 o2=0;
std::size_t newIndexSize = originalOffsetIndex.size() - ( startIndexOffset + endIndexOffset );
endIndexOffset = (CORE::UInt32) originalOffsetIndex.size() - endIndexOffset;
COMCORE::CPubSubMsgContainerBinarySerializer::TMsgOffsetIndex newOffsetIndex( newIndexSize );
for ( CORE::UInt32 o=startIndexOffset; o<endIndexOffset; ++o )
{
newOffsetIndex[ o2 ] = originalOffsetIndex[ o ];
++o2;
}
// Now we overwrite the footer in the in-memory container to only have the subset of messages we care about referenced
CORE::UInt32 bytesWritten = 0;
if ( COMCORE::CPubSubMsgContainerBinarySerializer::SerializeFooter( newOffsetIndex, m_msgReceiveBuffer->GetDataSize()-1, *m_msgReceiveBuffer, bytesWritten ) )
{
// We are done with this container
++containersProcessed;
m_msgReceiveBuffer = GUCEF_NULL;
}
}
}
}
else
{
// No write buffer available, we need to wait before processing more requests
return false;
}
}
else
{
if ( GUCEF_NULL == m_msgReceiveBuffer )
m_msgReceiveBuffer = m_pubsubClient->GetSerializedMsgBuffers().GetNextWriterBuffer( containerFileFirstMsgDt, true, GUCEF_MT_INFINITE_LOCK_TIMEOUT );
if ( GUCEF_NULL != m_msgReceiveBuffer )
{
GUCEF_DEBUG_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:ProcessNextStorageToPubSubRequest: Loading the entire container as-is to serve (part of) the request" );
if ( LoadStorageFile( (*n), *m_msgReceiveBuffer ) )
{
                        // Since we loaded the entire container and we don't need a subset we are done
++containersProcessed;
m_msgReceiveBuffer = GUCEF_NULL;
}
}
else
{
// No write buffer available, we need to wait before processing more requests
return false;
}
}
++n;
}
if ( containersProcessed != queuedRequest.vfsPubSubMsgContainersToPush.size() )
{
OnUnableToFullFillStorageToPubSubRequest( queuedRequest );
}
m_storageToPubSubRequests.pop_front();
m_pubsubClient->GetSerializedMsgBuffers().SignalEndOfWriting();
}
return true;
}
/*-------------------------------------------------------------------------*/
bool
CStorageChannel::OnTaskCycle( CORE::CICloneable* taskData )
{GUCEF_TRACE;
if ( !m_channelSettings.performPubSubInDedicatedThread )
{
m_pubsubClient->OnTaskCycle( taskData );
}
switch ( m_channelSettings.mode )
{
case TChannelMode::CHANNELMODE_PUBSUB_TO_STORAGE:
{
StoreNextReceivedPubSubBuffer();
break;
}
case TChannelMode::CHANNELMODE_STORAGE_TO_PUBSUB:
{
ProcessNextStorageToPubSubRequest();
break;
}
}
// We are never 'done' so return false
return false;
}
/*-------------------------------------------------------------------------*/
void
CStorageChannel::OnTaskEnding( CORE::CICloneable* taskdata ,
bool willBeForced )
{GUCEF_TRACE;
if ( !m_channelSettings.performPubSubInDedicatedThread )
{
m_pubsubClient->OnTaskEnding( taskdata, willBeForced );
}
else
{
// Since we are the ones that launched the dedicated pub-sub thread we should also ask
// to have it cleaned up when we are shutting down this thread
CORE::ThreadPoolPtr threadPool = CORE::CCoreGlobal::Instance()->GetTaskManager().GetThreadPool();
if ( !threadPool->RequestTaskToStop( m_pubsubClient.StaticCast< CORE::CTaskConsumer >(), false ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "StorageChannel:OnTaskEnding: Failed to request the dedicated task (dedicated thread) for pub-sub to stop" );
}
else
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "StorageChannel:OnTaskEnding: Successfully requested the dedicated task (dedicated thread) for pub-sub to stop" );
}
}
}
/*-------------------------------------------------------------------------*/
void
CStorageChannel::OnTaskEnded( CORE::CICloneable* taskData ,
bool wasForced )
{GUCEF_TRACE;
delete m_metricsTimer;
m_metricsTimer = GUCEF_NULL;
if ( !m_channelSettings.performPubSubInDedicatedThread )
{
m_pubsubClient->OnTaskEnded( taskData, wasForced );
}
CORE::CTaskConsumer::OnTaskEnded( taskData, wasForced );
}
/*-------------------------------------------------------------------------*/
RestApiPubSub2StorageInfoResource::RestApiPubSub2StorageInfoResource( PubSub2Storage* app )
: WEB::CCodecBasedHTTPServerResource()
, m_app( app )
{GUCEF_TRACE;
m_allowSerialize = true;
}
/*-------------------------------------------------------------------------*/
RestApiPubSub2StorageInfoResource::~RestApiPubSub2StorageInfoResource()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
bool
RestApiPubSub2StorageInfoResource::Serialize( const CORE::CString& resourcePath ,
CORE::CDataNode& output ,
const CORE::CString& representation ,
const CORE::CString& params )
{GUCEF_TRACE;
output.SetName( "info" );
output.SetAttribute( "application", "pubsub2storage" );
output.SetAttribute( "appBuildDateTime", PubSub2Storage::GetAppCompileDateTime().ToIso8601DateTimeString( true, true ) );
output.SetAttribute( "platformBuildDateTime", CORE::CDateTime::CompileDateTime().ToIso8601DateTimeString( true, true ) );
#ifdef GUCEF_DEBUG_MODE
output.SetAttribute( "isReleaseBuild", "false" );
#else
output.SetAttribute( "isReleaseBuild", "true" );
#endif
return true;
}
/*-------------------------------------------------------------------------*/
RestApiPubSub2StorageConfigResource::RestApiPubSub2StorageConfigResource( PubSub2Storage* app, bool appConfig )
: WEB::CCodecBasedHTTPServerResource()
, m_app( app )
, m_appConfig( appConfig )
{GUCEF_TRACE;
m_allowSerialize = true;
m_allowDeserialize = true;
}
/*-------------------------------------------------------------------------*/
RestApiPubSub2StorageConfigResource::~RestApiPubSub2StorageConfigResource()
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
bool
RestApiPubSub2StorageConfigResource::Serialize( const CORE::CString& resourcePath ,
CORE::CDataNode& output ,
const CORE::CString& representation ,
const CORE::CString& params )
{GUCEF_TRACE;
if ( m_appConfig )
{
const CORE::CValueList& loadedConfig = m_app->GetAppConfig();
return loadedConfig.SaveConfig( output );
}
else
{
const CORE::CDataNode& globalConfig = m_app->GetGlobalConfig();
output.Copy( globalConfig );
return true;
}
}
/*-------------------------------------------------------------------------*/
RestApiPubSub2StorageConfigResource::TDeserializeState
RestApiPubSub2StorageConfigResource::Deserialize( const CORE::CString& resourcePath ,
const CORE::CDataNode& input ,
const CORE::CString& representation ,
bool isDeltaUpdateOnly )
{GUCEF_TRACE;
if ( m_appConfig )
{
CORE::CValueList loadedAppConfig;
if ( isDeltaUpdateOnly )
{
// Grab a copy of the current app config
loadedAppConfig = m_app->GetAppConfig();
loadedAppConfig.SetAllowMultipleValues( false );
loadedAppConfig.SetAllowDuplicates( false );
}
else
{
loadedAppConfig.CopySettingsFrom( m_app->GetAppConfig() );
}
if ( loadedAppConfig.LoadConfig( input ) )
{
if ( isDeltaUpdateOnly )
{
loadedAppConfig.SetAllowMultipleValues( m_app->GetAppConfig().GetAllowMultipleValues() );
loadedAppConfig.SetAllowDuplicates( m_app->GetAppConfig().GetAllowDuplicates() );
}
// First put the app in standby mode before we mess with the settings
if ( !m_app->SetStandbyMode( true ) )
return TDeserializeState::DESERIALIZESTATE_UNABLETOUPDATE;
const CORE::CDataNode& globalConfig = m_app->GetGlobalConfig();
if ( m_app->LoadConfig( loadedAppConfig ) )
{
if ( !m_app->IsGlobalStandbyEnabled() )
{
if ( m_app->SetStandbyMode( false ) )
return TDeserializeState::DESERIALIZESTATE_SUCCEEDED;
else
return TDeserializeState::DESERIALIZESTATE_UNABLETOUPDATE;
}
else
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "RestApiUdp2RedisConfigResource: IsGlobalStandbyEnabled is true. We will leave the app in standby mode" );
return TDeserializeState::DESERIALIZESTATE_SUCCEEDED;
}
}
else
{
return TDeserializeState::DESERIALIZESTATE_UNABLETOUPDATE;
}
}
return TDeserializeState::DESERIALIZESTATE_CORRUPTEDINPUT;
}
else
{
if ( isDeltaUpdateOnly )
{
//// Grab a copy of the current global config
//CORE::CDataNode globalConfigCopy = m_app->GetGlobalConfig();
//if ( globalConfigCopy.Merge( input ) )
//{
// const CORE::CValueList& loadedAppConfig = m_app->GetAppConfig();
// if ( m_app->LoadConfig( loadedAppConfig, globalConfigCopy ) )
// {
// return TDeserializeState::DESERIALIZESTATE_SUCCEEDED;
// }
// else
// {
// return TDeserializeState::DESERIALIZESTATE_UNABLETOUPDATE;
// }
//}
return TDeserializeState::DESERIALIZESTATE_CORRUPTEDINPUT;
}
else
{
const CORE::CValueList& loadedAppConfig = m_app->GetAppConfig();
if ( m_app->LoadConfig( input ) )
{
return TDeserializeState::DESERIALIZESTATE_SUCCEEDED;
}
else
{
return TDeserializeState::DESERIALIZESTATE_UNABLETOUPDATE;
}
}
}
}
/*-------------------------------------------------------------------------*/
PubSub2Storage::PubSub2Storage( void )
: CORE::CObserver()
, CORE::CIConfigurable()
, m_isInStandby( false )
, m_globalStandbyEnabled( false )
, m_udpStartPort()
, m_channelCount()
, m_storageStartChannelID()
, m_redisStreamName()
, m_redisHost()
, m_redisPort()
, m_channels()
, m_channelSettings()
, m_templateChannelSettings()
, m_httpServer()
, m_httpRouter()
, m_appConfig()
, m_globalConfig()
, m_metricsTimer()
, m_transmitMetrics( true )
{GUCEF_TRACE;
TEventCallback callback1( this, &PubSub2Storage::OnMetricsTimerCycle );
SubscribeTo( &m_metricsTimer ,
CORE::CTimer::TimerUpdateEvent ,
callback1 );
}
/*-------------------------------------------------------------------------*/
PubSub2Storage::~PubSub2Storage()
{GUCEF_TRACE;
m_httpServer.Close();
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::IsGlobalStandbyEnabled( void ) const
{GUCEF_TRACE;
return m_globalStandbyEnabled;
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::Start( void )
{GUCEF_TRACE;
m_isInStandby = true;
bool errorOccured = !SetStandbyMode( m_globalStandbyEnabled );
if ( !errorOccured )
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage: Opening REST API" );
return m_httpServer.Listen();
}
return !errorOccured;
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::SetStandbyMode( bool putInStandbyMode )
{GUCEF_TRACE;
// Check if we need to do anything
if ( m_isInStandby == putInStandbyMode )
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( " + CORE::BoolToString( putInStandbyMode ) + " ): Already in the desired mode (" + CORE::BoolToString( m_isInStandby ) + "), nothing to do" );
return true;
}
if ( putInStandbyMode )
{
bool totalSuccess = true;
CORE::ThreadPoolPtr threadPool = CORE::CCoreGlobal::Instance()->GetTaskManager().GetThreadPool();
// Signal all channel threads to stop gracefully
// Since this standby operation is global not per channel we signal all to stop before
// we start any waiting operation
StorageChannelMap::iterator i = m_channels.begin();
while ( i != m_channels.end() )
{
CStorageChannelPtr channel = (*i).second;
if ( !threadPool->RequestTaskToStop( channel.StaticCast< CORE::CTaskConsumer >(), false ) )
{
totalSuccess = false;
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( true ): Failed to signal task to stop for channel " + CORE::Int32ToString( channel->GetChannelSettings().channelId ) )
}
else
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( true ): Requested channel " + CORE::Int32ToString( channel->GetChannelSettings().channelId ) + "'s task to stop" );
}
++i;
}
// Now actually wait for the threads to be finished
i = m_channels.begin();
while ( i != m_channels.end() )
{
CStorageChannelPtr channel = (*i).second;
if ( !channel->WaitForTaskToFinish( 30000 ) )
{
totalSuccess = false;
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( true ): Failed to wait for task to finish for channel " + CORE::Int32ToString( channel->GetChannelSettings().channelId ) )
}
else
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( true ): Successfully waited for channel " + CORE::Int32ToString( channel->GetChannelSettings().channelId ) + "'s task to stop" );
}
++i;
}
m_metricsTimer.SetEnabled( false );
m_isInStandby = totalSuccess;
return totalSuccess;
}
else
{
bool totalSuccess = true;
// Channel config could have changed such that we need to remove channels that should no longer exist
StorageChannelMap::iterator i = m_channels.begin();
while ( i != m_channels.end() )
{
CORE::Int32 channelId = (*i).first;
ChannelSettingsMap::iterator n = m_channelSettings.find( channelId );
if ( n == m_channelSettings.end() )
{
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( false ): Found channel which no longer has corresponding channel settings, deleting channel with ID " + CORE::Int32ToString( channelId ) );
m_channels.erase( i );
i = m_channels.begin();
continue;
}
++i;
}
// Alternatively channel config could have changed such that we have new channels
ChannelSettingsMap::iterator n = m_channelSettings.begin();
while ( n != m_channelSettings.end() )
{
CORE::Int32 channelId = (*n).first;
StorageChannelMap::iterator i = m_channels.find( channelId );
if ( i == m_channels.end() )
{
// This is a brand new channel. Lets add the channel object for it
GUCEF_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage:SetStandbyMode( false ): Found channel settings whith no corresponding channel object, creating new channel with ID " + CORE::Int32ToString( channelId ) );
m_channels[ channelId ] = CStorageChannelPtr( new CStorageChannel() );
}
++n;
}
CORE::ThreadPoolPtr threadPool = CORE::CCoreGlobal::Instance()->GetTaskManager().GetThreadPool();
n = m_channelSettings.begin();
while ( n != m_channelSettings.end() )
{
CORE::Int32 channelId = (*n).first;
StorageChannelMap::iterator i = m_channels.find( channelId );
if ( i != m_channels.end() )
{
const ChannelSettings& channelSettings = (*n).second;
CStorageChannelPtr channel = (*i).second;
if ( !channel->LoadConfig( channelSettings ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage::SetStandbyMode( false ): Failed to set channel settings on channel " + CORE::Int32ToString( channelId ) );
totalSuccess = false;
break;
}
if ( !threadPool->StartTask( channel ) )
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_IMPORTANT, "PubSub2Storage::SetStandbyMode( false ): Failed to start task (dedicated thread) for channel " + CORE::Int32ToString( channelId ) );
totalSuccess = false;
break;
}
}
++n;
}
if ( totalSuccess && m_transmitMetrics )
{
m_metricsTimer.SetInterval( 1000 );
m_metricsTimer.SetEnabled( true );
}
m_isInStandby = !totalSuccess;
return totalSuccess;
}
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::LoadConfig( const CORE::CValueList& appConfig )
{GUCEF_TRACE;
m_globalStandbyEnabled = CORE::StringToBool( appConfig.GetValueAlways( "GlobalStandbyEnabled" ), false );
m_channelCount = CORE::StringToUInt16( CORE::ResolveVars( appConfig.GetValueAlways( "ChannelCount", "1" ) ) );
m_storageStartChannelID = CORE::StringToInt32( CORE::ResolveVars( appConfig.GetValueAlways( "StorageStartChannelID", "1" ) ) );
bool applyCpuThreadAffinityByDefault = CORE::StringToBool( CORE::ResolveVars( appConfig.GetValueAlways( "ApplyCpuThreadAffinityByDefault" ) ), false );
CORE::UInt32 logicalCpuCount = CORE::GetLogicalCPUCount();
CORE::UInt32 currentCpu = 0;
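// Walk the configured channel ID range, instantiating settings from the
// template where needed and spreading threads across the logical CPUs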
CORE::Int32 maxChannelId = m_storageStartChannelID + m_channelCount;
for ( CORE::Int32 channelId = m_storageStartChannelID; channelId < maxChannelId; ++channelId )
{
ChannelSettings* channelSettings = GUCEF_NULL;
ChannelSettingsMap::iterator s = m_channelSettings.find( channelId );
if ( s == m_channelSettings.end() )
{
channelSettings = &m_channelSettings[ channelId ];
*channelSettings = m_templateChannelSettings;
if ( -1 == channelSettings->channelId )
channelSettings->channelId = channelId;
}
else
{
channelSettings = &m_channelSettings[ channelId ];
}
if ( channelSettings->applyThreadCpuAffinity || applyCpuThreadAffinityByDefault )
{
channelSettings->cpuAffinityForMainChannelThread = currentCpu;
++currentCpu;
if ( currentCpu >= logicalCpuCount ) // Wrap around if we run out of CPUs
currentCpu = 0;
if ( channelSettings->performPubSubInDedicatedThread )
{
channelSettings->cpuAffinityForDedicatedPubSubThread = currentCpu;
++currentCpu;
if ( currentCpu >= logicalCpuCount ) // Wrap around if we run out of CPUs
currentCpu = 0;
}
}
}
m_appConfig = appConfig;
m_httpServer.SetPort( CORE::StringToUInt16( CORE::ResolveVars( appConfig.GetValueAlways( "RestApiPort" ) ), 10000 ) );
m_httpRouter.SetResourceMapping( "/info", RestApiPubSub2StorageInfoResource::THTTPServerResourcePtr( new RestApiPubSub2StorageInfoResource( this ) ) );
m_httpRouter.SetResourceMapping( "/config/appargs", RestApiPubSub2StorageConfigResource::THTTPServerResourcePtr( new RestApiPubSub2StorageConfigResource( this, true ) ) );
m_httpRouter.SetResourceMapping( "/config", RestApiPubSub2StorageConfigResource::THTTPServerResourcePtr( new RestApiPubSub2StorageConfigResource( this, false ) ) );
m_httpRouter.SetResourceMapping( CORE::ResolveVars( appConfig.GetValueAlways( "RestBasicHealthUri", "/health/basic" ) ), RestApiPubSub2StorageConfigResource::THTTPServerResourcePtr( new WEB::CDummyHTTPServerResource() ) );
m_httpServer.GetRouterController()->AddRouterMapping( &m_httpRouter, "", "" );
return true;
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::SaveConfig( CORE::CDataNode& tree ) const
{GUCEF_TRACE;
// not fully supported right now
tree.Copy( m_globalConfig );
return true;
}
/*-------------------------------------------------------------------------*/
bool
PubSub2Storage::LoadConfig( const CORE::CDataNode& cfg )
{GUCEF_TRACE;
TChannelCfgMap channelMap;
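// Collect all 'StorageChannel' config sections keyed by channel index,
// where the index '*' denotes the template applied to all channels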
CORE::CDataNode::TConstDataNodeSet channelParentCfgs = cfg.FindChildrenOfType( "Channels", true );
CORE::CDataNode::TConstDataNodeSet::iterator i = channelParentCfgs.begin();
while ( i != channelParentCfgs.end() )
{
CORE::CDataNode::const_iterator n = (*i)->ConstBegin();
while ( n != (*i)->ConstEnd() )
{
const CORE::CString& channelIndex = (*n)->GetName();
channelMap[ channelIndex ] = (*n)->FindChildrenOfType( "StorageChannel" );
++n;
}
++i;
}
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSub2Storage:LoadConfig: Found " + CORE::ToString( channelMap.size() ) + " configuration entries for storage channels" );
// load the template if any
TChannelCfgMap::iterator m = channelMap.find( "*" );
if ( m != channelMap.end() )
{
CORE::CDataNode::TConstDataNodeSet& matches = (*m).second;
if ( !matches.empty() )
{
if ( m_templateChannelSettings.LoadConfig( *(*matches.begin()) ) )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSub2Storage:LoadConfig: Successfully loaded template config for storage channels" );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "PubSub2Storage:LoadConfig: Failed to correctly load template config for storage channels" );
return false;
}
}
}
// load the specifically configured channels if any
m = channelMap.begin();
while ( m != channelMap.end() )
{
const CORE::CString& channelIndexStr = (*m).first;
if ( channelIndexStr != "*" )
{
CORE::CDataNode::TConstDataNodeSet& matches = (*m).second;
if ( !matches.empty() )
{
CORE::Int32 channelIndex = CORE::StringToInt32( channelIndexStr );
ChannelSettings& channelSettings = m_channelSettings[ channelIndex ];
if ( channelSettings.LoadConfig( *(*matches.begin()) ) )
{
GUCEF_LOG( CORE::LOGLEVEL_NORMAL, "PubSub2Storage:LoadConfig: Successfully loaded explicit config for storage channels " + channelIndexStr );
}
else
{
GUCEF_ERROR_LOG( CORE::LOGLEVEL_CRITICAL, "PubSub2Storage:LoadConfig: Failed to correctly load explicit config for storage channels " + channelIndexStr );
return false;
}
}
}
++m;
}
m_globalConfig.Copy( cfg );
return true;
}
/*-------------------------------------------------------------------------*/
const CORE::CDateTime&
PubSub2Storage::GetAppCompileDateTime( void )
{GUCEF_TRACE;
static CORE::CDateTime compileDt = CORE::CDateTime::CompileDateTime( __DATE__, __TIME__ );
return compileDt;
}
/*-------------------------------------------------------------------------*/
const CORE::CString&
PubSub2Storage::GetClassTypeName( void ) const
{GUCEF_TRACE;
static const CORE::CString classTypeName = "PubSub2Storage";
return classTypeName;
}
/*-------------------------------------------------------------------------*/
void
PubSub2Storage::OnMetricsTimerCycle( CORE::CNotifier* notifier ,
const CORE::CEvent& eventId ,
CORE::CICloneable* eventData )
{GUCEF_TRACE;
CORE::Int32 channelId = m_storageStartChannelID;
StorageChannelMap::iterator i = m_channels.begin();
while ( i != m_channels.end() )
{
const CStorageChannel::ChannelMetrics& metrics = (*i).second->GetMetrics();
CORE::CString metricPrefix = "pubsub2storage.ch" + CORE::Int32ToString( channelId ) + ".";
//GUCEF_METRIC_TIMING( metricPrefix + "redisErrorReplies", metrics.redisErrorReplies, 1.0f );
//GUCEF_METRIC_TIMING( metricPrefix + "redisMessagesTransmitted", metrics.redisMessagesTransmitted, 1.0f );
//GUCEF_METRIC_TIMING( metricPrefix + "redisPacketsInMessagesTransmitted", metrics.redisPacketsInMsgsTransmitted, 1.0f );
//GUCEF_METRIC_GAUGE( metricPrefix + "redisPacketsInMessagesRatio", metrics.redisPacketsInMsgsRatio, 1.0f );
//GUCEF_METRIC_GAUGE( metricPrefix + "redisTransmitQueueSize", metrics.redisTransmitQueueSize, 1.0f );
//GUCEF_METRIC_TIMING( metricPrefix + "udpBytesReceived", metrics.udpBytesReceived, 1.0f );
//GUCEF_METRIC_TIMING( metricPrefix + "udpPacketsReceived", metrics.udpPacketsReceived, 1.0f );
++i; ++channelId;
}
}
/*-------------------------------------------------------------------------*/
const CORE::CValueList&
PubSub2Storage::GetAppConfig( void ) const
{
return m_appConfig;
}
/*-------------------------------------------------------------------------*/
const CORE::CDataNode&
PubSub2Storage::GetGlobalConfig( void ) const
{
return m_globalConfig;
}
/*-------------------------------------------------------------------------*/<|fim▁end|>
|
// The current container is now considered to have enough content.
|
<|file_name|>HSS2_then_IsolatedLHS.py<|end_file_name|><|fim▁begin|>from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HSS2_then_IsolatedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HSS2_then_IsolatedLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HSS2_then_IsolatedLHS, self).__init__(name='HSS2_then_IsolatedLHS', num_nodes=0, edges=[])
# Add the edges
self.add_edges([])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'SS2_then')
# Set the node attributes
# Add the attribute equations
self["equations"] = []
<|fim▁hole|> @param PreNode: Function taking an integer as parameter
and returns the node corresponding to that label.
"""
#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True<|fim▁end|>
|
def constraint(self, PreNode, graph):
"""
Executable constraint code.
|
<|file_name|>strings.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from django.utils.translation import ugettext_lazy as _<|fim▁hole|>SEARCH_FORM_KEYWORDS = _(u'Key Words / Profession')
SEARCH_FORM_LOCATION = _(u'City, State or Zip Code')
# SearchFiltersForm's strings
SEARCH_FILTERS_FORM_JOB_POSITION = _(u'Job Position')
SEARCH_FILTERS_FORM_EXPERIENCE_YEARS = _(u'Experience')
SEARCH_FILTERS_FORM_DISTANCE = _(u'Distance')
SEARCH_FILTERS_FORM_FULL_TIME = _(u'Full Time')
SEARCH_FILTERS_FORM_PART_TIME = _(u'Part Time')
SEARCH_FILTERS_FORM_VISA = _(u'Has a Visa / Visa required')<|fim▁end|>
|
# SearchForm's strings
|
<|file_name|>test_abf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from socket import inet_pton, inet_ntop, AF_INET, AF_INET6
import unittest
from framework import VppTestCase, VppTestRunner
from vpp_ip import DpoProto
from vpp_ip_route import VppIpRoute, VppRoutePath, VppMplsLabel, \
VppIpTable, FibPathProto
from vpp_acl import AclRule, VppAcl
from scapy.packet import Raw
from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP
from scapy.layers.inet6 import IPv6
from ipaddress import IPv4Network, IPv6Network
from vpp_object import VppObject
NUM_PKTS = 67
def find_abf_policy(test, id):
policies = test.vapi.abf_policy_dump()
for p in policies:
if id == p.policy.policy_id:
return True
return False
def find_abf_itf_attach(test, id, sw_if_index):
attachs = test.vapi.abf_itf_attach_dump()
for a in attachs:
if id == a.attach.policy_id and \
sw_if_index == a.attach.sw_if_index:
return True
return False
class VppAbfPolicy(VppObject):
def __init__(self,
test,
policy_id,
acl,
paths):
self._test = test
self.policy_id = policy_id
self.acl = acl
self.paths = paths
self.encoded_paths = []
for path in self.paths:
self.encoded_paths.append(path.encode())
def add_vpp_config(self):
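# program the policy into VPP via the binary API using the pre-encoded paths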
self._test.vapi.abf_policy_add_del(
1,
{'policy_id': self.policy_id,
'acl_index': self.acl.acl_index,
'n_paths': len(self.paths),
'paths': self.encoded_paths})<|fim▁hole|> self._test.vapi.abf_policy_add_del(
0,
{'policy_id': self.policy_id,
'acl_index': self.acl.acl_index,
'n_paths': len(self.paths),
'paths': self.encoded_paths})
def query_vpp_config(self):
return find_abf_policy(self._test, self.policy_id)
def object_id(self):
return ("abf-policy-%d" % self.policy_id)
class VppAbfAttach(VppObject):
def __init__(self,
test,
policy_id,
sw_if_index,
priority,
is_ipv6=0):
self._test = test
self.policy_id = policy_id
self.sw_if_index = sw_if_index
self.priority = priority
self.is_ipv6 = is_ipv6
def add_vpp_config(self):
self._test.vapi.abf_itf_attach_add_del(
1,
{'policy_id': self.policy_id,
'sw_if_index': self.sw_if_index,
'priority': self.priority,
'is_ipv6': self.is_ipv6})
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.abf_itf_attach_add_del(
0,
{'policy_id': self.policy_id,
'sw_if_index': self.sw_if_index,
'priority': self.priority,
'is_ipv6': self.is_ipv6})
def query_vpp_config(self):
return find_abf_itf_attach(self._test,
self.policy_id,
self.sw_if_index)
def object_id(self):
return ("abf-attach-%d-%d" % (self.policy_id, self.sw_if_index))
class TestAbf(VppTestCase):
""" ABF Test Case """
@classmethod
def setUpClass(cls):
super(TestAbf, cls).setUpClass()
@classmethod
def tearDownClass(cls):
super(TestAbf, cls).tearDownClass()
def setUp(self):
super(TestAbf, self).setUp()
self.create_pg_interfaces(range(5))
for i in self.pg_interfaces[:4]:
i.admin_up()
i.config_ip4()
i.resolve_arp()
i.config_ip6()
i.resolve_ndp()
def tearDown(self):
for i in self.pg_interfaces:
i.unconfig_ip4()
i.unconfig_ip6()
i.admin_down()
super(TestAbf, self).tearDown()
def test_abf4(self):
""" IPv4 ACL Based Forwarding
"""
#
# We are not testing the various matching capabilities
# of ACLs, that's done elsewhere. Here we are testing
# the application of ACLs to a forwarding path to achieve
# ABF
# So we construct just a few ACLs to ensure the ABF policies
# are correctly constructed and used. And a few path types
# to test the API path decoding.
#
#
# Rule 1
#
rule_1 = AclRule(is_permit=1, proto=17, ports=1234,
src_prefix=IPv4Network("1.1.1.1/32"),
dst_prefix=IPv4Network("1.1.1.2/32"))
acl_1 = VppAcl(self, rules=[rule_1])
acl_1.add_vpp_config()
#
# ABF policy for ACL 1 - path via interface 1
#
abf_1 = VppAbfPolicy(self, 10, acl_1,
[VppRoutePath(self.pg1.remote_ip4,
self.pg1.sw_if_index)])
abf_1.add_vpp_config()
#
# Attach the policy to input interface Pg0
#
attach_1 = VppAbfAttach(self, 10, self.pg0.sw_if_index, 50)
attach_1.add_vpp_config()
#
# fire in packet matching the ACL src,dst. If it's forwarded
# then the ABF was successful, since default routing will drop it
#
p_1 = (Ether(src=self.pg0.remote_mac,
dst=self.pg0.local_mac) /
IP(src="1.1.1.1", dst="1.1.1.2") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect(self.pg0, p_1*NUM_PKTS, self.pg1)
#
# Attach a 'better' priority policy to the same interface
#
abf_2 = VppAbfPolicy(self, 11, acl_1,
[VppRoutePath(self.pg2.remote_ip4,
self.pg2.sw_if_index)])
abf_2.add_vpp_config()
attach_2 = VppAbfAttach(self, 11, self.pg0.sw_if_index, 40)
attach_2.add_vpp_config()
self.send_and_expect(self.pg0, p_1*NUM_PKTS, self.pg2)
#
# Attach a policy with priority in the middle
#
abf_3 = VppAbfPolicy(self, 12, acl_1,
[VppRoutePath(self.pg3.remote_ip4,
self.pg3.sw_if_index)])
abf_3.add_vpp_config()
attach_3 = VppAbfAttach(self, 12, self.pg0.sw_if_index, 45)
attach_3.add_vpp_config()
self.send_and_expect(self.pg0, p_1*NUM_PKTS, self.pg2)
#
# remove the best priority
#
attach_2.remove_vpp_config()
self.send_and_expect(self.pg0, p_1*NUM_PKTS, self.pg3)
#
# Attach one of the same policies to Pg1
#
attach_4 = VppAbfAttach(self, 12, self.pg1.sw_if_index, 45)
attach_4.add_vpp_config()
p_2 = (Ether(src=self.pg1.remote_mac,
dst=self.pg1.local_mac) /
IP(src="1.1.1.1", dst="1.1.1.2") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect(self.pg1, p_2 * NUM_PKTS, self.pg3)
#
# detach the policy from PG1, now expect traffic to be dropped
#
attach_4.remove_vpp_config()
self.send_and_assert_no_replies(self.pg1, p_2 * NUM_PKTS, "Detached")
#
# Swap to route via a next-hop in the non-default table
#
table_20 = VppIpTable(self, 20)
table_20.add_vpp_config()
self.pg4.set_table_ip4(table_20.table_id)
self.pg4.admin_up()
self.pg4.config_ip4()
self.pg4.resolve_arp()
abf_13 = VppAbfPolicy(self, 13, acl_1,
[VppRoutePath(self.pg4.remote_ip4,
0xffffffff,
nh_table_id=table_20.table_id)])
abf_13.add_vpp_config()
attach_5 = VppAbfAttach(self, 13, self.pg0.sw_if_index, 30)
attach_5.add_vpp_config()
self.send_and_expect(self.pg0, p_1*NUM_PKTS, self.pg4)
self.pg4.unconfig_ip4()
self.pg4.set_table_ip4(0)
def test_abf6(self):
""" IPv6 ACL Based Forwarding
"""
#
# Simple test for matching IPv6 packets
#
#
# Rule 1
#
rule_1 = AclRule(is_permit=1, proto=17, ports=1234,
src_prefix=IPv6Network("2001::2/128"),
dst_prefix=IPv6Network("2001::1/128"))
acl_1 = VppAcl(self, rules=[rule_1])
acl_1.add_vpp_config()
#
# ABF policy for ACL 1 - path via interface 1
#
abf_1 = VppAbfPolicy(self, 10, acl_1,
[VppRoutePath("3001::1",
0xffffffff)])
abf_1.add_vpp_config()
attach_1 = VppAbfAttach(self, 10, self.pg0.sw_if_index,
45, is_ipv6=True)
attach_1.add_vpp_config()
#
# a packet matching the rule
#
p = (Ether(src=self.pg0.remote_mac,
dst=self.pg0.local_mac) /
IPv6(src="2001::2", dst="2001::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
#
# packets are dropped because there is no route to the policy's
# next hop
#
self.send_and_assert_no_replies(self.pg1, p * NUM_PKTS, "no route")
#
# add a route resolving the next-hop
#
route = VppIpRoute(self, "3001::1", 32,
[VppRoutePath(self.pg1.remote_ip6,
self.pg1.sw_if_index)])
route.add_vpp_config()
#
# now expect packets forwarded.
#
self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg1)
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)<|fim▁end|>
|
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
|
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2017 Didotech srl (www.didotech.com)<|fim▁hole|>
{
"name": "BoM Warning",
"version": "4.0.1.2",
"depends": [
"mrp",
"base",
"product",
"warning"
],
"author": "Didotech srl",
"description": """
This module is aim to track the warning on Bills of Material.
""",
"website": "https://www.didotech.com",
"category": "Manufacture Resource Planning",
"data": [
'views/product_view.xml',
'views/mrp_bom_view.xml'
],
"demo": [],
"active": False,
"installable": True,
}<|fim▁end|>
| |
<|file_name|>text_input.py<|end_file_name|><|fim▁begin|>"""
This module defines the following constants:
*InputText options*
* BGUI_INPUT_NONE = 0
* BGUI_INPUT_SELECT_ALL = 1
* BGUI_INPUT_DEFAULT = BGUI_INPUT_NONE
"""
from .widget import Widget, WeakMethod, BGUI_DEFAULT, BGUI_CENTERY, \
BGUI_NO_FOCUS, BGUI_MOUSE_ACTIVE, BGUI_MOUSE_CLICK, BGUI_MOUSE_RELEASE, \
BGUI_NO_NORMALIZE
from .key_defs import *
from .label import Label
from .frame import Frame
import time
# InputText options
BGUI_INPUT_NONE = 0
BGUI_INPUT_SELECT_ALL = 1
BGUI_INPUT_DEFAULT = BGUI_INPUT_NONE
class TextInput(Widget):
"""Widget for getting text input"""
theme_section = 'TextInput'
theme_options = {
'TextColor': (1, 1, 1, 1),
'FrameColor': (0, 0, 0, 0),
'BorderSize': 0,
'BorderColor': (0, 0, 0, 0),
'HighlightColor': (0.6, 0.6, 0.6, 0.5),
'InactiveTextColor': (1, 1, 1, 1),
'InactiveFrameColor': (0, 0, 0, 0),
'InactiveBorderSize': 0,
'InactiveBorderColor': (0, 0, 0, 0),
'InactiveHighlightColor': (0.6, 0.6, 0.6, 0.5),
'LabelSubTheme': '',
}
def __init__(self, parent, name=None, text="", prefix="", font=None, pt_size=None, color=None,
aspect=None, size=[1, 1], pos=[0, 0], sub_theme='', input_options=BGUI_INPUT_DEFAULT, options=BGUI_DEFAULT):
"""
:param parent: the widget's parent
:param name: the name of the widget
:param text: the text to display (this can be changed later via the text property)
:param prefix: prefix text displayed before user input, cannot be edited by user (this can be changed later via the prefix property)
:param font: the font to use
:param pt_size: the point size of the text to draw
:param color: color of the font for this widget
:param aspect: constrain the widget size to a specified aspect ratio
:param size: a tuple containing the width and height
:param pos: a tuple containing the x and y position
:param sub_theme: name of a sub_theme defined in the theme file (similar to CSS classes)
:param options: various other options
"""
Widget.__init__(self, parent, name, aspect, size, pos, sub_theme, options)
self.text_prefix = prefix
self.pos = len(text)
self.input_options = input_options
self.colors = {}
#create widgets
self.frame = Frame(self, size=[1, 1], options=BGUI_NO_FOCUS | BGUI_DEFAULT | BGUI_CENTERY)
self.highlight = Frame(self, size=self.frame.size, border=0, options=BGUI_NO_FOCUS | BGUI_CENTERY | BGUI_NO_NORMALIZE)
self.cursor = Frame(self, size=[1, 1], border=0, options=BGUI_NO_FOCUS | BGUI_CENTERY | BGUI_NO_NORMALIZE)
self.label = Label(self, text=text, font=font, pt_size=pt_size, sub_theme=self.theme['LabelSubTheme'], options=BGUI_NO_FOCUS | BGUI_DEFAULT)
#Color and setting initialization
self.colormode = 0
theme = self.theme
self.colors["text"] = [None, None]
self.colors["text"][0] = theme['InactiveTextColor']
self.colors["text"][1] = theme['TextColor']
self.colors["frame"] = [None, None]
self.colors["frame"][0] = theme['InactiveFrameColor']
self.colors["frame"][1] = theme['FrameColor']
self.colors["border"] = [None, None]
self.colors["border"][0] = theme['InactiveBorderColor']
self.colors["border"][1] = theme['BorderColor']
self.colors["highlight"] = [None, None]
self.colors["highlight"][0] = theme['HighlightColor']
self.colors["highlight"][1] = theme['HighlightColor']
self.border_size = [None, None]
self.border_size[0] = theme['InactiveBorderSize']
self.border_size[1] = theme['BorderSize']
self.swapcolors(0)
#gauge height of the drawn font
fd = self.system.textlib.dimensions(self.label.fontid, "Egj/}|^,")
py = .5 - (fd[1] / self.size[1] / 2)
px = fd[1] / self.size[0] - fd[1] / 1.5 / self.size[0]
self.label.position = [px, py]
self.fd = self.system.textlib.dimensions(self.label.fontid, self.text_prefix)[0] + fd[1] / 3.2
self.frame.size = [1, 1]
self.frame.position = [0, 0]
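#selection state: slice = [start, end] in characters; slice_direction tracks
#which end the cursor extends (-1 = left, 0 = no selection, 1 = right)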
self.slice = [len(text), len(text)]
self.slice_direction = 0
self.mouse_slice_start = 0
self.mouse_slice_end = 0
#create the char width list
self._update_char_widths()
#initial call to update_selection
self.selection_refresh = 1
self.just_activated = 0
self._active = 0 # internal active state to avoid confusion from parent active chain
#blinking cursor
self.time = time.time()
#double/triple click functionality
self.click_counter = 0
self.single_click_time = 0.0
self.double_click_time = 0.0
# On Enter callback
self._on_enter_key = None
@property
def text(self):
return self.label.text
@text.setter
def text(self, value):
#setter intended for external access, internal changes can just change self.label.text
self.label.text = value
self._update_char_widths()
self.slice = [0, 0]
self.update_selection()
@property
def prefix(self):
return self.text_prefix
@prefix.setter
def prefix(self, value):
fd = self.system.textlib.dimensions(self.label.fontid, "Egj/}|^,")
self.fd = self.system.textlib.dimensions(self.label.fontid, value)[0] + fd[1] / 3.2
self.text_prefix = value
@property
def on_enter_key(self):
"""A callback for when the enter key is pressed while the TextInput has focus"""
return self._on_enter_key
@on_enter_key.setter
def on_enter_key(self, value):
self._on_enter_key = WeakMethod(value)
#utility functions
def _update_char_widths(self):
self.char_widths = []
for char in self.text:
self.char_widths.append(self.system.textlib.dimensions(self.label.fontid, char * 20)[0] / 20)
def select_all(self):
"""Change the selection to include all of the text"""
self.slice = [0, len(self.text)]
self.update_selection()
def select_none(self):
"""Change the selection to include none of the text"""
self.slice = [0, 0]
self.update_selection()
#Activation Code
def activate(self):
if self.frozen:
return
self.system.focused_widget = self
self.swapcolors(1)
self.colormode = 1
if self.input_options & BGUI_INPUT_SELECT_ALL:
self.slice = [0, len(self.text)]
self.slice_direction = -1
self.just_activated = 1
self._active = 1
def deactivate(self):
self.system.focused_widget = self.system
self.swapcolors(0)
self.colormode = 0
self.just_activated = 0
self._active = 0
def swapcolors(self, state=0): # 0 inactive 1 active
self.frame.colors = [self.colors["frame"][state]] * 4
self.frame.border = self.border_size[state]
self.frame.border_color = self.colors["border"][state]
self.highlight.colors = [self.colors["highlight"][state]] * 4
self.label.color = self.colors["text"][state]
if state == 0:
self.cursor.colors = [[0.0, 0.0, 0.0, 0.0]] * 4
else:
self.cursor.colors = [self.colors["text"][state]] * 4
#Selection Code
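#update_selection converts the character slice into pixel coordinates for the highlight frame and the cursor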
def update_selection(self):
left = self.fd + self.system.textlib.dimensions(self.label.fontid, self.text[:self.slice[0]])[0]
right = self.fd + self.system.textlib.dimensions(self.label.fontid, self.text[:self.slice[1]])[0]
self.highlight.position = [left, 1]
self.highlight.size = [right - left, self.frame.size[1] * .8]
if self.slice_direction in [0, -1]:
self.cursor.position = [left, 1]
else:
self.cursor.position = [right, 1]
self.cursor.size = [2, self.frame.size[1] * .8]
def find_mouse_slice(self, pos):
cmc = self.calc_mouse_cursor(pos)
mss = self.mouse_slice_start
self.mouse_slice_end = cmc
if cmc < mss:
self.slice_direction = -1
self.slice = [self.mouse_slice_end, self.mouse_slice_start]
elif cmc > mss:
self.slice_direction = 1
self.slice = [self.mouse_slice_start, self.mouse_slice_end]
else:
self.slice_direction = 0
self.slice = [self.mouse_slice_start, self.mouse_slice_start]
self.selection_refresh = 1
def calc_mouse_cursor(self, pos):
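#accumulate per-character widths to find the character boundary nearest the mouse x position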
adj_pos = pos[0] - (self.position[0] + self.fd)
find_slice = 0
i = 0
for entry in self.char_widths:
if find_slice + entry > adj_pos:
if abs((find_slice + entry) - adj_pos) >= abs(adj_pos - find_slice):
return i
else:
return i + 1
else:
find_slice += entry
i += 1
self.time = time.time() - 0.501
return i
def _handle_mouse(self, pos, event):
"""Extend function's behaviour by providing focus to unfrozen inactive TextInput,
swapping out colors.
"""
if self.frozen:
return
if event == BGUI_MOUSE_CLICK:
self.mouse_slice_start = self.calc_mouse_cursor(pos)
if not self._active:
self.activate()
if not self.input_options & BGUI_INPUT_SELECT_ALL:
self.find_mouse_slice(pos)
elif event == BGUI_MOUSE_ACTIVE:
if not self.just_activated or self.just_activated and not self.input_options & BGUI_INPUT_SELECT_ALL:
self.find_mouse_slice(pos)
if event == BGUI_MOUSE_RELEASE:
self.selection_refresh = 1
if self.slice[0] == self.slice[1]:
self.slice_direction = 0
self.just_activated = 0
#work out single / double / triple clicks
if self.click_counter == 0:
self.single_click_time = time.time()
self.click_counter = 1
elif self.click_counter == 1:
if time.time() - self.single_click_time < .2:
self.click_counter = 2
self.double_click_time = time.time()
words = self.text.split(" ")
i = 0
for entry in words:
if self.slice[0] < i + len(entry):
self.slice = [i, i + len(entry) + 1]
break
i += len(entry) + 1
else:
self.click_counter = 1
self.single_click_time = time.time()
elif self.click_counter == 2:
if time.time() - self.double_click_time < .2:
self.click_counter = 3
self.slice = [0, len(self.text)]
self.slice_direction = -1
else:
self.click_counter = 1
self.single_click_time = time.time()
elif self.click_counter == 3:
self.single_click_time = time.time()
self.click_counter = 1
self.time = time.time()
Widget._handle_mouse(self, pos, event)
def _handle_key(self, key, is_shifted):
"""Handle any keyboard input"""
if self != self.system.focused_widget:
return
# Try char to int conversion for alphanumeric keys... kinda hacky though
try:
key = ord(key)
except:
pass
if is_shifted:
sh = 0 #used for slicing
else:
sh = 1
slice_len = abs(self.slice[0] - self.slice[1])
x, y = 0, 0
if key == BACKSPACEKEY:
if slice_len != 0:
self.label.text = self.text[:self.slice[0]] + self.text[self.slice[1]:]
self.char_widths = self.char_widths[:self.slice[0]] + self.char_widths[self.slice[1]:]
self.slice = [self.slice[0], self.slice[0]]
#handle char length list
elif self.slice[0] > 0:
self.label.text = self.text[:self.slice[0] - 1] + self.text[self.slice[1]:]
self.slice = [self.slice[0] - 1, self.slice[1] - 1]
elif key == DELKEY:
if slice_len != 0:
self.label.text = self.text[:self.slice[0]] + self.text[self.slice[1]:]
self.char_widths = self.char_widths[:self.slice[0]] + self.char_widths[self.slice[1]:]
self.slice = [self.slice[0], self.slice[0]]
elif self.slice[1] < len(self.text):
self.label.text = self.text[:self.slice[0]] + self.text[self.slice[1] + 1:]
elif key == LEFTARROWKEY:
slice_len = abs(self.slice[0] - self.slice[1])
if (self.slice_direction in [-1, 0]):
if is_shifted and self.slice[0] > 0:
self.slice = [self.slice[0] - 1, self.slice[1]]
self.slice_direction = -1
elif is_shifted:
pass
else:
if slice_len > 0:
self.slice = [self.slice[0], self.slice[0]]
elif self.slice[0] > 0:
self.slice = [self.slice[0] - 1, self.slice[0] - 1]
self.slice_direction = 0
elif self.slice_direction == 1:
if is_shifted:
self.slice = [self.slice[0], self.slice[1] - 1]
else:
self.slice = [self.slice[0], self.slice[0]]
if self.slice[0] - self.slice[1] == 0:
self.slice_direction = 0
elif key == RIGHTARROWKEY:
slice_len = abs(self.slice[0] - self.slice[1])
if (self.slice_direction in [1, 0]):
if is_shifted and self.slice[1] < len(self.text):
self.slice = [self.slice[0], self.slice[1] + 1]
self.slice_direction = 1
elif is_shifted:
pass
else:
if slice_len > 0:
self.slice = [self.slice[1], self.slice[1]]
elif self.slice[1] < len(self.text):
self.slice = [self.slice[1] + 1, self.slice[1] + 1]
self.slice_direction = 0
elif self.slice_direction == -1:
if is_shifted:
self.slice = [self.slice[0] + 1, self.slice[1]]
else:
self.slice = [self.slice[1], self.slice[1]]
if self.slice[0] - self.slice[1] == 0:
self.slice_direction = 0
else:
char = None
if ord(AKEY) <= key <= ord(ZKEY):
if is_shifted: char = chr(key - 32)
else: char = chr(key)
elif ord(ZEROKEY) <= key <= ord(NINEKEY):
if not is_shifted: char = chr(key)
else:
key = chr(key)
if key == ZEROKEY: char = ")"
elif key == ONEKEY: char = "!"
elif key == TWOKEY: char = "@"
elif key == THREEKEY: char = "#"
elif key == FOURKEY: char = "$"
elif key == FIVEKEY: char = "%"
elif key == SIXKEY: char = "^"
elif key == SEVENKEY: char = "&"
elif key == EIGHTKEY: char = "*"
elif key == NINEKEY: char = "("
elif PAD0 <= key <= PAD9:
char = str(key - PAD0)
elif key == PADPERIOD: char = "."
elif key == PADSLASHKEY: char = "/"
elif key == PADASTERKEY: char = "*"
elif key == PADMINUS: char = "-"
elif key == PADPLUSKEY: char = "+"
elif key == SPACEKEY: char = " "
#elif key == TABKEY: char = "\t"
elif key in (ENTERKEY, PADENTER):
if self.on_enter_key:
self.on_enter_key(self)
elif not is_shifted:
if key == ACCENTGRAVEKEY: char = "`"
elif key == MINUSKEY: char = "-"
elif key == EQUALKEY: char = "="
elif key == LEFTBRACKETKEY: char = "["
elif key == RIGHTBRACKETKEY: char = "]"
elif key == BACKSLASHKEY: char = "\\"
elif key == SEMICOLONKEY: char = ";"
elif key == QUOTEKEY: char = "'"
elif key == COMMAKEY: char = ","
elif key == PERIODKEY: char = "."
elif key == SLASHKEY: char = "/"
else:
if key == ACCENTGRAVEKEY: char = "~"<|fim▁hole|> elif key == EQUALKEY: char = "+"
elif key == LEFTBRACKETKEY: char = "{"
elif key == RIGHTBRACKETKEY: char = "}"
elif key == BACKSLASHKEY: char = "|"
elif key == SEMICOLONKEY: char = ":"
elif key == QUOTEKEY: char = '"'
elif key == COMMAKEY: char = "<"
elif key == PERIODKEY: char = ">"
elif key == SLASHKEY: char = "?"
if char:
#need option to limit text to length of box
#need to replace all selected text with new char
#need copy place somewhere
self.label.text = self.text[:self.slice[0]] + char + self.text[self.slice[1]:]
self.char_widths = self.char_widths[:self.slice[0]] + [self.system.textlib.dimensions(self.label.fontid, char * 20)[0] / 20] + self.char_widths[self.slice[1]:]
self.slice = [self.slice[0] + 1, self.slice[0] + 1]
self.slice_direction = 0
#update selection widgets after next draw call
self.selection_refresh = 1
#ensure cursor is not hidden
self.time = time.time()
def _draw(self):
temp = self.text
self.label.text = self.text_prefix + temp
if self == self.system.focused_widget and self._active == 0:
self.activate()
# Now draw the children
Widget._draw(self)
self.label.text = temp
if self.colormode == 1 and self.system.focused_widget != self:
self._active = 0
self.swapcolors(0)
self.virgin = 1
self.colormode = 0
#selection code needs to be called after draw, which is tracked internally to TextInput
if self.selection_refresh == 1:
self.update_selection()
self.selection_refresh = 0
#handle blinking cursor
if self.slice[0] - self.slice[1] == 0 and self._active:
if time.time() - self.time > 1.0:
self.time = time.time()
elif time.time() - self.time > 0.5:
self.cursor.colors = [[0.0, 0.0, 0.0, 0.0]] * 4
else:
self.cursor.colors = [self.colors["text"][1]] * 4
else:
self.cursor.colors = [[0.0, 0.0, 0.0, 0.0]] * 4<|fim▁end|>
|
elif key == MINUSKEY: char = "_"
|
<|file_name|>aurelia-datatable.js<|end_file_name|><|fim▁begin|>'use strict';
exports.__esModule = true;
exports.configure = configure;
var _aureliaViewManager = require('aurelia-view-manager');
<|fim▁hole|>var _convertManager = require('./convert-manager');
function configure(aurelia) {
aurelia.plugin('aurelia-pager');
aurelia.container.get(_aureliaViewManager.Config).configureNamespace('spoonx/datatable', {
location: './{{framework}}/{{view}}.html'
});
aurelia.globalResources('./datatable');
}<|fim▁end|>
|
var _datatable = require('./datatable');
var _columnsFilter = require('./columns-filter');
|
<|file_name|>register_table.rs<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
use super::super::super::Register;
use std::collections::HashMap;
lazy_static! {
pub(super) static ref TO_REGISTER_HASH: HashMap<&'static str, Register> = {
// GENERATOR-BEGIN: RegisterHash
// ⚠️This was generated by GENERATOR!🦹♂️
let mut h = HashMap::with_capacity(249);
let _ = h.insert("none", Register::None);
let _ = h.insert("al", Register::AL);
let _ = h.insert("cl", Register::CL);
let _ = h.insert("dl", Register::DL);
let _ = h.insert("bl", Register::BL);
let _ = h.insert("ah", Register::AH);
let _ = h.insert("ch", Register::CH);
let _ = h.insert("dh", Register::DH);
let _ = h.insert("bh", Register::BH);
let _ = h.insert("spl", Register::SPL);
let _ = h.insert("bpl", Register::BPL);
let _ = h.insert("sil", Register::SIL);
let _ = h.insert("dil", Register::DIL);
let _ = h.insert("r8l", Register::R8L);
let _ = h.insert("r9l", Register::R9L);
let _ = h.insert("r10l", Register::R10L);
let _ = h.insert("r11l", Register::R11L);
let _ = h.insert("r12l", Register::R12L);
let _ = h.insert("r13l", Register::R13L);
let _ = h.insert("r14l", Register::R14L);
let _ = h.insert("r15l", Register::R15L);
let _ = h.insert("ax", Register::AX);
let _ = h.insert("cx", Register::CX);
let _ = h.insert("dx", Register::DX);
let _ = h.insert("bx", Register::BX);
let _ = h.insert("sp", Register::SP);
let _ = h.insert("bp", Register::BP);
let _ = h.insert("si", Register::SI);
let _ = h.insert("di", Register::DI);
let _ = h.insert("r8w", Register::R8W);
let _ = h.insert("r9w", Register::R9W);
let _ = h.insert("r10w", Register::R10W);
let _ = h.insert("r11w", Register::R11W);
let _ = h.insert("r12w", Register::R12W);
let _ = h.insert("r13w", Register::R13W);
let _ = h.insert("r14w", Register::R14W);
let _ = h.insert("r15w", Register::R15W);
let _ = h.insert("eax", Register::EAX);
let _ = h.insert("ecx", Register::ECX);
let _ = h.insert("edx", Register::EDX);
let _ = h.insert("ebx", Register::EBX);
let _ = h.insert("esp", Register::ESP);
let _ = h.insert("ebp", Register::EBP);
let _ = h.insert("esi", Register::ESI);
let _ = h.insert("edi", Register::EDI);
let _ = h.insert("r8d", Register::R8D);
let _ = h.insert("r9d", Register::R9D);
let _ = h.insert("r10d", Register::R10D);
let _ = h.insert("r11d", Register::R11D);
let _ = h.insert("r12d", Register::R12D);
let _ = h.insert("r13d", Register::R13D);
let _ = h.insert("r14d", Register::R14D);
let _ = h.insert("r15d", Register::R15D);
let _ = h.insert("rax", Register::RAX);
let _ = h.insert("rcx", Register::RCX);
let _ = h.insert("rdx", Register::RDX);
let _ = h.insert("rbx", Register::RBX);
let _ = h.insert("rsp", Register::RSP);
let _ = h.insert("rbp", Register::RBP);
let _ = h.insert("rsi", Register::RSI);
let _ = h.insert("rdi", Register::RDI);
let _ = h.insert("r8", Register::R8);
let _ = h.insert("r9", Register::R9);
let _ = h.insert("r10", Register::R10);
let _ = h.insert("r11", Register::R11);
let _ = h.insert("r12", Register::R12);
let _ = h.insert("r13", Register::R13);
let _ = h.insert("r14", Register::R14);
let _ = h.insert("r15", Register::R15);
let _ = h.insert("eip", Register::EIP);
let _ = h.insert("rip", Register::RIP);
let _ = h.insert("es", Register::ES);
let _ = h.insert("cs", Register::CS);
let _ = h.insert("ss", Register::SS);
let _ = h.insert("ds", Register::DS);
let _ = h.insert("fs", Register::FS);
let _ = h.insert("gs", Register::GS);
let _ = h.insert("xmm0", Register::XMM0);
let _ = h.insert("xmm1", Register::XMM1);
let _ = h.insert("xmm2", Register::XMM2);
let _ = h.insert("xmm3", Register::XMM3);
let _ = h.insert("xmm4", Register::XMM4);
let _ = h.insert("xmm5", Register::XMM5);
let _ = h.insert("xmm6", Register::XMM6);
let _ = h.insert("xmm7", Register::XMM7);
let _ = h.insert("xmm8", Register::XMM8);
let _ = h.insert("xmm9", Register::XMM9);
let _ = h.insert("xmm10", Register::XMM10);
let _ = h.insert("xmm11", Register::XMM11);
let _ = h.insert("xmm12", Register::XMM12);
let _ = h.insert("xmm13", Register::XMM13);
let _ = h.insert("xmm14", Register::XMM14);
let _ = h.insert("xmm15", Register::XMM15);
let _ = h.insert("xmm16", Register::XMM16);
let _ = h.insert("xmm17", Register::XMM17);
let _ = h.insert("xmm18", Register::XMM18);
let _ = h.insert("xmm19", Register::XMM19);
let _ = h.insert("xmm20", Register::XMM20);
let _ = h.insert("xmm21", Register::XMM21);
let _ = h.insert("xmm22", Register::XMM22);
let _ = h.insert("xmm23", Register::XMM23);
let _ = h.insert("xmm24", Register::XMM24);
let _ = h.insert("xmm25", Register::XMM25);
let _ = h.insert("xmm26", Register::XMM26);
let _ = h.insert("xmm27", Register::XMM27);
let _ = h.insert("xmm28", Register::XMM28);
let _ = h.insert("xmm29", Register::XMM29);
let _ = h.insert("xmm30", Register::XMM30);
let _ = h.insert("xmm31", Register::XMM31);
let _ = h.insert("ymm0", Register::YMM0);
let _ = h.insert("ymm1", Register::YMM1);
let _ = h.insert("ymm2", Register::YMM2);
let _ = h.insert("ymm3", Register::YMM3);
let _ = h.insert("ymm4", Register::YMM4);
let _ = h.insert("ymm5", Register::YMM5);
let _ = h.insert("ymm6", Register::YMM6);
let _ = h.insert("ymm7", Register::YMM7);
let _ = h.insert("ymm8", Register::YMM8);
let _ = h.insert("ymm9", Register::YMM9);
let _ = h.insert("ymm10", Register::YMM10);
let _ = h.insert("ymm11", Register::YMM11);
let _ = h.insert("ymm12", Register::YMM12);
let _ = h.insert("ymm13", Register::YMM13);
let _ = h.insert("ymm14", Register::YMM14);
let _ = h.insert("ymm15", Register::YMM15);
let _ = h.insert("ymm16", Register::YMM16);
let _ = h.insert("ymm17", Register::YMM17);
let _ = h.insert("ymm18", Register::YMM18);
let _ = h.insert("ymm19", Register::YMM19);
let _ = h.insert("ymm20", Register::YMM20);
let _ = h.insert("ymm21", Register::YMM21);
let _ = h.insert("ymm22", Register::YMM22);
let _ = h.insert("ymm23", Register::YMM23);
let _ = h.insert("ymm24", Register::YMM24);
let _ = h.insert("ymm25", Register::YMM25);
let _ = h.insert("ymm26", Register::YMM26);
let _ = h.insert("ymm27", Register::YMM27);
let _ = h.insert("ymm28", Register::YMM28);
let _ = h.insert("ymm29", Register::YMM29);
let _ = h.insert("ymm30", Register::YMM30);
let _ = h.insert("ymm31", Register::YMM31);
let _ = h.insert("zmm0", Register::ZMM0);
let _ = h.insert("zmm1", Register::ZMM1);
let _ = h.insert("zmm2", Register::ZMM2);
let _ = h.insert("zmm3", Register::ZMM3);
let _ = h.insert("zmm4", Register::ZMM4);
let _ = h.insert("zmm5", Register::ZMM5);
let _ = h.insert("zmm6", Register::ZMM6);
let _ = h.insert("zmm7", Register::ZMM7);
let _ = h.insert("zmm8", Register::ZMM8);
let _ = h.insert("zmm9", Register::ZMM9);
let _ = h.insert("zmm10", Register::ZMM10);
let _ = h.insert("zmm11", Register::ZMM11);
let _ = h.insert("zmm12", Register::ZMM12);
let _ = h.insert("zmm13", Register::ZMM13);
let _ = h.insert("zmm14", Register::ZMM14);
let _ = h.insert("zmm15", Register::ZMM15);
let _ = h.insert("zmm16", Register::ZMM16);
let _ = h.insert("zmm17", Register::ZMM17);
let _ = h.insert("zmm18", Register::ZMM18);
let _ = h.insert("zmm19", Register::ZMM19);
let _ = h.insert("zmm20", Register::ZMM20);
let _ = h.insert("zmm21", Register::ZMM21);
let _ = h.insert("zmm22", Register::ZMM22);
let _ = h.insert("zmm23", Register::ZMM23);
let _ = h.insert("zmm24", Register::ZMM24);
let _ = h.insert("zmm25", Register::ZMM25);
let _ = h.insert("zmm26", Register::ZMM26);
let _ = h.insert("zmm27", Register::ZMM27);
let _ = h.insert("zmm28", Register::ZMM28);
let _ = h.insert("zmm29", Register::ZMM29);
let _ = h.insert("zmm30", Register::ZMM30);
let _ = h.insert("zmm31", Register::ZMM31);
let _ = h.insert("k0", Register::K0);
let _ = h.insert("k1", Register::K1);
let _ = h.insert("k2", Register::K2);
let _ = h.insert("k3", Register::K3);
let _ = h.insert("k4", Register::K4);<|fim▁hole|> let _ = h.insert("k7", Register::K7);
let _ = h.insert("bnd0", Register::BND0);
let _ = h.insert("bnd1", Register::BND1);
let _ = h.insert("bnd2", Register::BND2);
let _ = h.insert("bnd3", Register::BND3);
let _ = h.insert("cr0", Register::CR0);
let _ = h.insert("cr1", Register::CR1);
let _ = h.insert("cr2", Register::CR2);
let _ = h.insert("cr3", Register::CR3);
let _ = h.insert("cr4", Register::CR4);
let _ = h.insert("cr5", Register::CR5);
let _ = h.insert("cr6", Register::CR6);
let _ = h.insert("cr7", Register::CR7);
let _ = h.insert("cr8", Register::CR8);
let _ = h.insert("cr9", Register::CR9);
let _ = h.insert("cr10", Register::CR10);
let _ = h.insert("cr11", Register::CR11);
let _ = h.insert("cr12", Register::CR12);
let _ = h.insert("cr13", Register::CR13);
let _ = h.insert("cr14", Register::CR14);
let _ = h.insert("cr15", Register::CR15);
let _ = h.insert("dr0", Register::DR0);
let _ = h.insert("dr1", Register::DR1);
let _ = h.insert("dr2", Register::DR2);
let _ = h.insert("dr3", Register::DR3);
let _ = h.insert("dr4", Register::DR4);
let _ = h.insert("dr5", Register::DR5);
let _ = h.insert("dr6", Register::DR6);
let _ = h.insert("dr7", Register::DR7);
let _ = h.insert("dr8", Register::DR8);
let _ = h.insert("dr9", Register::DR9);
let _ = h.insert("dr10", Register::DR10);
let _ = h.insert("dr11", Register::DR11);
let _ = h.insert("dr12", Register::DR12);
let _ = h.insert("dr13", Register::DR13);
let _ = h.insert("dr14", Register::DR14);
let _ = h.insert("dr15", Register::DR15);
let _ = h.insert("st0", Register::ST0);
let _ = h.insert("st1", Register::ST1);
let _ = h.insert("st2", Register::ST2);
let _ = h.insert("st3", Register::ST3);
let _ = h.insert("st4", Register::ST4);
let _ = h.insert("st5", Register::ST5);
let _ = h.insert("st6", Register::ST6);
let _ = h.insert("st7", Register::ST7);
let _ = h.insert("mm0", Register::MM0);
let _ = h.insert("mm1", Register::MM1);
let _ = h.insert("mm2", Register::MM2);
let _ = h.insert("mm3", Register::MM3);
let _ = h.insert("mm4", Register::MM4);
let _ = h.insert("mm5", Register::MM5);
let _ = h.insert("mm6", Register::MM6);
let _ = h.insert("mm7", Register::MM7);
let _ = h.insert("tr0", Register::TR0);
let _ = h.insert("tr1", Register::TR1);
let _ = h.insert("tr2", Register::TR2);
let _ = h.insert("tr3", Register::TR3);
let _ = h.insert("tr4", Register::TR4);
let _ = h.insert("tr5", Register::TR5);
let _ = h.insert("tr6", Register::TR6);
let _ = h.insert("tr7", Register::TR7);
let _ = h.insert("tmm0", Register::TMM0);
let _ = h.insert("tmm1", Register::TMM1);
let _ = h.insert("tmm2", Register::TMM2);
let _ = h.insert("tmm3", Register::TMM3);
let _ = h.insert("tmm4", Register::TMM4);
let _ = h.insert("tmm5", Register::TMM5);
let _ = h.insert("tmm6", Register::TMM6);
let _ = h.insert("tmm7", Register::TMM7);
// GENERATOR-END: RegisterHash
h
};
}<|fim▁end|>
|
let _ = h.insert("k5", Register::K5);
let _ = h.insert("k6", Register::K6);
|
<|file_name|>ex01.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Sample String : "Twinkle, twinkle, little star, How I wonder what you are! Up above the world so high, Like a diamond
# in the sky. Twinkle, twinkle, little star, How I wonder what you are" Output :
# Twinkle, twinkle, little star,
# How I wonder what you are!
# Up above the world so high,
# Like a diamond in the sky.
# Twinkle, twinkle, little star,
# How I wonder what you are
string = """
Twinkle, twinkle, little star,
\tHow I wonder what you are!
\t\tUp above the world so high,
\t\tLike a diamond in the sky.
Twinkle, twinkle, little star,
\tHow I wonder what you are
"""
print string<|fim▁end|>
|
# https://www.w3resource.com/python-exercises/
# 1. Write a Python program to print the following string in a specific format (see the output).
|
<|file_name|>rec.rs<|end_file_name|><|fim▁begin|>use super::Fibonacci;
pub struct Recursive;
impl Fibonacci for &Recursive {
fn fib(self, n: u64) -> u64 {
if n == 0 || n == 1 {
1
} else {
self.fib(n - 1) + self.fib(n - 2)
}
}
}
#[cfg(test)]
mod tests {
use super::super::Fibonacci;
use super::Recursive;
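    // Each fib_test! invocation below expands to one #[test] function that
    // checks any number of (input, expected) pairs against fib().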
macro_rules! fib_test {
($name:ident, $($i:expr, $e:expr),+) => {
#[test]
fn $name() {
let r = Recursive;
$({
let o = r.fib($i);
assert_eq!(o, $e);
})*
}
}
}
<|fim▁hole|>}<|fim▁end|>
|
fib_test!(zero, 0, 1);
fib_test!(one, 1, 1);
fib_test!(two, 2, 2);
fib_test!(three, 3, 3);
|
<|file_name|>cartoonmad.py<|end_file_name|><|fim▁begin|>"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
import re
from urllib.parse import parse_qsl
from cmdlr.analyzer import BaseAnalyzer
from cmdlr.autil import fetch
class Analyzer(BaseAnalyzer):
"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
entry_patterns = [
re.compile(
r'^https?://(?:www.)?cartoonmad.com/comic/(\d+)(?:\.html)?$'
),
]
def entry_normalizer(self, url):
"""Normalize all possible entry url to single one form."""
match = self.entry_patterns[0].search(url)
id = match.group(1)
return 'https://www.cartoonmad.com/comic/{}.html'.format(id)
@staticmethod
def __extract_name(fetch_result):
return fetch_result.soup.title.string.split(' - ')[0]
@staticmethod
def __extract_volumes(fetch_result):
a_tags = (fetch_result.soup
.find('legend', string=re.compile('漫畫線上觀看'))
.parent
.find_all(href=re.compile(r'^/comic/')))
return {a.string: fetch_result.absurl(a.get('href'))
for a in a_tags}
@staticmethod
def __extract_finished(fetch_result):
return (True
if fetch_result.soup.find('img', src='/image/chap9.gif')
else False)
@staticmethod<|fim▁hole|> return (fetch_result.soup
.find('fieldset', id='info').td.get_text().strip())
@staticmethod
def __extract_authors(fetch_result):
return [fetch_result.soup
.find(string=re.compile('作者:'))
.string.split(':')[1].strip()]
async def get_comic_info(self, url, request, **unused):
"""Get comic info."""
fetch_result = await fetch(url, request, encoding='big5')
return {
'name': self.__extract_name(fetch_result),
'volumes': self.__extract_volumes(fetch_result),
'description': self.__extract_description(fetch_result),
'authors': self.__extract_authors(fetch_result),
'finished': self.__extract_finished(fetch_result),
}
@staticmethod
def __get_imgurl_func(soup, absurl):
# print(soup.find('img', src=re.compile('comicpic.asp')))
src = soup.find('img', src=re.compile(r'comicpic.asp'))['src']
abspath, qs_string = absurl(src).split('?', maxsplit=1)
qs = dict(parse_qsl(qs_string))
file_parts = qs['file'].split('/')
file_parts[-1] = '{:0>3}'
qs['file'] = '/'.join(file_parts)
qs_tpl = '&'.join(['{}={}'.format(key, value)
for key, value in qs.items()])
abspath_tpl = '{}?{}'.format(abspath, qs_tpl)
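        # abspath_tpl is now a per-page URL template roughly of the form
        # 'https://www.cartoonmad.com/comicpic.asp?file=.../{:0>3}' (illustrative
        # shape, not verified against the live site); each page URL is produced
        # by formatting in the zero-padded page number.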
def get_imgurl(page_number):
return abspath_tpl.format(page_number)
return get_imgurl
async def save_volume_images(self, url, request, save_image, **unused):
"""Get all images in one volume."""
soup, absurl = await fetch(url, request, encoding='big5')
get_img_url = self.__get_imgurl_func(soup, absurl)
page_count = len(soup.find_all('option', value=True))
for page_num in range(1, page_count + 1):
save_image(
page_num,
url=get_img_url(page_num),
headers={'Referer': url},
)<|fim▁end|>
|
def __extract_description(fetch_result):
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>//
// main.cpp
// test
//
// Created by 刘金鑫 on 14-3-11.
//
//
#ifndef _WIN32
#include <stdio.h>
#else
#include "stdafx.h"
#include <conio.h>
#endif
#include "../../src/tcpclient.h"
#ifndef _WIN32
#include <termios.h>
#include <unistd.h>
#include <fcntl.h>
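// POSIX stand-in for the Windows _kbhit(): temporarily put the terminal into
// non-canonical, non-blocking mode, probe for one pending character, and push
// it back with ungetc() so the caller can still read it.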
int _kbhit(void)
{
struct termios oldt, newt;
int ch;
int oldf;
tcgetattr(STDIN_FILENO, &oldt);
newt = oldt;
newt.c_lflag &= ~(ICANON | ECHO);
tcsetattr(STDIN_FILENO, TCSANOW, &newt);
oldf = fcntl(STDIN_FILENO, F_GETFL, 0);
fcntl(STDIN_FILENO, F_SETFL, oldf | O_NONBLOCK);
ch = getchar();
tcsetattr(STDIN_FILENO, TCSANOW, &oldt);<|fim▁hole|> return 1;
}
return 0;
}
#define Sleep sleep
#define _getch getchar
#endif
int main(int argc, const char * argv[])
{
boost::shared_ptr<adrianx::tcp_client> client(new adrianx::tcp_client());
client->connect("10.10.10.6",8082);
int key = 0;
for (;;)
{
key = _kbhit();
if(!key)
{
Sleep(1);
continue;
}
key = _getch();
//ESC key
if(key==0x1B)
break;
if(key==' ')
{
std::cout<<" space key down"<<std::endl;
char s[] = "asdfasdfasdf";
client->send((uint8_t*)s,sizeof(s));
}
}
client->close();
return 0;
}<|fim▁end|>
|
fcntl(STDIN_FILENO, F_SETFL, oldf);
if(ch != EOF)
{
ungetc(ch, stdin);
|
<|file_name|>any_object.js<|end_file_name|><|fim▁begin|>// @flow
<|fim▁hole|> let obj: Object = {};
obj.foo = x; // error, taint ~> any
obj[y] = x; // error, taint ~> any
},
// getting a property
function() {
let obj: Object = { foo: 'foo' };
(obj.foo: $Tainted<string>); // ok
},
// calling a method
function(x: $Tainted<string>) {
let obj: Object = {};
obj.foo(x); // error, taint ~> any
let foo = obj.foo;
foo(x); // error, taint ~> any
},
];<|fim▁end|>
|
let tests = [
// setting a property
function(x: $Tainted<string>, y: string) {
|
<|file_name|>api_urls.py<|end_file_name|><|fim▁begin|># ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
from django.conf.urls import url<|fim▁hole|>app_name = "audits"
router = DefaultRouter()
router.register(r'ftp-log', api.FTPLogViewSet, 'ftp-log')
urlpatterns = [
]
urlpatterns += router.urls<|fim▁end|>
|
from rest_framework.routers import DefaultRouter
from .. import api
|
<|file_name|>application-basic-inline.ts<|end_file_name|><|fim▁begin|>import {Component, NgModule} from '@angular/core';
import {BrowserModule} from '@angular/platform-browser';<|fim▁hole|> template: `<div>Hello!</div>`
})
export class BasicInlineComponent {}
@NgModule({
imports: [BrowserModule],
declarations: [BasicInlineComponent],
bootstrap: [BasicInlineComponent],
})
export class BasicInlineModule {}<|fim▁end|>
|
@Component({
selector: 'application',
|
<|file_name|>teste.py<|end_file_name|><|fim▁begin|>from netfilterqueue import NetfilterQueue
from dpkt import ip, icmp, tcp, udp
from scapy.all import *
import socket<|fim▁hole|>def print_and_accept(pkt):
data=pkt.get_payload()
res = ip.IP(data)
res2 = IP(data)
i = ICMP(data)
t = TCP(data)
u = UDP(data)
print "SOURCE IP: %s\tDESTINATION IP: %s" % (socket.inet_ntoa(res.src),socket.inet_ntoa(res.dst))
print res2.show2()
resp=srp1(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst='192.168.0.34'),iface="eth0",timeout=2)
print resp.dst
eth_dst = resp.src
eth_src = resp.dst
eth = Ether(src=eth_src, dst=eth_dst)
eth.type = 2048
sendp(eth/res2/res2,iface="eth0")
pkt.accept()
nfqueue = NetfilterQueue()
nfqueue.bind(6, print_and_accept)
try:
nfqueue.run()
except KeyboardInterrupt, ex:
print ex<|fim▁end|>
| |
<|file_name|>noop.rs<|end_file_name|><|fim▁begin|>use anyhow::Result;
use crate::models::AuditRecord;
use super::AuditReporter;
use super::Auditor;
use super::AuditorFactory;
/// AuditReporter that drops all records
pub struct NoopReporter {}
impl AuditorFactory for NoopReporter {
fn make(&self) -> Auditor {
Auditor::wrap(NoopReporter {})<|fim▁hole|>#[async_trait::async_trait(?Send)]
impl AuditReporter for NoopReporter {
async fn send(&self, _: AuditRecord) -> Result<()> {
Ok(())
}
}<|fim▁end|>
|
}
}
|
<|file_name|>filemanager.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use crate::channel::*;
use crate::threadpool::*;
use crossbeam_channel::*;
pub enum FileManagerRequests {
ReadAll {
file: String,
sender: Sender<Message>,
},
}
pub struct ReadAllResult {}
pub enum FileManagerRequestsResponses {
Ok,
ReadAllResult(ReadAllResult),
}
pub struct FileManager<'a> {
epoll: libc::c_int,
dispatcher: TypedThreadDispatcher<'a, FileManagerRequests, FileManagerRequestsResponses>,
}
pub struct TempFile {
pub path: std::path::PathBuf,
}
impl TempFile {
pub fn all_equal(path: &str, data: u8) -> std::io::Result<Self> {
use std::io::*;
let _ = std::fs::remove_file(&path);
let mut f = std::fs::File::create(&path)?;
let data = vec![data; 16 * 1024];
f.write_all(&data)?;<|fim▁hole|> f.sync_all()?;
Ok(Self { path: path.into() })
}
pub fn with_callback<F>(path: &str, f: F) -> std::io::Result<Self>
where
F: Fn(usize) -> u8,
{
use std::io::*;
let _ = std::fs::remove_file(&path);
let mut file = std::fs::File::create(&path)?;
let mut data = vec![0u8; 16 * 1024];
for (i, v) in data.iter_mut().enumerate() {
*v = f(i);
}
file.write_all(&data)?;
file.sync_all()?;
Ok(Self { path: path.into() })
}
pub fn random(path: &str, mut size: usize) -> std::io::Result<Self> {
use std::io::*;
let _ = std::fs::remove_file(&path);
let mut file = std::fs::File::create(&path)?;
let mut data = vec![0u8; 4 * 1024];
        while size > 0 {
            // Refill the 4 KiB buffer with fresh random bytes each pass.
            for v in data.iter_mut() {
                *v = rand::random::<u8>()
            }
            // Never write more than the requested number of remaining bytes.
            let n = size.min(data.len());
            file.write_all(&data[..n])?;
            size -= n;
        }
file.sync_all()?;
Ok(Self { path: path.into() })
}
}
impl Drop for TempFile {
fn drop(&mut self) {
let _ = std::fs::remove_file(self.path.as_path());
}
}
pub fn handle_read_all<P: AsRef<Path>>(file: P, sender: &Sender<Message>) -> ReadAllResult {
let mut path = file.as_ref().to_str().unwrap().to_string();
path.push('\0');
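    // libc::open expects a NUL-terminated C string; std::ffi::CString would be
    // the safer choice, but the manual terminator matches the raw-libc style here.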
let fd = {
let r = unsafe {
libc::open(
path.as_ptr() as *const i8,
libc::O_RDONLY, /*| libc::O_NONBLOCK*/
)
};
if r < 0 {
let err = errno::errno();
eprintln!("{}", err);
}
// let flags = unsafe { libc::fcntl(r, libc::F_GETFL, 0) };
// let _rcontrol = unsafe { libc::fcntl(r, libc::F_SETFL, flags | libc::O_NONBLOCK) };
r
};
// let _r = unsafe {
// libc::posix_fadvise(fd, 0, 0, libc::POSIX_FADV_NORMAL | libc::POSIX_FADV_NOREUSE)
// };
let mut offset = 0;
loop {
let mut buffer = Buffer::all_zero(4 * 1024);
buffer.size = unsafe {
let r = libc::pread(
fd,
buffer.data.as_mut_ptr() as *mut libc::c_void,
buffer.size,
offset,
);
if r == 0 {
break;
}
if r < 0 {
let err = errno::errno();
eprintln!("{}", err); // TODO
break;
}
r as usize
};
offset += buffer.size as i64;
let _ = sender.send(Message::Buffer(buffer));
}
let _ = sender.send(Message::Eof);
unsafe { libc::close(fd) };
ReadAllResult {}
}
impl<'a> FileManager<'a> {
pub fn new(pool: &mut Threadpool<'a>) -> Self {
let dispatcher = pool.new_dispatcher(move |request| match request {
FileManagerRequests::ReadAll { file, sender } => {
FileManagerRequestsResponses::ReadAllResult(handle_read_all(file, sender))
}
});
let epoll = {
let r = unsafe { libc::epoll_create1(0) };
if r < 0 {
let err = errno::errno();
eprintln!("{}", err); //TODO
}
r
};
Self { dispatcher, epoll }
}
fn send(&mut self, req: FileManagerRequests) -> RunResult<FileManagerRequestsResponses> {
self.dispatcher.send(req)
}
pub fn read_all(
&mut self,
file: &str,
sender: Sender<Message>,
) -> std::result::Result<
ReceiverFutureMap<FileManagerRequestsResponses, ReadAllResult>,
ThreadpoolRunError,
> {
let future = self
.send(FileManagerRequests::ReadAll {
file: file.to_string(),
sender,
})?
.map(|x| {
if let FileManagerRequestsResponses::ReadAllResult(r) = x {
r
} else {
panic!("unexpected result")
}
});
Ok(future)
}
}
impl<'a> Drop for FileManager<'a> {
fn drop(&mut self) {
if self.epoll > 0 {
unsafe { libc::close(self.epoll) };
}
}
}
#[cfg(test)]
mod tests {
use crate::threadpool::Threadpool;
use crossbeam_channel::*;
#[test]
fn read_all() {
let file =
super::TempFile::all_equal(".test.read_all", 1).expect("Cannot create temo file");
let mut pool = Threadpool::with_qty(1).expect("Cannot create Threadpool");
let mut mgr = super::FileManager::new(&mut pool);
let (sender, receiver) = bounded(4);
        let read_all_result = mgr
            .read_all(file.path.to_str().unwrap(), sender)
            .expect("Cannot read file");
for _ in 0..4 {
            if let Ok(crate::channel::Message::Buffer(buffer)) =
                receiver.recv_timeout(std::time::Duration::from_secs(1))
            {
testlib::assert!(buffer.data.len() == 4096);
testlib::assert!(buffer.data.iter().all(|x| *x == 1u8));
}
}
        read_all_result
            .wait(std::time::Duration::from_secs(1))
            .expect("Read all timeout");
}
}<|fim▁end|>
| |
<|file_name|>test_save.py<|end_file_name|><|fim▁begin|>import pytest
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy import io
from mitmproxy import exceptions
from mitmproxy.addons import save
from mitmproxy.addons import view
def test_configure(tmpdir):
sa = save.Save()
with taddons.context(sa) as tctx:
with pytest.raises(exceptions.OptionsError):
tctx.configure(sa, save_stream_file=str(tmpdir))
with pytest.raises(Exception, match="Invalid filter"):
tctx.configure(
sa, save_stream_file=str(tmpdir.join("foo")), save_stream_filter="~~"
)
tctx.configure(sa, save_stream_filter="foo")
assert sa.filt
tctx.configure(sa, save_stream_filter=None)
assert not sa.filt
def rd(p):
with open(p, "rb") as f:
x = io.FlowReader(f)
return list(x.stream())
def test_tcp(tmpdir):
sa = save.Save()
with taddons.context(sa) as tctx:
p = str(tmpdir.join("foo"))
tctx.configure(sa, save_stream_file=p)
tt = tflow.ttcpflow()
sa.tcp_start(tt)
sa.tcp_end(tt)
tt = tflow.ttcpflow()
sa.tcp_start(tt)
sa.tcp_error(tt)
tctx.configure(sa, save_stream_file=None)
assert len(rd(p)) == 2
def test_websocket(tmpdir):
sa = save.Save()
with taddons.context(sa) as tctx:
p = str(tmpdir.join("foo"))
tctx.configure(sa, save_stream_file=p)
f = tflow.twebsocketflow()
sa.request(f)
sa.websocket_end(f)
f = tflow.twebsocketflow()
sa.request(f)
sa.websocket_end(f)
tctx.configure(sa, save_stream_file=None)
assert len(rd(p)) == 2
def test_save_command(tmpdir):
sa = save.Save()
with taddons.context() as tctx:
p = str(tmpdir.join("foo"))
sa.save([tflow.tflow(resp=True)], p)
assert len(rd(p)) == 1
sa.save([tflow.tflow(resp=True)], p)
assert len(rd(p)) == 1
sa.save([tflow.tflow(resp=True)], "+" + p)
assert len(rd(p)) == 2
with pytest.raises(exceptions.CommandError):
sa.save([tflow.tflow(resp=True)], str(tmpdir))
v = view.View()
tctx.master.addons.add(v)
tctx.master.addons.add(sa)
tctx.master.commands.execute("save.file @shown %s" % p)
def test_simple(tmpdir):
sa = save.Save()
with taddons.context(sa) as tctx:
p = str(tmpdir.join("foo"))
tctx.configure(sa, save_stream_file=p)
f = tflow.tflow(resp=True)
sa.request(f)
sa.response(f)
tctx.configure(sa, save_stream_file=None)
assert rd(p)[0].response
tctx.configure(sa, save_stream_file="+" + p)
f = tflow.tflow(err=True)
sa.request(f)
sa.error(f)
tctx.configure(sa, save_stream_file=None)
assert rd(p)[1].error<|fim▁hole|>
tctx.configure(sa, save_stream_file="+" + p)
f = tflow.tflow()
sa.request(f)
tctx.configure(sa, save_stream_file=None)
assert not rd(p)[2].response<|fim▁end|>
| |
<|file_name|>issue-33569.rs<|end_file_name|><|fim▁begin|>macro_rules! foo {
{ $+ } => { //~ ERROR expected identifier, found `+`<|fim▁hole|> }
}
foo!();
fn main() {}<|fim▁end|>
|
//~^ ERROR missing fragment specifier
$(x)(y) //~ ERROR expected one of: `*`, `+`, or `?`
|
<|file_name|>LiveHTMLVisitorController.ts<|end_file_name|><|fim▁begin|>import AbstractServiceController from "../../Base/AbstractServiceController";
import MessageChannelUtil from "../../Base/Util/MessageChannelUtil";
import { LiveHTMLVisitorView } from "./LiveHTMLVisitorView";
import LiveHTMLVisitorModel from "./LiveHTMLVisitorModel";
import { LiveHTMLVisitorReceiver } from "./LiveHTMLVisitorReceiver";
import GetLiveHTMLSender from "../../Contents/Sender/GetLiveHTMLSender";
import GetCastSettingSedner from "../../Contents/Sender/GetCastSettingSedner";
export default class LiveHTMLVisitorController extends AbstractServiceController<LiveHTMLVisitorView, LiveHTMLVisitorModel> {
public ControllerName(): string { return "LiveHTMLVisitor"; }
public View: LiveHTMLVisitorView;
/**
     * Constructor
*/
constructor() {
super();
this.Receiver = new LiveHTMLVisitorReceiver(this);
};
/**
     * Fired when this client's own Peer has been created
*/
public OnPeerOpen(peer: PeerJs.Peer) {
this.View = new LiveHTMLVisitorView(this, () => {
//
});
}
public OnPeerClose() {
MessageChannelUtil.RemoveChild(this.SwPeer.PeerId);
}
    // Peer error
public OnPeerError(err: Error) {
document.getElementById('sbj-message-layer').hidden = false;
document.getElementById('sbj-subtitle-message').textContent = "接続に失敗、またはLiveHTMLは終了しています";
}
/**
     * Fired when the connection to the owner is established
*/
public OnOwnerConnection() {
        // Request the cast settings from the owner
this.SwPeer.SendToOwner(new GetCastSettingSedner());
        // Cursor display initialization starts only after the owner connection is up.
this.View.InitializeCursor();
}
/**
     * Fired when the owner side disconnects
*/
public OnOwnerClose() {
document.getElementById('sbj-message-layer').hidden = false;
if (document.getElementById('sbj-subtitle-message').textContent.trim().length === 0) {
document.getElementById('sbj-subtitle-message').textContent = "LiveHTMLは終了しました";
}
}
/**
*
* @param conn
*/
public OnDataConnectionOpen(conn: PeerJs.DataConnection) {
super.OnDataConnectionOpen(conn);
}
/**
*
* @param conn
*/<|fim▁hole|> super.OnDataConnectionClose(conn);
this.View.Cursor.Remove(conn.remoteId);
}
};<|fim▁end|>
|
public OnDataConnectionClose(conn: PeerJs.DataConnection) {
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var today = new Date();
var birthday = new Date(1981, 1, 16);
var age = today.getTime() - birthday.getTime();
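// getTime() returns milliseconds since the epoch, so age is in ms; the
// commented-out line below converts it to years.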
alert(age);
// alert(age / 1000 / 60 / 60 / 24/ 365.25);<|fim▁end|>
| |
<|file_name|>yr-download-tips.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
export default Ember.Component.extend({
tagName: '',
didInsertElement: function() {
this.startPoppover();
},
willDestroyElement: function() {
this.dismissPoppover();
},
startPoppover: function() {
var options = this.getPoppoverOptions();
Ember.$(function() {
Ember.$('[data-toggle="popover"]').popover(options);
});
},
getPoppoverOptions: function() {
    var template = Ember.$('.poppover-template').html(); // jQuery objects have no innerHTML property
var content = Ember.$('.poppover-content').html();
return {
template: template,
placement: 'right',
title: 'Download Tip',<|fim▁hole|> html: true
};
},
dismissPoppover: function() {
Ember.$('[data-toggle="popover"]').popover('hide');
}
});<|fim▁end|>
|
trigger: 'hover',
content: content,
|
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Amrvis(MakefilePackage):
"""Amrvis is a visualization package specifically designed to
read and display output and profiling data from codes built
on the AMReX framework.
"""
homepage = "https://github.com/AMReX-Codes/Amrvis"
git = "https://github.com/AMReX-Codes/Amrvis.git"
version('main', tag='main')
variant(
'dims',
default='3',
values=('1', '2', '3'),
multi=False,
description='Number of spatial dimensions'
)
variant(
'prec',
default='DOUBLE',
values=('FLOAT', 'DOUBLE'),
multi=False,
description='Floating point precision'
)
variant('mpi', default=True, description='Enable MPI parallel support')
variant('debug', default=False, description='Enable debugging features')
variant('profiling', default=False,
description='Enable AMReX profiling features')
depends_on('gmake', type='build')
depends_on('mpi', when='+mpi')
depends_on('libsm')
depends_on('libice')
depends_on('libxpm')
depends_on('libx11')
depends_on('libxt')
depends_on('libxext')
depends_on('motif')
depends_on('flex')
depends_on('bison')
conflicts(
'+profiling', when='dims=1',
msg='Amrvis profiling support requires a 2D build'
)
conflicts(
'+profiling', when='dims=3',
msg='Amrvis profiling support requires a 2D build'
)
# Only doing gcc and clang at the moment.
# Intel currently fails searching for mpiicc, mpiicpc, etc.
for comp in ['%intel', '%cce', '%nag', '%pgi', '%xl', '%xl_r']:
conflicts(
comp,
msg='Amrvis currently only builds with gcc and clang'
)
# Need to clone AMReX into Amrvis because Amrvis uses AMReX's source
resource(name='amrex',
git='https://github.com/AMReX-Codes/amrex.git',
tag='development',
placement='amrex')
def edit(self, spec, prefix):
# libquadmath is only available x86_64 and powerle
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85440
if self.spec.target.family not in ['x86_64', 'ppc64le']:
comps = join_path('amrex', 'Tools', 'GNUMake', 'comps')
maks = [
join_path(comps, 'gnu.mak'),
join_path(comps, 'llvm.mak'),
]
for mak in maks:
filter_file('-lquadmath', '', mak)
# Set all available makefile options to values we want
makefile = FileFilter('GNUmakefile')
makefile.filter(
r'^AMREX_HOME\s*\?=.*',
'AMREX_HOME = {0}'.format('./amrex')
)
makefile.filter(
r'^PRECISION\s*=.*',
'PRECISION = {0}'.format(spec.variants['prec'].value)
)
makefile.filter(
r'^DIM\s*=.*',
'DIM = {0}'.format(spec.variants['dims'].value)
)
makefile.filter(
r'^PROFILE\s*=.*',
'PROFILE = {0}'.format(
spec.variants['profiling'].value
).upper()
)
makefile.filter(
r'^TRACE_PROFILE\s*=.*',
'TRACE_PROFILE = {0}'.format(
spec.variants['profiling'].value
).upper()
)
makefile.filter(
r'^COMM_PROFILE\s*=.*',
'COMM_PROFILE = {0}'.format(
spec.variants['profiling'].value
).upper()
)
makefile.filter(
r'^COMP\s*=.*',
'COMP = {0}'.format(self.compiler.name)
)
makefile.filter(
r'^DEBUG\s*=.*',
'DEBUG = {0}'.format(spec.variants['debug'].value).upper()
)
makefile.filter(
r'^USE_ARRAYVIEW\s*=.*',
'USE_ARRAY_VIEW = FALSE'
)
makefile.filter(
r'^USE_MPI\s*=.*',
'USE_MPI = {0}'.format(spec.variants['mpi'].value).upper()
)
makefile.filter(
r'^USE_CXX11\s*=.*',
'USE_CXX11 = TRUE'
)
makefile.filter(
r'^USE_VOLRENDER\s*=.*',
'USE_VOLRENDER = FALSE'
)
makefile.filter(
r'^USE_PARALLELVOLRENDER\s*=.*',
'USE_PARALLELVOLRENDER = FALSE'
)
makefile.filter(
r'^USE_PROFPARSER\s*=.*',
'USE_PROFPARSER = {0}'.format(
spec.variants['profiling'].value
).upper()
)
# A bit risky here deleting all /usr and /opt X
# library default search paths in makefile
makefile.filter(
r'^.*\b(usr|opt)\b.*$',
'# Spack removed INCLUDE_LOCATIONS and LIBRARY_LOCATIONS'
)<|fim▁hole|>
# Read GNUmakefile into array
with open('GNUmakefile', 'r') as file:
contents = file.readlines()
# Edit GNUmakefile includes and libraries to point to Spack
# dependencies.
# The safest bet is to put the LIBRARY_LOCATIONS and
# INCLUDE_LOCATIONS at the beginning of the makefile.
line_offset = 0
count = 0
for lib in ['libsm', 'libice', 'libxpm', 'libx11',
'libxt', 'libxext', 'motif']:
contents.insert(
line_offset + count,
'LIBRARY_LOCATIONS += {0}\n'.format(spec[lib].prefix.lib)
)
contents.insert(
line_offset + count + 1,
'INCLUDE_LOCATIONS += {0}\n'.format(spec[lib].prefix.include)
)
count += 1
# Write GNUmakefile
with open('GNUmakefile', 'w') as file:
file.writelines(contents)
def setup_build_environment(self, env):
# We don't want an AMREX_HOME the user may have set already
env.unset('AMREX_HOME')
# Help force Amrvis to not pick up random system compilers
if '+mpi' in self.spec:
env.set('MPI_HOME', self.spec['mpi'].prefix)
env.set('CC', self.spec['mpi'].mpicc)
env.set('CXX', self.spec['mpi'].mpicxx)
env.set('F77', self.spec['mpi'].mpif77)
env.set('FC', self.spec['mpi'].mpifc)
def install(self, spec, prefix):
# Install exe manually
mkdirp(prefix.bin)
install('*.ex', prefix.bin)<|fim▁end|>
| |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from node import models
from django.forms import ModelForm
from . import cdmsportalfunc as cpf
from django.core.exceptions import ValidationError
from django import forms
class MoleculeForm(ModelForm):
class Meta:
model = models.Molecules
fields = '__all__'
class SpecieForm(ModelForm):
datearchived = forms.DateField(
widget=forms.TextInput(attrs={'readonly': 'readonly'})
)
dateactivated = forms.DateField(<|fim▁hole|> )
class Meta:
model = models.Species
fields = '__all__'
class FilterForm(ModelForm):
class Meta:
model = models.QuantumNumbersFilter
fields = '__all__'
class XsamsConversionForm(forms.Form):
inurl = forms.URLField(
label='Input URL',
required=False,
widget=forms.TextInput(
attrs={'size': 50,
'title': 'Paste here a URL that delivers an XSAMS '
'document.',
}))
    infile = forms.FileField(required=False)
format = forms.ChoiceField(
choices=[("RAD 3D", "RAD 3D"), ("CSV", "CSV")], )
def clean(self):
infile = self.cleaned_data.get('infile')
inurl = self.cleaned_data.get('inurl')
if (infile and inurl):
raise ValidationError('Give either input file or URL!')
if inurl:
try:
data = cpf.urlopen(inurl)
except Exception as err:
raise ValidationError('Could not open given URL: %s' % err)
elif infile:
data = infile
else:
raise ValidationError('Give either input file or URL!')
try:
self.cleaned_data['result'] = cpf.applyStylesheet2File(data)
except Exception as err:
raise ValidationError('Could not transform XML file: %s' % err)
return self.cleaned_data<|fim▁end|>
|
widget=forms.TextInput(attrs={'readonly': 'readonly'})
|
<|file_name|>difficulty_test_util.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package tests
import (
"fmt"
"math/big"
"testing"
"github.com/ethereumproject/go-ethereum/common"
"github.com/ethereumproject/go-ethereum/common/hexutil"
"github.com/ethereumproject/go-ethereum/core"
"github.com/ethereumproject/go-ethereum/core/types"
"github.com/ethereumproject/go-ethereum/params"
)
// DifficultyTest is the structure of JSON from test files
type DifficultyTest struct {
ParentTimestamp string `json:"parentTimestamp"`
ParentDifficulty string `json:"parentDifficulty"`
UncleHash common.Hash `json:"parentUncles"`
CurrentTimestamp string `json:"currentTimestamp"`
CurrentBlockNumber string `json:"currentBlockNumber"`
CurrentDifficulty string `json:"currentDifficulty"`
}
func (test *DifficultyTest) runDifficulty(t *testing.T, config *core.ChainConfig) error {
currentNumber, _ := hexutil.HexOrDecimalToBigInt(test.CurrentBlockNumber)
parentNumber := new(big.Int).Sub(currentNumber, big.NewInt(1))
parentTimestamp, _ := hexutil.HexOrDecimalToBigInt(test.ParentTimestamp)
parentDifficulty, _ := hexutil.HexOrDecimalToBigInt(test.ParentDifficulty)
currentTimestamp, _ := hexutil.HexOrDecimalToUint64(test.CurrentTimestamp)
parent := &types.Header{
Number: parentNumber,
Time: parentTimestamp,
Difficulty: parentDifficulty,
UncleHash: test.UncleHash,
}
// Check to make sure difficulty is above minimum
if parentDifficulty.Cmp(params.MinimumDifficulty) < 0 {
t.Skip("difficulty below minimum")
return nil
}
<|fim▁hole|> exp, _ := hexutil.HexOrDecimalToBigInt(test.CurrentDifficulty)
if actual.Cmp(exp) != 0 {
return fmt.Errorf("parent[time %v diff %v unclehash:%x] child[time %v number %v] diff %v != expected %v",
test.ParentTimestamp, test.ParentDifficulty, test.UncleHash,
test.CurrentTimestamp, test.CurrentBlockNumber, actual, exp)
}
return nil
}<|fim▁end|>
|
actual := core.CalcDifficulty(config, currentTimestamp, parent)
|
<|file_name|>cast-rfc0401-vtable-kinds.rs<|end_file_name|><|fim▁begin|>// Check that you can cast between different pointers to trait objects
// whose vtable have the same kind (both lengths, or both trait pointers).
trait Foo<T> {
fn foo(&self, _: T) -> u32 { 42 }
}
trait Bar {
fn bar(&self) { println!("Bar!"); }
}
impl<T> Foo<T> for () {}
impl Foo<u32> for u32 { fn foo(&self, _: u32) -> u32 { self+43 } }
impl Bar for () {}
unsafe fn round_trip_and_call<'a>(t: *const (dyn Foo<u32>+'a)) -> u32 {
let foo_e : *const dyn Foo<u16> = t as *const _;
let r_1 = foo_e as *mut dyn Foo<u32>;
(&*r_1).foo(0)
}<|fim▁hole|>
#[repr(C)]
struct FooS<T:?Sized>(T);
#[repr(C)]
struct BarS<T:?Sized>(T);
fn foo_to_bar<T:?Sized>(u: *const FooS<T>) -> *const BarS<T> {
u as *const BarS<T>
}
fn main() {
let x = 4u32;
let y : &dyn Foo<u32> = &x;
let fl = unsafe { round_trip_and_call(y as *const dyn Foo<u32>) };
assert_eq!(fl, (43+4));
let s = FooS([0,1,2]);
let u: &FooS<[u32]> = &s;
let u: *const FooS<[u32]> = u;
let bar_ref : *const BarS<[u32]> = foo_to_bar(u);
let z : &BarS<[u32]> = unsafe{&*bar_ref};
assert_eq!(&z.0, &[0,1,2]);
// If validation fails here, that's likely because an immutable suspension is recovered mutably.
}<|fim▁end|>
| |
<|file_name|>scim.component.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, OnInit} from '@angular/core';
import {SnackbarService} from "../../../services/snackbar.service";
import {ActivatedRoute, Router} from "@angular/router";
import {AppConfig} from "../../../../config/app.config";
import {DomainService} from "../../../services/domain.service";
import {AuthService} from "../../../services/auth.service";
@Component({
selector: 'app-scim',
templateUrl: './scim.component.html',
styleUrls: ['./scim.component.scss']
})
export class ScimComponent implements OnInit {
domainId: string;
domain: any = {};
formChanged = false;
editMode: boolean;
constructor(private domainService: DomainService,
private snackbarService: SnackbarService,
private authService: AuthService,
private route: ActivatedRoute,
private router: Router) {
}
ngOnInit() {
this.domain = this.route.snapshot.data['domain'];
this.domainId = this.domain.id;
this.editMode = this.authService.hasPermissions(['domain_scim_update']);
}
<|fim▁hole|> this.domainService.patchScimSettings(this.domainId, this.domain).subscribe(data => {
this.domain = data;
this.formChanged = false;
this.snackbarService.open('SCIM configuration updated');
});
}
enableSCIM(event) {
this.domain.scim = { 'enabled': (event.checked) };
this.formChanged = true;
}
isSCIMEnabled() {
return this.domain.scim && this.domain.scim.enabled;
}
}<|fim▁end|>
|
save() {
|
<|file_name|>v3.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"unicode/utf8"
"github.com/coreos/etcd/clientv3"
"golang.org/x/net/context"
)
const (
// constant vendor
// copy from https://github.com/coreos/etcd/blob/80d15948bcfc93aabd2c5245d7993c8a9e76bf8f/internal/mvcc/mvccpb/kv.pb.go
//PUT Event_EventType = 0
//DELETE Event_EventType = 1
ETCD_EVENT_PUT = 0
ETCD_EVENT_DELETE = 1
)
func etcdMon_v3(prefix string, c3 *clientv3.Client, bus chan fileChangeEvent, startRevision int64) {
key, option := prefixToKeyOption(prefix)
ch := c3.Watch(context.Background(), key, option, clientv3.WithRev(startRevision))
for chEvent := range ch {
for _, event := range chEvent.Events {
fileEvent := fileChangeEvent{
Path: string(event.Kv.Key),
Content: event.Kv.Value,
}
event.IsCreate()
switch int(event.Type) {
case ETCD_EVENT_PUT:
bus <- fileEvent
case ETCD_EVENT_DELETE:
fileEvent.IsRemoved = true
bus <- fileEvent
default:
log.Println("etcdMon_v3 undefined event type: ", event.Type)
}
}
}
close(bus)
}
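// Plausible wiring of the pieces in this file (sketch only; the channel
// capacity and the +1 on the watch revision are assumptions, not part of
// this file's API):
//
//	rev := firstSyncEtcDir_v3(prefix, client, localDir)
//	bus := make(chan fileChangeEvent, 16)
//	go etcdMon_v3(prefix, client, bus, rev+1)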
/*
Sync localdir to etcd server state.
WARNING: ALL CONTENT OF localdir WILL BE LOST
Return revision of synced state
*/
func firstSyncEtcDir_v3(prefix string, c *clientv3.Client, localdir string) int64 {
cleanDir(localdir)
key, option := prefixToKeyOption(prefix)
// Get all values
resp, err := c.Get(context.Background(), key, option, clientv3.WithSort(clientv3.SortByKey, clientv3.SortDescend))
if err != nil {
panic(err)
}
for _, kv := range resp.Kvs {
targetPath := keyToLocalPath(strings.TrimPrefix(string(kv.Key), prefix), localdir)
if targetPath == "" {
continue
}
targetDir := filepath.Dir(targetPath)
os.MkdirAll(targetDir, DEFAULT_DIRMODE)
err = ioutil.WriteFile(targetPath, kv.Value, DEFAULT_FILEMODE)
if err != nil {
log.Printf("firstSyncEtcDir_v3 error write file '%v': %v\n", targetPath, err)
}
}
return resp.Header.Revision
}
/*
Return the local filesystem path for key under localdir.
Keys that aren't valid UTF-8 or that would escape localdir return "".
*/
func keyToLocalPath(key string, localdir string) string {
if !utf8.ValidString(key) {
log.Printf("Key skip, becouse it isn't valid utf8: %x\n", key)
return ""
}
targetPath := filepath.Clean(filepath.Join(localdir, key))
if !strings.HasPrefix(targetPath, localdir+string(filepath.Separator)) {
log.Printf("Key skip, becouse it out of base directory. Key: '%s', TargetPath: '%s'\n, base: '%s'\n", key, targetPath, localdir)
return ""
}
return targetPath
}
func prefixToKeyOption(prefix string) (key string, option clientv3.OpOption) {
if prefix == "" {
key = "\x00"
option = clientv3.WithFromKey()
} else {
key = prefix
option = clientv3.WithPrefix()
}
return key, option
}
func syncProcess_v3(localDir string, serverPrefix string, c3 *clientv3.Client, etcdChan, fsChan <-chan fileChangeEvent) {
fsMarkFile := filepath.Join(localDir, MARK_FILE_NAME)
for {
select {
case event := <-etcdChan:
filePath := keyToLocalPath(strings.TrimPrefix(event.Path, serverPrefix), localDir)
if filePath == "" || filePath == fsMarkFile {
continue
}
if event.IsRemoved {
os.RemoveAll(filePath)
//fmt.Println("Remove: " + filePath)
} else {
				// Compare against the local mirror file (not the etcd key path);
				// skip the write when the content is already up to date.
				fileContent, err := ioutil.ReadFile(filePath)
				if err == nil && bytes.Equal(fileContent, event.Content) {
					continue
				}
				dirName := filepath.Dir(filePath)
				os.MkdirAll(dirName, DEFAULT_DIRMODE)
				err = ioutil.WriteFile(filePath, event.Content, DEFAULT_FILEMODE)
				if err != nil {
					log.Printf("syncProcess_v3 error while put file '%v': %v\n", filePath, err)
				}
}
case event := <-fsChan:
if event.Path == fsMarkFile {
continue
}
syncProcess_v3FSEvent(localDir, serverPrefix, c3, event)
}
}<|fim▁hole|> if err != nil {
log.Printf("syncProcess_v3 error get relpath '%v': %v\n", event.Path, err)
return
}
etcdPath = serverPrefix + etcdPath
etcdPath = strings.Replace(etcdPath, string(os.PathSeparator), "/", -1)
switch {
case event.IsRemoved:
_, err := c3.Delete(context.Background(), etcdPath)
if err != nil {
log.Printf("syncProcess_v3 error while delete etcdkey '%v': %v\n", etcdPath, err)
}
case event.IsDir:
files, _ := ioutil.ReadDir(event.Path)
for _, file := range files {
path := filepath.Join(event.Path, file.Name())
content := []byte(nil)
if !file.IsDir() {
content, err = ioutil.ReadFile(path)
if err != nil {
log.Println(err)
}
}
syncProcess_v3FSEvent(localDir, serverPrefix, c3, fileChangeEvent{
Path: path,
IsDir: file.IsDir(),
IsRemoved: false,
Content: content,
})
}
case !event.IsDir:
resp, err := c3.Get(context.Background(), etcdPath)
if err != nil {
log.Printf("syncProcess_v3 Can't read key '%v': %v\n", etcdPath, err)
}
if len(resp.Kvs) > 0 {
if bytes.Equal(resp.Kvs[0].Value, event.Content) {
return
}
}
_, err = c3.Put(context.Background(), etcdPath, string(event.Content))
if err != nil {
log.Printf("syncProcess_v3 error while put etcdkey '%v': %v\n", etcdPath, err)
}
}
}<|fim▁end|>
|
}
func syncProcess_v3FSEvent(localDir string, serverPrefix string, c3 *clientv3.Client, event fileChangeEvent) {
etcdPath, err := filepath.Rel(localDir, event.Path)
|
<|file_name|>PlaceholderId.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spectator.placeholders;
import com.netflix.spectator.api.Id;
import com.netflix.spectator.api.Tag;
import java.util.Map;
/**
* An extension of the {@link Id} interface that allows the list of tag names attached
* to the Id to be declared in advance of the use of the metric. This can be used to
* provide a default value for a tag or to use a TagFactory implementation that uses
* context available in the execution environment to compute the value of the tag.
*/
public interface PlaceholderId {
/** Description of the measurement that is being collected. */
String name();
/** New id with an additional tag value. */
PlaceholderId withTag(String k, String v);
/** New id with an additional tag value. */
PlaceholderId withTag(Tag t);
/** New id with additional tag values. */
PlaceholderId withTags(Iterable<Tag> tags);
/** New id with additional tag values. */
PlaceholderId withTags(Map<String, String> tags);
/**
* New id with an additional tag factory.
* @param factory
* the factory to use to generate the values for the tag
*/
PlaceholderId withTagFactory(TagFactory factory);
/**
* New id with additional tag factories.
* @param factories
* a collection of factories for producing values for the tags
*/
PlaceholderId withTagFactories(Iterable<TagFactory> factories);
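  /*
   * Illustrative usage sketch (the factory used to obtain the PlaceholderId
   * instance below is hypothetical, not part of this interface):
   *
   *   PlaceholderId pid = somePlaceholderIdFactory("server.requestCount")
   *       .withTagFactory(dynamicRegionTagFactory);
   *   Id resolved = pid.resolveToId(); // tag values computed at call time
   */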
<|fim▁hole|> * Tag associated with it.
*
* @return an Id that has the same name as this id and the resolved tag values attached
*/
Id resolveToId();
}<|fim▁end|>
|
/**
* Invokes each of the associated tag factories to produce a Id based on the
* runtime context available when this method is invoked. If an associated
* TagFactory produces a non-null Tag, then the returned Id will have that
|
<|file_name|>case.test.js<|end_file_name|><|fim▁begin|>import assert from 'assert'
import {fixCase} from '../../src/lib/words/case'
import Locale from '../../src/locale/locale'
describe('Corrects accidental uPPERCASE\n', () => {
let testCase = {
'Hey, JEnnifer!': 'Hey, Jennifer!',
'CMSko': 'CMSko',
'FPs': 'FPs',
'ČSNka': 'ČSNka',
'BigONE': 'BigONE', // specific brand names
'two Panzer IVs': 'two Panzer IVs',<|fim▁hole|> 'iPhone': 'iPhone',
'iT': 'it',
'Central Europe and Cyrillic tests: aĎIÉUБUГ': 'Central Europe and Cyrillic tests: aďiéuбuг',
}
Object.keys(testCase).forEach((key) => {
it('', () => {
assert.equal(fixCase(key, new Locale('en-us')), testCase[key])
})
})
})<|fim▁end|>
|
'How about ABC?': 'How about ABC?',
'cAPSLOCK': 'capslock',
'(cAPSLOCK)': '(capslock)',
|
<|file_name|>class-method-cross-crate.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_2.rs
extern crate cci_class_2;
use cci_class_2::kitties::cat;
pub fn main() {
let nyan : cat = cat(52, 99);
let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);<|fim▁hole|> nyan.speak();
}<|fim▁end|>
|
assert_eq!(kitty.how_hungry, 2);
|
<|file_name|>auth_backends.py<|end_file_name|><|fim▁begin|>from __future__ import division,print_function,unicode_literals,with_statement
import logging
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User=get_user_model()
class EmailBackend(ModelBackend):
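    # To enable this backend (sketch), add its dotted path to
    # AUTHENTICATION_BACKENDS in the project's settings module.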
def authenticate(self,username=None,password=None,**kwargs):
"""
"username" being passed is really email address and being compared to as such.<|fim▁hole|> """
try:
user=User.objects.get(email=username)
if user.check_password(password):
return user
except (User.DoesNotExist,User.MultipleObjectsReturned):
logging.warning('Unsuccessful login attempt using username/email: {0}'.format(username))
return None<|fim▁end|>
| |
<|file_name|>interlis_model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics):
self.name = sanitize_name(name)
self.topics = topics
def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
result += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
def __init__(self, name, tables):<|fim▁hole|>
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
results in the next series of 999...
'''
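        # e.g. next_nines(250) == 999, next_nines(1200) == 9999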
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())])<|fim▁end|>
|
self.name = sanitize_name(name)
self.tables = tables
|
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
from mongoengine.python_support import txt_type
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
'ValidationError')
class NotRegistered(Exception):
pass
class InvalidDocumentError(Exception):
pass
class LookUpError(AttributeError):
pass
class DoesNotExist(Exception):
pass
class MultipleObjectsReturned(Exception):
pass
class InvalidQueryError(Exception):
pass
class OperationError(Exception):
pass
class NotUniqueError(OperationError):
pass
class FieldDoesNotExist(Exception):
"""Raised when trying to set a field
not declared in a :class:`~mongoengine.Document`
or an :class:`~mongoengine.EmbeddedDocument`.
To avoid this behavior on data loading,
    you should set :attr:`strict` to ``False``
    in the :attr:`meta` dictionary.
"""
class ValidationError(AssertionError):
"""Validation exception.
May represent an error validating a field or a
document containing fields with validation errors.
:ivar errors: A dictionary of errors for fields within this
document or list, or None if the error is for an
individual field.
"""
errors = {}
field_name = None
_message = None
def __init__(self, message="", **kwargs):
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name')
self.message = message
def __str__(self):
return txt_type(self.message)
def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
def __getattribute__(self, name):
message = super(ValidationError, self).__getattribute__(name)
if name == 'message':
if self.field_name:
message = '%s' % message
if self.errors:
message = '%s(%s)' % (message, self._format_errors())
return message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def to_dict(self):
"""Returns a dictionary of all errors within a document
Keys are field names or list indices and values are the
validation error messages, or a nested dictionary of
errors for an embedded document or list.
"""
def build_dict(source):
errors_dict = {}
if not source:
return errors_dict
if isinstance(source, dict):
for field_name, error in source.iteritems():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return unicode(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
def _format_errors(self):
"""Returns a string listing all errors within a document"""
def generate_key(value, prefix=''):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])
results = "%s.%s" % (prefix, value) if prefix else value
return results
error_dict = defaultdict(list)<|fim▁hole|><|fim▁end|>
|
for k, v in self.to_dict().iteritems():
error_dict[generate_key(v)].append(k)
return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function
import numpy as np
import warnings
def _bit_length_26(x):
if x == 0:
return 0
elif x == 1:
return 1
else:
return len(bin(x)) - 2
try:
from scipy.lib._version import NumpyVersion
except ImportError:
import re
string_types = basestring
class NumpyVersion():
"""Parse and compare numpy version strings.
Numpy has the following versioning scheme (numbers given are examples; they
    can be >9 in principle):
- Released version: '1.8.0', '1.8.1', etc.
- Alpha: '1.8.0a1', '1.8.0a2', etc.
- Beta: '1.8.0b1', '1.8.0b2', etc.
- Release candidates: '1.8.0rc1', '1.8.0rc2', etc.
- Development versions: '1.8.0.dev-f1234afa' (git commit hash appended)
- Development versions after a1: '1.8.0a1.dev-f1234afa',
'1.8.0b2.dev-f1234afa',
'1.8.1rc1.dev-f1234afa', etc.
- Development versions (no git hash available): '1.8.0.dev-Unknown'
Comparing needs to be done against a valid version string or other
`NumpyVersion` instance.
Parameters
----------
vstring : str
Numpy version string (``np.__version__``).
Notes
-----
All dev versions of the same (pre-)release compare equal.
Examples
--------
>>> from scipy.lib._version import NumpyVersion
>>> if NumpyVersion(np.__version__) < '1.7.0':
... print('skip')
skip
>>> NumpyVersion('1.7') # raises ValueError, add ".0"
"""
def __init__(self, vstring):
self.vstring = vstring
ver_main = re.match(r'\d[.]\d+[.]\d+', vstring)
if not ver_main:
raise ValueError("Not a valid numpy version string")
self.version = ver_main.group()
self.major, self.minor, self.bugfix = [int(x) for x in
self.version.split('.')]
if len(vstring) == ver_main.end():
self.pre_release = 'final'
else:
alpha = re.match(r'a\d', vstring[ver_main.end():])
beta = re.match(r'b\d', vstring[ver_main.end():])
rc = re.match(r'rc\d', vstring[ver_main.end():])
pre_rel = [m for m in [alpha, beta, rc] if m is not None]
if pre_rel:
self.pre_release = pre_rel[0].group()
else:
self.pre_release = ''
self.is_devversion = bool(re.search(r'.dev-', vstring))
def _compare_version(self, other):
"""Compare major.minor.bugfix"""
if self.major == other.major:
if self.minor == other.minor:
if self.bugfix == other.bugfix:
vercmp = 0
elif self.bugfix > other.bugfix:
vercmp = 1
else:
vercmp = -1
elif self.minor > other.minor:
vercmp = 1
else:
vercmp = -1
elif self.major > other.major:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare_pre_release(self, other):
"""Compare alpha/beta/rc/final."""
if self.pre_release == other.pre_release:
vercmp = 0
elif self.pre_release == 'final':
vercmp = 1
elif other.pre_release == 'final':
vercmp = -1
elif self.pre_release > other.pre_release:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare(self, other):
if not isinstance(other, (string_types, NumpyVersion)):
raise ValueError("Invalid object to compare with NumpyVersion.")
if isinstance(other, string_types):
other = NumpyVersion(other)
vercmp = self._compare_version(other)
if vercmp == 0:
# Same x.y.z version, check for alpha/beta/rc
vercmp = self._compare_pre_release(other)
if vercmp == 0:
# Same version and same pre-release, check if dev version
if self.is_devversion is other.is_devversion:
vercmp = 0
elif self.is_devversion:
vercmp = -1
else:
vercmp = 1
return vercmp
def __lt__(self, other):
return self._compare(other) < 0
def __le__(self, other):
return self._compare(other) <= 0
def __eq__(self, other):
return self._compare(other) == 0
def __ne__(self, other):
return self._compare(other) != 0
def __gt__(self, other):
return self._compare(other) > 0
def __ge__(self, other):
return self._compare(other) >= 0
    def __repr__(self):
return "NumpyVersion(%s)" % self.vstring
class ResettableCache(dict):
"""
Dictionary whose elements mey depend one from another.
If entry `B` depends on entry `A`, changing the values of entry `A` will
reset the value of entry `B` to a default (None); deleteing entry `A` will
delete entry `B`. The connections between entries are stored in a
`_resetdict` private attribute.
Parameters
----------
reset : dictionary, optional
An optional dictionary, associated a sequence of entries to any key
of the object.
items : var, optional
An optional dictionary used to initialize the dictionary
Examples
--------
>>> reset = dict(a=('b',), b=('c',))
>>> cache = resettable_cache(a=0, b=1, c=2, reset=reset)
>>> assert_equal(cache, dict(a=0, b=1, c=2))
>>> print("Try resetting a")
>>> cache['a'] = 1
>>> assert_equal(cache, dict(a=1, b=None, c=None))
>>> cache['c'] = 2
>>> assert_equal(cache, dict(a=1, b=None, c=2))
>>> cache['b'] = 0
>>> assert_equal(cache, dict(a=1, b=0, c=None))
>>> print("Try deleting b")
>>> del(cache['a'])
>>> assert_equal(cache, {})
"""
def __init__(self, reset=None, **items):
self._resetdict = reset or {}
dict.__init__(self, **items)
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
# if hasattr needed for unpickling with protocol=2
if hasattr(self, '_resetdict'):
for mustreset in self._resetdict.get(key, []):
self[mustreset] = None
def __delitem__(self, key):
dict.__delitem__(self, key)
for mustreset in self._resetdict.get(key, []):
del(self[mustreset])
# def __getstate__(self):
# print('pickling wrapper', self.__dict__)
# return self.__dict__
#
# def __setstate__(self, dict_):
# print('unpickling wrapper', dict_)
# self.__dict__.update(dict_)
resettable_cache = ResettableCache
def _next_regular(target):
"""
Find the next regular number greater than or equal to target.
Regular numbers are composites of the prime factors 2, 3, and 5.
Also known as 5-smooth numbers or Hamming numbers, these are the optimal
size for inputs to FFTPACK.
Target must be a positive integer.
"""
if target <= 6:
return target
# Quickly check if it's already a power of 2
if not (target & (target - 1)):
return target
match = float('inf') # Anything found will be smaller
p5 = 1
while p5 < target:
p35 = p5
while p35 < target:
# Ceiling integer division, avoiding conversion to float
# (quotient = ceil(target / p35))
quotient = -(-target // p35)
# Quickly find next power of 2 >= quotient
try:
p2 = 2 ** ((quotient - 1).bit_length())
except AttributeError:
# Fallback for Python <2.7
p2 = 2 ** _bit_length_26(quotient - 1)
N = p2 * p35
if N == target:
return N
elif N < match:
match = N
p35 *= 3
if p35 == target:
return p35
if p35 < match:
match = p35
p5 *= 5
if p5 == target:
return p5
if p5 < match:
match = p5
return match
if NumpyVersion(np.__version__) >= '1.7.1':
np_matrix_rank = np.linalg.matrix_rank
else:
def np_matrix_rank(M, tol=None):
"""
Return matrix rank of array using SVD method
Rank of the array is the number of SVD singular values of the array that are
greater than `tol`.
Parameters
----------
M : {(M,), (M, N)} array_like
array of <=2 dimensions
tol : {None, float}, optional
threshold below which SVD values are considered zero. If `tol` is
None, and ``S`` is an array with singular values for `M`, and
``eps`` is the epsilon value for datatype of ``S``, then `tol` is
set to ``S.max() * max(M.shape) * eps``.
Notes
-----
The default threshold to detect rank deficiency is a test on the magnitude
of the singular values of `M`. By default, we identify singular values less
than ``S.max() * max(M.shape) * eps`` as indicating rank deficiency (with
the symbols defined above). This is the algorithm MATLAB uses [1]. It also
appears in *Numerical recipes* in the discussion of SVD solutions for linear
least squares [2].
This default threshold is designed to detect rank deficiency accounting for
the numerical errors of the SVD computation. Imagine that there is a column
in `M` that is an exact (in floating point) linear combination of other
columns in `M`. Computing the SVD on `M` will not produce a singular value
exactly equal to 0 in general: any difference of the smallest SVD value from
0 will be caused by numerical imprecision in the calculation of the SVD.
Our threshold for small SVD values takes this numerical imprecision into
account, and the default threshold will detect such numerical rank
deficiency. The threshold may declare a matrix `M` rank deficient even if
the linear combination of some columns of `M` is not exactly equal to
another column of `M` but only numerically very close to another column of
`M`.
We chose our default threshold because it is in wide use. Other thresholds
are possible. For example, elsewhere in the 2007 edition of *Numerical
recipes* there is an alternative threshold of ``S.max() *
np.finfo(M.dtype).eps / 2. * np.sqrt(m + n + 1.)``. The authors describe
this threshold as being based on "expected roundoff error" (p 71).
The thresholds above deal with floating point roundoff error in the
calculation of the SVD. However, you may have more information about the
sources of error in `M` that would make you consider other tolerance values
to detect *effective* rank deficiency. The most useful measure of the
tolerance depends on the operations you intend to use on your matrix. For
example, if your data come from uncertain measurements with uncertainties
greater than floating point epsilon, choosing a tolerance near that
uncertainty may be preferable. The tolerance may be absolute if the
uncertainties are absolute rather than relative.
References
----------
    .. [1] MATLAB reference documentation, "Rank"
http://www.mathworks.com/help/techdoc/ref/rank.html
.. [2] W. H. Press, S. A. Teukolsky, W. T. Vetterling and B. P. Flannery,
"Numerical Recipes (3rd edition)", Cambridge University Press, 2007,
page 795.
Examples
--------
>>> from numpy.linalg import matrix_rank
>>> matrix_rank(np.eye(4)) # Full rank matrix
4<|fim▁hole|> >>> I=np.eye(4); I[-1,-1] = 0. # rank deficient matrix
>>> matrix_rank(I)
3
>>> matrix_rank(np.ones((4,))) # 1 dimension - rank 1 unless all 0
1
>>> matrix_rank(np.zeros((4,)))
0
"""
M = np.asarray(M)
if M.ndim > 2:
raise TypeError('array should have 2 or fewer dimensions')
if M.ndim < 2:
return int(not all(M == 0))
S = np.linalg.svd(M, compute_uv=False)
if tol is None:
tol = S.max() * max(M.shape) * np.finfo(S.dtype).eps
return np.sum(S > tol)
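# Illustrative default tolerance (a sketch, not part of the original module):
# for M = np.eye(4) in float64, S.max() == 1.0, so
#   tol = 1.0 * 4 * np.finfo(np.float64).eps   # roughly 8.9e-16
# and all four unit singular values count toward the rank.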
class CacheWriteWarning(UserWarning):
pass
class CachedAttribute(object):
def __init__(self, func, cachename=None, resetlist=None):
self.fget = func
self.name = func.__name__
self.cachename = cachename or '_cache'
self.resetlist = resetlist or ()
def __get__(self, obj, type=None):
if obj is None:
return self.fget
# Get the cache or set a default one if needed
_cachename = self.cachename
_cache = getattr(obj, _cachename, None)
if _cache is None:
setattr(obj, _cachename, resettable_cache())
_cache = getattr(obj, _cachename)
# Get the name of the attribute to set and cache
name = self.name
_cachedval = _cache.get(name, None)
# print("[_cachedval=%s]" % _cachedval)
if _cachedval is None:
# Call the "fget" function
_cachedval = self.fget(obj)
# Set the attribute in obj
# print("Setting %s in cache to %s" % (name, _cachedval))
try:
_cache[name] = _cachedval
except KeyError:
setattr(_cache, name, _cachedval)
# Update the reset list if needed (and possible)
resetlist = self.resetlist
        if resetlist:
try:
_cache._resetdict[name] = self.resetlist
except AttributeError:
pass
# else:
# print("Reading %s from cache (%s)" % (name, _cachedval))
return _cachedval
def __set__(self, obj, value):
errmsg = "The attribute '%s' cannot be overwritten" % self.name
warnings.warn(errmsg, CacheWriteWarning)
class _cache_readonly(object):
"""
Decorator for CachedAttribute
"""
def __init__(self, cachename=None, resetlist=None):
self.func = None
self.cachename = cachename
self.resetlist = resetlist or None
def __call__(self, func):
return CachedAttribute(func,
cachename=self.cachename,
resetlist=self.resetlist)
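# Illustrative usage (hypothetical class, a sketch only):
#
#     class Model(object):
#         @cache_readonly
#         def params(self):
#             return self._expensive_fit()   # computed once, then cached
#
# A later assignment such as ``model.params = 0`` only emits a
# CacheWriteWarning instead of overwriting the cached value.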
cache_readonly = _cache_readonly()<|fim▁end|>
| |
<|file_name|>clock.py<|end_file_name|><|fim▁begin|>"""Clock for keeping track of the wall time.
"""
__all__ = ['ClockError', 'Clock', 'log']
import datetime
import logging
import time
from typing import Optional # noqa: F401. Used for mypy.
class ClockError(Exception):
"""Invalid clock operation."""
pass
class Clock:
"""Clock for keeping track of time.
"""
def __init__(self) -> None:
self.start = None # type: Optional[float]
self.stop = None # type: Optional[float]
def tic(self) -> None:
"""Start the clock."""
self.start = time.monotonic()
self.stop = None
def toc(self) -> None:
"""Stop the clock."""
assert self.start is not None
self.stop = time.monotonic()
def __str__(self) -> str:
"""Human-readable representation of elapsed time."""
if self.start is None:
raise ClockError('The clock has not been started')
else:
start = datetime.datetime.fromtimestamp(self.start)
if self.stop is None:
stop = datetime.datetime.fromtimestamp(time.monotonic())<|fim▁hole|>
return str(delta)
def __enter__(self):
if self.start is None and self.stop is None:
self.tic()
return self
def __exit__(self, exc_type, exc_value, traceback):
if self.start is not None:
self.toc()
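# Illustrative usage (a sketch, not part of the original module):
#
#     clock = Clock()
#     clock.tic()
#     do_work()
#     clock.toc()
#     print(str(clock))   # e.g. '0:00:01.234567'
#
# or, equivalently, as a context manager:
#
#     with Clock() as clock:
#         do_work()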
def log(function):
"""Create a decorator that logs the elapsed time.
"""
def wrapper(*args, **kwargs):
with Clock() as clock:
result = function(*args, **kwargs)
        logging.debug('Completed {} after {}.'
.format(function.__name__, clock))
return result
return wrapper<|fim▁end|>
|
else:
stop = datetime.datetime.fromtimestamp(self.stop)
delta = stop - start
|
<|file_name|>complex_types_macros.rs<|end_file_name|><|fim▁begin|>extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate rusted_cypher;
use rusted_cypher::GraphClient;
use rusted_cypher::cypher::result::Row;
const URI: &'static str = "http://neo4j:[email protected]:7474/db/data";
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Language {
name: String,
level: String,
safe: bool,
}
#[test]
fn without_params() {
let graph = GraphClient::connect(URI).unwrap();
let stmt = cypher_stmt!("MATCH (n:NTLY_INTG_TEST_MACROS_1) RETURN n").unwrap();
let result = graph.exec(stmt);
assert!(result.is_ok());
}
#[test]
fn save_retrive_struct() {
let rust = Language {
name: "Rust".to_owned(),
level: "low".to_owned(),
safe: true,
};
let graph = GraphClient::connect(URI).unwrap();
let stmt = cypher_stmt!("CREATE (n:NTLY_INTG_TEST_MACROS_2 {lang}) RETURN n", {<|fim▁hole|> "lang" => &rust
}).unwrap();
let results = graph.exec(stmt).unwrap();
let rows: Vec<Row> = results.rows().take(1).collect();
let row = rows.first().unwrap();
let lang: Language = row.get("n").unwrap();
assert_eq!(rust, lang);
graph.exec("MATCH (n:NTLY_INTG_TEST_MACROS_2) DELETE n").unwrap();
}<|fim▁end|>
| |
<|file_name|>Test_Watchdog.py<|end_file_name|><|fim▁begin|>""" unit test for Watchdog.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# imports
import os
from mock import MagicMock
# sut
from DIRAC.WorkloadManagementSystem.JobWrapper.Watchdog import Watchdog
mock_exeThread = MagicMock()
mock_spObject = MagicMock()
def test_calibrate():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
res = wd.calibrate()
assert res['OK'] is True
def test__performChecks():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
res = wd.calibrate()
assert res['OK'] is True
res = wd._performChecks()
assert res['OK'] is True
<|fim▁hole|>def test__performChecksFull():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
wd.testCPULimit = 1
wd.testMemoryLimit = 1
res = wd.calibrate()
assert res['OK'] is True
res = wd._performChecks()
assert res['OK'] is True<|fim▁end|>
| |
<|file_name|>InterferogramOp.java<|end_file_name|><|fim▁begin|>package org.jlinda.nest.gpf;
import com.bc.ceres.core.ProgressMonitor;
import org.apache.commons.math3.util.FastMath;
import org.esa.beam.framework.datamodel.Band;
import org.esa.beam.framework.datamodel.MetadataElement;
import org.esa.beam.framework.datamodel.Product;
import org.esa.beam.framework.datamodel.ProductData;
import org.esa.beam.framework.gpf.Operator;
import org.esa.beam.framework.gpf.OperatorException;
import org.esa.beam.framework.gpf.OperatorSpi;
import org.esa.beam.framework.gpf.Tile;
import org.esa.beam.framework.gpf.annotations.OperatorMetadata;
import org.esa.beam.framework.gpf.annotations.Parameter;
import org.esa.beam.framework.gpf.annotations.SourceProduct;
import org.esa.beam.framework.gpf.annotations.TargetProduct;
import org.esa.beam.util.ProductUtils;
import org.esa.snap.datamodel.AbstractMetadata;
import org.esa.snap.datamodel.Unit;
import org.esa.snap.gpf.OperatorUtils;
import org.esa.snap.gpf.ReaderUtils;
import org.jblas.ComplexDoubleMatrix;
import org.jblas.DoubleMatrix;
import org.jblas.MatrixFunctions;
import org.jblas.Solve;
import org.jlinda.core.Orbit;
import org.jlinda.core.SLCImage;
import org.jlinda.core.Window;
import org.jlinda.core.utils.MathUtils;
import org.jlinda.core.utils.PolyUtils;
import org.jlinda.core.utils.SarUtils;
import org.jlinda.nest.utils.BandUtilsDoris;
import org.jlinda.nest.utils.CplxContainer;
import org.jlinda.nest.utils.ProductContainer;
import org.jlinda.nest.utils.TileUtilsDoris;
import java.awt.*;
import java.util.HashMap;
import java.util.Map;
@OperatorMetadata(alias = "Interferogram",
category = "SAR Processing/Interferometric/Products",
authors = "Petar Marinkovic",
copyright = "Copyright (C) 2013 by PPO.labs",
description = "Compute interferograms from stack of coregistered images : JBLAS implementation")
public class InterferogramOp extends Operator {
@SourceProduct
private Product sourceProduct;
@TargetProduct
private Product targetProduct;
@Parameter(valueSet = {"1", "2", "3", "4", "5", "6", "7", "8"},
description = "Order of 'Flat earth phase' polynomial",
defaultValue = "5",
label = "Degree of \"Flat Earth\" polynomial")
private int srpPolynomialDegree = 5;
@Parameter(valueSet = {"301", "401", "501", "601", "701", "801", "901", "1001"},
description = "Number of points for the 'flat earth phase' polynomial estimation",
defaultValue = "501",
label = "Number of 'Flat earth' estimation points")
private int srpNumberPoints = 501;
@Parameter(valueSet = {"1", "2", "3", "4", "5"},
description = "Degree of orbit (polynomial) interpolator",
defaultValue = "3",
label = "Orbit interpolation degree")
private int orbitDegree = 3;
@Parameter(defaultValue="false", label="Do NOT subtract flat-earth phase from interferogram.")
private boolean doNotSubtract = false;
// flat_earth_polynomial container
private HashMap<String, DoubleMatrix> flatEarthPolyMap = new HashMap<String, DoubleMatrix>();
// source
private HashMap<Integer, CplxContainer> masterMap = new HashMap<Integer, CplxContainer>();
private HashMap<Integer, CplxContainer> slaveMap = new HashMap<Integer, CplxContainer>();
// target
private HashMap<String, ProductContainer> targetMap = new HashMap<String, ProductContainer>();
// operator tags<|fim▁hole|> private int sourceImageHeight;
/**
* Initializes this operator and sets the one and only target product.
* <p>The target product can be either defined by a field of type {@link org.esa.beam.framework.datamodel.Product} annotated with the
* {@link org.esa.beam.framework.gpf.annotations.TargetProduct TargetProduct} annotation or
* by calling {@link #setTargetProduct} method.</p>
* <p>The framework calls this method after it has created this operator.
* Any client code that must be performed before computation of tile data
* should be placed here.</p>
*
* @throws org.esa.beam.framework.gpf.OperatorException
* If an error occurs during operator initialisation.
* @see #getTargetProduct()
*/
@Override
public void initialize() throws OperatorException {
try {
// rename product if no subtraction of the flat-earth phase
if (doNotSubtract) {
productName = "ifgs";
productTag = "ifg";
} else {
productName = "srp_ifgs";
productTag = "ifg_srp";
}
checkUserInput();
constructSourceMetadata();
constructTargetMetadata();
createTargetProduct();
// final String[] masterBandNames = sourceProduct.getBandNames();
// for (int i = 0; i < masterBandNames.length; i++) {
// if (masterBandNames[i].contains("mst")) {
// masterBand1 = sourceProduct.getBand(masterBandNames[i]);
// if (masterBand1.getUnit() != null && masterBand1.getUnit().equals(Unit.REAL)) {
// masterBand2 = sourceProduct.getBand(masterBandNames[i + 1]);
// }
// break;
// }
// }
//
// getMetadata();
getSourceImageDimension();
if (!doNotSubtract) {
constructFlatEarthPolynomials();
}
} catch (Exception e) {
throw new OperatorException(e);
}
}
private void getSourceImageDimension() {
sourceImageWidth = sourceProduct.getSceneRasterWidth();
sourceImageHeight = sourceProduct.getSceneRasterHeight();
}
private void constructFlatEarthPolynomials() throws Exception {
for (Integer keyMaster : masterMap.keySet()) {
CplxContainer master = masterMap.get(keyMaster);
for (Integer keySlave : slaveMap.keySet()) {
CplxContainer slave = slaveMap.get(keySlave);
flatEarthPolyMap.put(slave.name, estimateFlatEarthPolynomial(master.metaData, master.orbit, slave.metaData, slave.orbit));
}
}
}
private void constructTargetMetadata() {
for (Integer keyMaster : masterMap.keySet()) {
CplxContainer master = masterMap.get(keyMaster);
for (Integer keySlave : slaveMap.keySet()) {
// generate name for product bands
final String productName = keyMaster.toString() + "_" + keySlave.toString();
final CplxContainer slave = slaveMap.get(keySlave);
final ProductContainer product = new ProductContainer(productName, master, slave, true);
product.targetBandName_I = "i_" + productTag + "_" + master.date + "_" + slave.date;
product.targetBandName_Q = "q_" + productTag + "_" + master.date + "_" + slave.date;
// put ifg-product bands into map
targetMap.put(productName, product);
}
}
}
private void constructSourceMetadata() throws Exception {
// define sourceMaster/sourceSlave name tags
final String masterTag = "mst";
final String slaveTag = "slv";
// get sourceMaster & sourceSlave MetadataElement
final MetadataElement masterMeta = AbstractMetadata.getAbstractedMetadata(sourceProduct);
final String slaveMetadataRoot = AbstractMetadata.SLAVE_METADATA_ROOT;
/* organize metadata */
// put sourceMaster metadata into the masterMap
metaMapPut(masterTag, masterMeta, sourceProduct, masterMap);
// plug sourceSlave metadata into slaveMap
MetadataElement slaveElem = sourceProduct.getMetadataRoot().getElement(slaveMetadataRoot);
if(slaveElem == null) {
slaveElem = sourceProduct.getMetadataRoot().getElement("Slave Metadata");
}
MetadataElement[] slaveRoot = slaveElem.getElements();
for (MetadataElement meta : slaveRoot) {
metaMapPut(slaveTag, meta, sourceProduct, slaveMap);
}
}
private void metaMapPut(final String tag,
final MetadataElement root,
final Product product,
final HashMap<Integer, CplxContainer> map) throws Exception {
// TODO: include polarization flags/checks!
// pull out band names for this product
final String[] bandNames = product.getBandNames();
final int numOfBands = bandNames.length;
// map key: ORBIT NUMBER
int mapKey = root.getAttributeInt(AbstractMetadata.ABS_ORBIT);
// metadata: construct classes and define bands
final String date = OperatorUtils.getAcquisitionDate(root);
final SLCImage meta = new SLCImage(root);
final Orbit orbit = new Orbit(root, orbitDegree);
// TODO: resolve multilook factors
meta.setMlAz(1);
meta.setMlRg(1);
Band bandReal = null;
Band bandImag = null;
for (int i = 0; i < numOfBands; i++) {
String bandName = bandNames[i];
if (bandName.contains(tag) && bandName.contains(date)) {
final Band band = product.getBandAt(i);
if (BandUtilsDoris.isBandReal(band)) {
bandReal = band;
} else if (BandUtilsDoris.isBandImag(band)) {
bandImag = band;
}
}
}
try {
map.put(mapKey, new CplxContainer(date, meta, orbit, bandReal, bandImag));
} catch (Exception e) {
e.printStackTrace();
}
}
private void createTargetProduct() {
// construct target product
targetProduct = new Product(productName,
sourceProduct.getProductType(),
sourceProduct.getSceneRasterWidth(),
sourceProduct.getSceneRasterHeight());
ProductUtils.copyProductNodes(sourceProduct, targetProduct);
for (final Band band : targetProduct.getBands()) {
targetProduct.removeBand(band);
}
for (String key : targetMap.keySet()) {
String targetBandName_I = targetMap.get(key).targetBandName_I;
targetProduct.addBand(targetBandName_I, ProductData.TYPE_FLOAT32);
targetProduct.getBand(targetBandName_I).setUnit(Unit.REAL);
String targetBandName_Q = targetMap.get(key).targetBandName_Q;
targetProduct.addBand(targetBandName_Q, ProductData.TYPE_FLOAT32);
targetProduct.getBand(targetBandName_Q).setUnit(Unit.IMAGINARY);
final String tag0 = targetMap.get(key).sourceMaster.date;
final String tag1 = targetMap.get(key).sourceSlave.date;
if (CREATE_VIRTUAL_BAND) {
String countStr = "_" + productTag + "_" + tag0 + "_" + tag1;
ReaderUtils.createVirtualIntensityBand(targetProduct, targetProduct.getBand(targetBandName_I), targetProduct.getBand(targetBandName_Q), countStr);
ReaderUtils.createVirtualPhaseBand(targetProduct, targetProduct.getBand(targetBandName_I), targetProduct.getBand(targetBandName_Q), countStr);
}
}
        // For testing: optimal results were obtained with 1024x1024-pixel tiles; it is unclear whether this is platform dependent.
// targetProduct.setPreferredTileSize(512, 512);
}
private void checkUserInput() throws OperatorException {
// check for the logic in input paramaters
final MetadataElement masterMeta = AbstractMetadata.getAbstractedMetadata(sourceProduct);
final int isCoregStack = masterMeta.getAttributeInt(AbstractMetadata.coregistered_stack);
if (isCoregStack != 1) {
throw new OperatorException("Input should be a coregistered SLC stack");
}
}
private DoubleMatrix estimateFlatEarthPolynomial(SLCImage masterMetadata, Orbit masterOrbit, SLCImage slaveMetadata, Orbit slaveOrbit) throws Exception {
// estimation window : this works only for NEST "crop" logic
// long minLine = masterMetadata.getCurrentWindow().linelo;
// long maxLine = masterMetadata.getCurrentWindow().linehi;
// long minPixel = masterMetadata.getCurrentWindow().pixlo;
// long maxPixel = masterMetadata.getCurrentWindow().pixhi;
long minLine = 0;
long maxLine = sourceImageHeight;
long minPixel = 0;
long maxPixel = sourceImageWidth;
int numberOfCoefficients = PolyUtils.numberOfCoefficients(srpPolynomialDegree);
int[][] position = MathUtils.distributePoints(srpNumberPoints, new Window(minLine,maxLine,minPixel,maxPixel));
// setup observation and design matrix
DoubleMatrix y = new DoubleMatrix(srpNumberPoints);
DoubleMatrix A = new DoubleMatrix(srpNumberPoints, numberOfCoefficients);
double masterMinPi4divLam = (-4 * Math.PI * org.jlinda.core.Constants.SOL) / masterMetadata.getRadarWavelength();
double slaveMinPi4divLam = (-4 * Math.PI * org.jlinda.core.Constants.SOL) / slaveMetadata.getRadarWavelength();
        // Loop through the vector of distributed points
for (int i = 0; i < srpNumberPoints; ++i) {
double line = position[i][0];
double pixel = position[i][1];
// compute azimuth/range time for this pixel
final double masterTimeRange = masterMetadata.pix2tr(pixel + 1);
// compute xyz of this point : sourceMaster
org.jlinda.core.Point xyzMaster = masterOrbit.lp2xyz(line + 1, pixel + 1, masterMetadata);
org.jlinda.core.Point slaveTimeVector = slaveOrbit.xyz2t(xyzMaster, slaveMetadata);
final double slaveTimeRange = slaveTimeVector.x;
// observation vector
y.put(i, (masterMinPi4divLam * masterTimeRange) - (slaveMinPi4divLam * slaveTimeRange));
// set up a system of equations
// ______Order unknowns: A00 A10 A01 A20 A11 A02 A30 A21 A12 A03 for degree=3______
double posL = PolyUtils.normalize2(line, minLine, maxLine);
double posP = PolyUtils.normalize2(pixel, minPixel, maxPixel);
int index = 0;
for (int j = 0; j <= srpPolynomialDegree; j++) {
for (int k = 0; k <= j; k++) {
A.put(i, index, (FastMath.pow(posL, (double) (j - k)) * FastMath.pow(posP, (double) k)));
index++;
}
}
}
// Fit polynomial through computed vector of phases
DoubleMatrix Atranspose = A.transpose();
DoubleMatrix N = Atranspose.mmul(A);
DoubleMatrix rhs = Atranspose.mmul(y);
// this should be the coefficient of the reference phase
// flatEarthPolyCoefs = Solve.solve(N, rhs);
return Solve.solve(N, rhs);
}
/**
* Called by the framework in order to compute a tile for the given target band.
* <p>The default implementation throws a runtime exception with the message "not implemented".</p>
*
* @param targetTileMap The target tiles associated with all target bands to be computed.
* @param targetRectangle The rectangle of target tile.
     * @param pm              A progress monitor which should be used to determine computation cancellation requests.
* @throws org.esa.beam.framework.gpf.OperatorException
* If an error occurs during computation of the target raster.
*/
@Override
public void computeTileStack(Map<Band, Tile> targetTileMap, Rectangle targetRectangle, ProgressMonitor pm)
throws OperatorException {
try {
int y0 = targetRectangle.y;
int yN = y0 + targetRectangle.height - 1;
int x0 = targetRectangle.x;
int xN = targetRectangle.x + targetRectangle.width - 1;
final Window tileWindow = new Window(y0, yN, x0, xN);
// Band flatPhaseBand;
Band targetBand_I;
Band targetBand_Q;
for (String ifgKey : targetMap.keySet()) {
ProductContainer product = targetMap.get(ifgKey);
/// check out results from source ///
Tile tileReal = getSourceTile(product.sourceMaster.realBand, targetRectangle);
Tile tileImag = getSourceTile(product.sourceMaster.imagBand, targetRectangle);
ComplexDoubleMatrix complexMaster = TileUtilsDoris.pullComplexDoubleMatrix(tileReal, tileImag);
/// check out results from source ///
tileReal = getSourceTile(product.sourceSlave.realBand, targetRectangle);
tileImag = getSourceTile(product.sourceSlave.imagBand, targetRectangle);
ComplexDoubleMatrix complexSlave = TileUtilsDoris.pullComplexDoubleMatrix(tileReal, tileImag);
// if (srpPolynomialDegree > 0) {
if (!doNotSubtract) {
// normalize range and azimuth axis
DoubleMatrix rangeAxisNormalized = DoubleMatrix.linspace(x0, xN, complexMaster.columns);
rangeAxisNormalized = normalizeDoubleMatrix(rangeAxisNormalized, sourceImageWidth);
DoubleMatrix azimuthAxisNormalized = DoubleMatrix.linspace(y0, yN, complexMaster.rows);
azimuthAxisNormalized = normalizeDoubleMatrix(azimuthAxisNormalized, sourceImageHeight);
// pull polynomial from the map
DoubleMatrix polyCoeffs = flatEarthPolyMap.get(product.sourceSlave.name);
// estimate the phase on the grid
DoubleMatrix realReferencePhase =
PolyUtils.polyval(azimuthAxisNormalized, rangeAxisNormalized,
polyCoeffs, PolyUtils.degreeFromCoefficients(polyCoeffs.length));
// compute the reference phase
ComplexDoubleMatrix complexReferencePhase =
new ComplexDoubleMatrix(MatrixFunctions.cos(realReferencePhase),
MatrixFunctions.sin(realReferencePhase));
complexSlave.muli(complexReferencePhase); // no conjugate here!
}
SarUtils.computeIfg_inplace(complexMaster, complexSlave.conji());
/// commit to target ///
targetBand_I = targetProduct.getBand(product.targetBandName_I);
Tile tileOutReal = targetTileMap.get(targetBand_I);
TileUtilsDoris.pushDoubleMatrix(complexMaster.real(), tileOutReal, targetRectangle);
targetBand_Q = targetProduct.getBand(product.targetBandName_Q);
Tile tileOutImag = targetTileMap.get(targetBand_Q);
TileUtilsDoris.pushDoubleMatrix(complexMaster.imag(), tileOutImag, targetRectangle);
}
} catch (Throwable e) {
OperatorUtils.catchOperatorException(getId(), e);
}
}
private DoubleMatrix normalizeDoubleMatrix(DoubleMatrix matrix, int factor) {
matrix.subi(0.5 * (factor - 1));
matrix.divi(0.25 * (factor - 1));
return matrix;
}
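    // Illustrative check (a sketch, not part of the original operator): for
    // factor = 101 the axis [0 .. 100] maps to [-2.0 .. 2.0] via
    //   (x - 0.5 * (factor - 1)) / (0.25 * (factor - 1))
    // which appears to match the [-2, 2] normalization used by
    // PolyUtils.normalize2 when the flat-earth polynomial is estimated.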
/**
* The SPI is used to register this operator in the graph processing framework
* via the SPI configuration file
* {@code META-INF/services/org.esa.beam.framework.gpf.OperatorSpi}.
* This class may also serve as a factory for new operator instances.
*
* @see org.esa.beam.framework.gpf.OperatorSpi#createOperator()
* @see org.esa.beam.framework.gpf.OperatorSpi#createOperator(java.util.Map, java.util.Map)
*/
public static class Spi extends OperatorSpi {
public Spi() {
super(InterferogramOp.class);
}
}
}<|fim▁end|>
|
private static final boolean CREATE_VIRTUAL_BAND = true;
private String productName;
public String productTag;
private int sourceImageWidth;
|
<|file_name|>timer.d.ts<|end_file_name|><|fim▁begin|>import { Observable } from '../Observable';
import { SchedulerLike } from '../types';
/**
* Creates an Observable that starts emitting after an `initialDelay` and
 * emits ever-increasing numbers after each `period` of time thereafter.
*
* <span class="informal">Its like {@link interval}, but you can specify when
* should the emissions start.</span><|fim▁hole|> *
* `timer` returns an Observable that emits an infinite sequence of ascending
* integers, with a constant interval of time, `period` of your choosing
* between those emissions. The first emission happens after the specified
* `initialDelay`. The initial delay may be a {@link Date}. By default, this
* operator uses the `async` IScheduler to provide a notion of time, but you
* may pass any IScheduler to it. If `period` is not specified, the output
* Observable emits only one value, `0`. Otherwise, it emits an infinite
* sequence.
*
* @example <caption>Emits ascending numbers, one every second (1000ms), starting after 3 seconds</caption>
* var numbers = Rx.Observable.timer(3000, 1000);
* numbers.subscribe(x => console.log(x));
*
* @example <caption>Emits one number after five seconds</caption>
* var numbers = Rx.Observable.timer(5000);
* numbers.subscribe(x => console.log(x));
*
* @see {@link interval}
* @see {@link delay}
*
* @param {number|Date} [dueTime] The initial delay time to wait before
* emitting the first value of `0`.
* @param {number|SchedulerLike} [periodOrScheduler] The period of time between emissions of the
* subsequent numbers.
* @param {SchedulerLike} [scheduler=async] The IScheduler to use for scheduling
* the emission of values, and providing a notion of "time".
* @return {Observable} An Observable that emits a `0` after the
* `initialDelay` and ever increasing numbers after each `period` of time
* thereafter.
* @static true
* @name timer
* @owner Observable
*/
export declare function timer(dueTime?: number | Date, periodOrScheduler?: number | SchedulerLike, scheduler?: SchedulerLike): Observable<number>;<|fim▁end|>
|
*
* <img src="./img/timer.png" width="100%">
|
<|file_name|>config_flow.py<|end_file_name|><|fim▁begin|>"""Config flow for the Daikin platform."""
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, DOMAIN, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def _create_entry(self, host, mac, key=None, uuid=None, password=None):
"""Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
        # BRP07Cxx devices need uuid together with key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
password = None
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=self.schema)
return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""<|fim▁hole|> if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user()<|fim▁end|>
|
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
|
<|file_name|>pep4.py<|end_file_name|><|fim▁begin|>#Project Euler Problem 4
#A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 * 99.
#Find the largest palindrome made from the product of two 3-digit numbers.
def palindrome(test):
    #peel one matching character off each end at a time
    #(rstrip/lstrip would strip whole character runs and give false positives)
    while len(test) > 1:
        if test[0] != test[-1]:
            return False
        test = test[1:-1]
    return True
#print palindrome(str(99000009))
def palindrome2(test):
if test == "".join(reversed(test)):
return True
else:
return False
#print palindrome2("31213")
def largest_palindrome_from_3digitproducts():
hi_p = 0
t1 = 999
t2 = 999
count = 0
while t1 >= 100:
t2 = 999 - count
#print "t1 = {}".format(t1)
while t2 >= 1:
#print "t2 = {}".format(t2)
test = t1*t2
if palindrome2(str(test)) == True and test > hi_p:
hi_p = test
#print hi_p
t2 -=1
count += 1
t1 -=1
return "hi_p = {}".format(hi_p)
print largest_palindrome_from_3digitproducts()
def largest_palindrome_from_3digitproductsr(test=999): #with recursion (doesn't work yet: semantic error, it only tries 999*999 and 999*998, never pairs like 999*997)
large_num = test * test
large_num2 = test * (test-1)
if palindrome(str(large_num)) == True:
return large_num
elif palindrome(str(large_num2)) == True:
return large_num2
else:
return largest_palindrome_from_3digitproductsr(test-1)
#print largest_palindrome_from_3digitproductsr()
"""
print 9*9 #highest square #digits involved 1
print 9*8 #new number times highest #because old digits finished, add new digit
print 8*8 #new squared #multiply involved digit by all involved hi to low
print 9*7 #new digit times highest
print 8*7#new times next highest
print 9*6#new2 times highest
print 7*7#new squared #new2 now new
print 8*6#new times next highest
print 9*5#
print 7*6
print 8*5
print 6*6<|fim▁hole|>print 7*4
print 9*3
print 5*5
print 8*3
print 6*4
print 7*3
print 5*4
print 9*2
print 6*3
print 8*2
print 4*4
print 5*3
print 7*2
print 6*2
print 4*3
print 5*2
print 9*1
print 3*3
print 8*1
print 4*2
print 7*1
print 6*1
print 3*2
print 5*1
print 4*1
print 2*2
print 3*1
print 2*1
print 1*1 """<|fim▁end|>
|
print 9*4
print 7*5
print 8*4
print 6*5
|
<|file_name|>list.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import logging
import warnings
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound
from pip.index import PackageFinder
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions, dist_is_editable
from pip.utils.deprecation import RemovedInPip7Warning
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
"""
List installed packages, including editables.
Packages are listed in a case-insensitive sorted order.
"""
name = 'list'
usage = """
%prog [options]"""
summary = 'List installed packages.'
def __init__(self, *args, **kw):
super(ListCommand, self).__init__(*args, **kw)
cmd_opts = self.cmd_opts
cmd_opts.add_option(
'-o', '--outdated',
action='store_true',
default=False,
help='List outdated packages (excluding editables)')
cmd_opts.add_option(
'-u', '--uptodate',
action='store_true',
default=False,
help='List uptodate packages (excluding editables)')
cmd_opts.add_option(
'-e', '--editable',
action='store_true',
default=False,
help='List editable projects.')
cmd_opts.add_option(
'-l', '--local',
action='store_true',
default=False,
help=('If in a virtualenv that has global access, do not list '
'globally-installed packages.'),
)
self.cmd_opts.add_option(
'--user',
dest='user',
action='store_true',
default=False,
help='Only output packages installed in user-site.')
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
help=("Include pre-release and development versions. By default, "
"pip only finds stable versions."),
)
index_opts = make_option_group(index_group, self.parser)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def _build_package_finder(self, options, index_urls, session):
"""
Create a package finder appropriate to this list command.
"""
return PackageFinder(
find_links=options.find_links,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)
def run(self, options, args):
if options.outdated:
self.run_outdated(options)
elif options.uptodate:
self.run_uptodate(options)
elif options.editable:
self.run_editables(options)
else:
self.run_listing(options)
def run_outdated(self, options):
for dist, remote_version_raw, remote_version_parsed in \
self.find_packages_latests_versions(options):
if remote_version_parsed > dist.parsed_version:
logger.info(
'%s (Current: %s Latest: %s)',
dist.project_name, dist.version, remote_version_raw,
)
def find_packages_latests_versions(self, options):
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
if options.use_mirrors:
warnings.warn(
"--use-mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip7Warning,
)
if options.mirrors:
warnings.warn(
"--mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip7Warning,
)
index_urls += options.mirrors
dependency_links = []
for dist in get_installed_distributions(local_only=options.local,
user_only=options.user):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)
with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
include_editables=False,
)
for dist in installed_packages:
req = InstallRequirement.from_line(dist.key, None)
try:
link = finder.find_requirement(req, True)
                    # If link is None, the installed version is the most
                    # up-to-date
if link is None:<|fim▁hole|> # It might be a good idea that link or finder had a public
# method that returned version
remote_version = finder._link_package_versions(
link, req.name
)[0]
remote_version_raw = remote_version[2]
remote_version_parsed = remote_version[0]
yield dist, remote_version_raw, remote_version_parsed
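    # Illustrative consumer (hypothetical versions, not part of pip): each
    # yielded triple is (dist, remote_version_raw, remote_version_parsed),
    # which run_outdated() renders as, e.g.:
    #   requests (Current: 1.1.0 Latest: 1.2.0)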
def run_listing(self, options):
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
)
self.output_package_listing(installed_packages)
def run_editables(self, options):
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
editables_only=True,
)
self.output_package_listing(installed_packages)
def output_package_listing(self, installed_packages):
installed_packages = sorted(
installed_packages,
key=lambda dist: dist.project_name.lower(),
)
for dist in installed_packages:
if dist_is_editable(dist):
line = '%s (%s, %s)' % (
dist.project_name,
dist.version,
dist.location,
)
else:
line = '%s (%s)' % (dist.project_name, dist.version)
logger.info(line)
def run_uptodate(self, options):
uptodate = []
for dist, remote_version_raw, remote_version_parsed in \
self.find_packages_latests_versions(options):
if dist.parsed_version == remote_version_parsed:
uptodate.append(dist)
self.output_package_listing(uptodate)<|fim▁end|>
|
continue
except DistributionNotFound:
continue
else:
|
<|file_name|>snmalloc.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
pub use crate::default::*;
pub type Allocator = snmalloc_rs::SnMalloc;
pub const fn allocator() -> Allocator {
snmalloc_rs::SnMalloc<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>callbacks.rs<|end_file_name|><|fim▁begin|>#![allow(deprecated)]
use std::{
ffi::CStr,
io::{self, prelude::*},
panic::{self, UnwindSafe},
str::Utf8Error,
thread,
};
use ffi;
use libc;
use static_assertions::assert_obj_safe;
use crate::{edit, utils::FdWriter, Data, Error};
assert_obj_safe!(PassphraseProviderNew);
assert_obj_safe!(ProgressReporter);
assert_obj_safe!(StatusHandler);
assert_obj_safe!(InteractorNew);
#[derive(Debug, Copy, Clone)]
pub struct PassphraseRequest<'a> {
uid_hint: Option<&'a CStr>,
desc: Option<&'a CStr>,
pub prev_attempt_failed: bool,
}
impl<'a> PassphraseRequest<'a> {
pub fn user_id_hint(&self) -> Result<&'a str, Option<Utf8Error>> {
self.uid_hint
.map_or(Err(None), |s| s.to_str().map_err(Some))
}
pub fn user_id_hint_raw(&self) -> Option<&'a CStr> {
self.uid_hint
}
pub fn description(&self) -> Result<&'a str, Option<Utf8Error>> {
self.desc.map_or(Err(None), |s| s.to_str().map_err(Some))
}
pub fn description_raw(&self) -> Option<&'a CStr> {
self.desc
}
}
/// Upstream documentation:
/// [`gpgme_passphrase_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Passphrase-Callback.html#index-gpgme_005fpassphrase_005fcb_005ft)
#[deprecated(
since = "0.9.2",
note = "trait will be replaced with a new object safe trait of the same name"
)]
pub trait PassphraseProvider: UnwindSafe + Send {
fn get_passphrase<W: io::Write>(
&mut self, request: PassphraseRequest<'_>, out: W,
) -> Result<(), Error>;
}
impl<T: UnwindSafe + Send> PassphraseProvider for T
where T: FnMut(PassphraseRequest<'_>, &mut dyn io::Write) -> Result<(), Error>
{
fn get_passphrase<W: io::Write>(
&mut self, request: PassphraseRequest<'_>, mut out: W,
) -> Result<(), Error> {
(*self)(request, &mut out)
}
}
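// Illustrative sketch (hypothetical closure, not part of this module): the
// blanket impl above lets a plain `FnMut` closure act as a provider, e.g.
//
//     let mut provider = |_req: PassphraseRequest<'_>,
//                         out: &mut dyn io::Write| {
//         out.write_all(b"abc").map_err(Error::from)
//     };
//
// `passphrase_cb` below appends the terminating newline itself.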
/// Upstream documentation:
/// [`gpgme_passphrase_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Passphrase-Callback.html#index-gpgme_005fpassphrase_005fcb_005ft)
pub(crate) trait PassphraseProviderNew: UnwindSafe + Send {
fn get_passphrase(
&mut self, request: PassphraseRequest<'_>, out: &mut dyn Write,
) -> Result<(), Error>;
}
impl<T> PassphraseProviderNew for T
where T: PassphraseProvider
{
fn get_passphrase(
&mut self, request: PassphraseRequest<'_>, out: &mut dyn Write,
) -> Result<(), Error> {
<Self as PassphraseProvider>::get_passphrase(self, request, out)
}
}
#[derive(Debug, Copy, Clone)]
pub struct ProgressInfo<'a> {
what: Option<&'a CStr>,
pub typ: i64,
pub current: i64,
pub total: i64,
}
impl<'a> ProgressInfo<'a> {
pub fn what(&self) -> Result<&'a str, Option<Utf8Error>> {
self.what.map_or(Err(None), |s| s.to_str().map_err(Some))
}
pub fn what_raw(&self) -> Option<&'a CStr> {
self.what
}
}
/// Upstream documentation:
/// [`gpgme_progress_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Progress-Meter-Callback.html#index-gpgme_005fprogress_005fcb_005ft)
#[deprecated(
since = "0.9.2",
note = "trait will be replaced with the `ProgressReporter` trait"
)]
pub trait ProgressHandler: UnwindSafe + Send {
fn handle(&mut self, _info: ProgressInfo<'_>);
}
impl<T: UnwindSafe + Send> ProgressHandler for T
where T: FnMut(ProgressInfo<'_>)
{
fn handle(&mut self, info: ProgressInfo<'_>) {
(*self)(info);
}
}
/// Upstream documentation:
/// [`gpgme_progress_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Progress-Meter-Callback.html#index-gpgme_005fprogress_005fcb_005ft)
pub trait ProgressReporter: UnwindSafe + Send {
fn report(&mut self, info: ProgressInfo<'_>);
}
impl<T: ProgressHandler> ProgressReporter for T {
fn report(&mut self, info: ProgressInfo<'_>) {
self.handle(info);
}
}
/// Upstream documentation:
/// [`gpgme_status_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Status-Message-Callback.html#index-gpgme_005fstatus_005fcb_005ft)
pub trait StatusHandler: UnwindSafe + Send {
fn handle(&mut self, keyword: Option<&CStr>, args: Option<&CStr>) -> Result<(), Error>;
}
impl<T: UnwindSafe + Send> StatusHandler for T
where T: FnMut(Option<&CStr>, Option<&CStr>) -> Result<(), Error>
{
fn handle(&mut self, keyword: Option<&CStr>, args: Option<&CStr>) -> Result<(), Error> {
(*self)(keyword, args)
}
}
#[derive(Debug)]
pub struct EditInteractionStatus<'a> {
pub code: edit::StatusCode,
args: Option<&'a CStr>,
pub response: &'a mut Data<'a>,
}
impl<'a> EditInteractionStatus<'a> {
pub fn args(&self) -> Result<&'a str, Option<Utf8Error>> {
match self.args {
Some(s) => s.to_str().map_err(Some),
None => Err(None),
}
}
pub fn args_raw(&self) -> Option<&'a CStr> {
self.args
}
}
/// Upstream documentation:
/// [`gpgme_edit_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Deprecated-Functions.html#index-gpgme_005fedit_005fcb_005ft)
#[deprecated(since = "0.9.2")]
pub trait EditInteractor: UnwindSafe + Send {
fn interact<W: io::Write>(
&mut self, status: EditInteractionStatus<'_>, out: Option<W>,
) -> Result<(), Error>;
}
#[derive(Debug)]
pub struct InteractionStatus<'a> {
keyword: Option<&'a CStr>,
args: Option<&'a CStr>,
pub response: &'a mut Data<'a>,
}
impl<'a> InteractionStatus<'a> {
pub fn keyword(&self) -> Result<&'a str, Option<Utf8Error>> {
self.keyword.map_or(Err(None), |s| s.to_str().map_err(Some))
}
pub fn keyword_raw(&self) -> Option<&'a CStr> {
self.keyword
}
pub fn args(&self) -> Result<&'a str, Option<Utf8Error>> {
self.args.map_or(Err(None), |s| s.to_str().map_err(Some))
}
pub fn args_raw(&self) -> Option<&'a CStr> {
self.args
}
}
/// Upstream documentation:
/// [`gpgme_interact_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Advanced-Key-Editing.html#index-gpgme_005finteract_005fcb_005ft)
#[deprecated(
since = "0.9.2",
note = "trait will be replaced with a new object safe trait of the same name"
)]
pub trait Interactor: UnwindSafe + Send {
fn interact<W: io::Write>(
&mut self, status: InteractionStatus<'_>, out: Option<W>,
) -> Result<(), Error>;
}
/// Upstream documentation:
/// [`gpgme_interact_cb_t`](https://www.gnupg.org/documentation/manuals/gpgme/Advanced-Key-Editing.html#index-gpgme_005finteract_005fcb_005ft)
pub(crate) trait InteractorNew: UnwindSafe + Send {
fn interact(
&mut self, status: InteractionStatus<'_>, out: Option<&mut dyn Write>,
) -> Result<(), Error>;
}
impl<T: Interactor> InteractorNew for T {
fn interact(
&mut self, status: InteractionStatus<'_>, out: Option<&mut dyn Write>,
) -> Result<(), Error> {
<Self as Interactor>::interact(self, status, out)
}
}
pub(crate) struct Hook<T>(Option<thread::Result<T>>);
impl<T> From<T> for Hook<T> {
fn from(hook: T) -> Self {
Self(Some(Ok(hook)))
}
}
impl<T> Drop for Hook<T> {
fn drop(&mut self) {
if let Some(Err(err)) = self.0.take() {
panic::resume_unwind(err);
}
}
}
pub(crate) struct PassphraseCbGuard {
pub ctx: ffi::gpgme_ctx_t,
pub old: (ffi::gpgme_passphrase_cb_t, *mut libc::c_void),<|fim▁hole|>impl Drop for PassphraseCbGuard {
fn drop(&mut self) {
unsafe {
ffi::gpgme_set_passphrase_cb(self.ctx, self.old.0, self.old.1);
}
}
}
pub(crate) struct ProgressCbGuard {
pub ctx: ffi::gpgme_ctx_t,
pub old: (ffi::gpgme_progress_cb_t, *mut libc::c_void),
}
impl Drop for ProgressCbGuard {
fn drop(&mut self) {
unsafe {
ffi::gpgme_set_progress_cb(self.ctx, self.old.0, self.old.1);
}
}
}
pub(crate) struct StatusCbGuard {
pub ctx: ffi::gpgme_ctx_t,
pub old: (ffi::gpgme_status_cb_t, *mut libc::c_void),
}
impl Drop for StatusCbGuard {
fn drop(&mut self) {
unsafe {
ffi::gpgme_set_status_cb(self.ctx, self.old.0, self.old.1);
}
}
}
pub(crate) struct InteractorHook<'a, I> {
pub inner: Hook<I>,
pub response: *mut Data<'a>,
}
fn update_hook<T, F>(hook: &mut Option<thread::Result<T>>, f: F) -> ffi::gpgme_error_t
where
T: UnwindSafe,
F: UnwindSafe + FnOnce(&mut T) -> Result<(), Error>, {
let mut provider = match hook.take() {
Some(Ok(p)) => p,
other => {
*hook = other;
return ffi::GPG_ERR_GENERAL;
}
};
match panic::catch_unwind(move || {
let result = f(&mut provider);
(provider, result)
}) {
Ok((provider, result)) => {
*hook = Some(Ok(provider));
result.err().map_or(0, |err| err.raw())
}
Err(err) => {
*hook = Some(Err(err));
ffi::GPG_ERR_GENERAL
}
}
}
pub(crate) extern "C" fn passphrase_cb<P: PassphraseProviderNew>(
hook: *mut libc::c_void, uid_hint: *const libc::c_char, info: *const libc::c_char,
was_bad: libc::c_int, fd: libc::c_int,
) -> ffi::gpgme_error_t {
let hook = unsafe { &mut *(hook as *mut Hook<P>) };
update_hook(&mut hook.0, move |h| unsafe {
let info = PassphraseRequest {
uid_hint: uid_hint.as_ref().map(|s| CStr::from_ptr(s)),
desc: info.as_ref().map(|s| CStr::from_ptr(s)),
prev_attempt_failed: was_bad != 0,
};
let mut writer = FdWriter::new(fd);
h.get_passphrase(info, &mut writer)
.and_then(|_| writer.write_all(b"\n").map_err(Error::from))
})
}
pub(crate) extern "C" fn progress_cb<H: ProgressReporter>(
hook: *mut libc::c_void, what: *const libc::c_char, typ: libc::c_int, current: libc::c_int,
total: libc::c_int,
) {
let hook = unsafe { &mut *(hook as *mut Hook<H>) };
update_hook(&mut hook.0, move |h| unsafe {
let info = ProgressInfo {
what: what.as_ref().map(|s| CStr::from_ptr(s)),
typ: typ.into(),
current: current.into(),
total: total.into(),
};
h.report(info);
Ok(())
});
}
pub(crate) extern "C" fn status_cb<H: StatusHandler>(
hook: *mut libc::c_void, keyword: *const libc::c_char, args: *const libc::c_char,
) -> ffi::gpgme_error_t {
let hook = unsafe { &mut *(hook as *mut Hook<H>) };
update_hook(&mut hook.0, move |h| unsafe {
let keyword = keyword.as_ref().map(|s| CStr::from_ptr(s));
let args = args.as_ref().map(|s| CStr::from_ptr(s));
        h.handle(keyword, args)
})
}
pub(crate) extern "C" fn edit_cb<E: EditInteractor>(
hook: *mut libc::c_void, status: ffi::gpgme_status_code_t, args: *const libc::c_char,
fd: libc::c_int,
) -> ffi::gpgme_error_t {
let hook = unsafe { &mut *(hook as *mut InteractorHook<'_, E>) };
let response = hook.response;
update_hook(&mut hook.inner.0, move |h| unsafe {
let status = EditInteractionStatus {
code: edit::StatusCode::from_raw(status),
args: args.as_ref().map(|s| CStr::from_ptr(s)),
response: &mut *response,
};
if fd < 0 {
h.interact(status, None::<&mut dyn io::Write>)
} else {
h.interact(status, Some(FdWriter::new(fd)))
}
})
}
pub(crate) extern "C" fn interact_cb<I: InteractorNew>(
hook: *mut libc::c_void, keyword: *const libc::c_char, args: *const libc::c_char,
fd: libc::c_int,
) -> ffi::gpgme_error_t {
let hook = unsafe { &mut *(hook as *mut InteractorHook<'_, I>) };
let response = hook.response;
update_hook(&mut hook.inner.0, move |h| unsafe {
let status = InteractionStatus {
keyword: keyword.as_ref().map(|s| CStr::from_ptr(s)),
args: args.as_ref().map(|s| CStr::from_ptr(s)),
response: &mut *response,
};
if fd < 0 {
h.interact(status, None)
} else {
h.interact(status, Some(&mut FdWriter::new(fd)))
}
})
}<|fim▁end|>
|
}
|
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>"""
accounts.test_views
===================
Tests the REST API calls.
Add more specific social registration tests
"""
import responses
from django.core.urlresolvers import reverse
from django.core import mail
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from django.test.utils import override_settings
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from allauth.account import app_settings
from allauth.socialaccount.models import SocialApp
from allauth.socialaccount.providers.facebook.provider import GRAPH_API_URL
from .serializers import LoginSerializer
class TestAccounts(APITestCase):
""" Tests normal use - non social login. """
def setUp(self):
self.login_url = reverse('accounts:rest_login')
self.logout_url = reverse('accounts:rest_logout')
self.register_url = reverse('accounts:rest_register')
self.password_reset_url = reverse('accounts:rest_password_reset')
self.rest_password_reset_confirm_url = reverse('accounts:rest_password_reset_confirm')
self.password_change_url = reverse('accounts:rest_password_change')
self.verify_url = reverse('accounts:rest_verify_email')
self.user_url = reverse('accounts:rest_user_details')
self.client = APIClient()
self.reusable_user_data = {'username': 'admin', 'email': '[email protected]', 'password': 'password12'}
self.reusable_user_data_change_password = {'username': 'admin', 'email': '[email protected]', 'password': 'password_same'}
self.reusable_register_user_data = {'username': 'admin', 'email': '[email protected]', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data1 = {'username': 'admin1', 'email': '[email protected]', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data_no_username = {'email': '[email protected]', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data_no_email = {'username': 'admin', 'password1': 'password12', 'password2': 'password12'}
self.change_password_data_incorrect = {"new_password1": "password_not_same", "new_password2": "password_same"}
self.change_password_data = {"new_password1": "password_same", "new_password2": "password_same"}
self.change_password_data_old_password_field_enabled = {"old_password": "password12", "new_password1": "password_same", "new_password2": "password_same"}
def create_user_and_login(self):
""" Helper function to create a basic user, login and assign token credentials. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, "Snap! Basic Login has failed with a helper function 'create_user_and_login'. Something is really wrong here.")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + response.data['key'])
def _generate_uid_and_token(self, user):
result = {}
from django.utils.encoding import force_bytes
from django.contrib.auth.tokens import default_token_generator
from django import VERSION
if VERSION[1] == 5:
from django.utils.http import int_to_base36
result['uid'] = int_to_base36(user.pk)
else:
from django.utils.http import urlsafe_base64_encode
result['uid'] = urlsafe_base64_encode(force_bytes(user.pk))
result['token'] = default_token_generator.make_token(user)
return result
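    # Illustrative output (hypothetical values, not part of the test suite):
    # _generate_uid_and_token(user) returns something like
    #   {'uid': 'MQ', 'token': '5b2-a39c7fd2f2b3'}
    # i.e. the kwargs a password-reset-confirm request expects.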
def cleanUp(self):
pass
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_basic_username_auth_method(self):
""" Tests basic functionality of login with authentication method of username. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin3', '', 'password12')
data = {"username": 'admin3', "email": "", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL,
ACCOUNT_EMAIL_REQUIRED=True)
def test_login_basic_email_auth_method(self):
""" Tests basic functionality of login with authentication method of email. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
data = {"username": '', "email": "[email protected]", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_basic_username_email_auth_method(self):
""" Tests basic functionality of login with authentication method of username or email. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
# Check email
data = {"username": '', "email": "[email protected]", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
# Check username
data = {"username": 'admin', "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_auth_method_username_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'username'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'email'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'username_email'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_test_login_fail_incorrect_change(self):
# Create user, login and try and change password INCORRECTLY
self.create_user_and_login()
self.client.post(self.password_change_url, data=self.change_password_data_incorrect, format='json')
# Remove credentials
self.client.credentials()
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth username). """
self.common_test_login_fail_incorrect_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth email). """
self.common_test_login_fail_incorrect_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth username_email). """
self.common_test_login_fail_incorrect_change()
def common_test_login_correct_password_change(self):
# Create user, login and try and change password successfully
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
# Remove credentials
self.client.credentials()
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth username). """
self.common_test_login_correct_password_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth email). """
self.common_test_login_correct_password_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth username_email). """
self.common_test_login_correct_password_change()
def test_login_fail_no_input(self):
""" Tests login fails when you provide no username and no email (login auth username_email). """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
data = {"username": '', "email": '', "password": ''}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username (login auth username). """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
data = {"username": '', "email": "[email protected]", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username (login auth email). """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
data = {"username": "admin", "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username and no email (login auth username_email). """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
data = {"username": '', "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
# TODO: check for the auth token in the login response
# TODO: test login after a password change
# TODO: test login fails after a rejected password change
def test_logout(self):
""" Tests basic logout functionality. """
self.create_user_and_login()
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
def test_logout_but_already_logged_out(self):
""" Tests logout when already logged out. """
self.create_user_and_login()
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
self.client.credentials() # remember to remove manual token credential
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
def test_change_password_basic(self):
""" Tests basic functionality of 'change of password'. """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"New password has been saved."}')
def test_change_password_basic_fails_not_authorised(self):
""" Tests basic functionality of 'change of password' fails if not authorised. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEquals(response.content, '{"detail":"Authentication credentials were not provided."}')
def common_change_password_login_fail_with_old_password(self, password_change_data):
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_change_password_login_pass_with_new_password(self, password_change_data):
self.create_user_and_login()
response = self.client.post(self.password_change_url, password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_change_password_login_fail_with_old_password_pass_with_new_password(self, password_change_data):
""" Tests change of password with old password fails but new password successes. """
self.create_user_and_login()
response = self.client.post(self.password_change_url, password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
def test_change_password_login_fail_with_old_password(self):
""" Tests change of password with old password. """
self.common_change_password_login_fail_with_old_password(self.change_password_data)
def test_change_password_login_pass_with_new_password(self):
""" Tests change of password with new password. """
self.common_change_password_login_pass_with_new_password(self.change_password_data)
def test_change_password_login_fail_with_old_password_pass_with_new_password(self):
""" Tests change of password with old password fails but new password successes. """
self.common_change_password_login_fail_with_old_password_pass_with_new_password(self.change_password_data)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_old_password_field_required_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' fails if old password not given as part of input (old password field enabled). """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"old_password":["This field is required."]}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_basic_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' (old password enabled). """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data_old_password_field_enabled, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"New password has been saved."}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_basic_fails_not_authorised_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' fails if not authorised (old password field enabled). """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.post(self.password_change_url, data=self.change_password_data_old_password_field_enabled, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEquals(response.content, '{"detail":"Authentication credentials were not provided."}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_fail_with_old_password_old_password_field_enabled(self):
""" Tests change of password with old password (old password field enabled). """
self.common_change_password_login_fail_with_old_password(self.change_password_data_old_password_field_enabled)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_pass_with_new_password_old_password_field_enabled(self):
""" Tests change of password with new password (old password field enabled). """
self.common_change_password_login_pass_with_new_password(self.change_password_data_old_password_field_enabled)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_fail_with_old_password_pass_with_new_password_old_password_field_enabled(self):
""" Tests change of password with old password fails but new password successes (old password field enabled). """
self.common_change_password_login_fail_with_old_password_pass_with_new_password(self.change_password_data_old_password_field_enabled)
"""
Registrations Tests
===================
"""
def common_test_registration_basic(self, data):
response = self.client.post(self.register_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_201_CREATED, response.content)
return response
@override_settings(ACCOUNT_EMAIL_REQUIRED=True, ACCOUNT_USERNAME_REQUIRED=True)
def test_registration_basic(self):
""" Tests basic functionality of registration. """
self.common_test_registration_basic(self.reusable_register_user_data)
@override_settings(ACCOUNT_EMAIL_REQUIRED=True, ACCOUNT_USERNAME_REQUIRED=False)
def test_registration_basic_no_username(self):
""" Tests basic functionality of registration (no username required). """
self.common_test_registration_basic(self.reusable_register_user_data_no_username)
@override_settings(ACCOUNT_EMAIL_REQUIRED=False, ACCOUNT_USERNAME_REQUIRED=True)
def test_registration_basic_no_email(self):
""" Tests basic functionality of registration (no username required). """
self.common_test_registration_basic(self.reusable_register_user_data_no_email)
@override_settings(ACCOUNTS_REGISTRATION_OPEN=False)
def test_registration_basic_registration_not_open(self):
""" Tests basic registration fails if registration is closed. """
response = self.client.post(self.register_url, self.reusable_register_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none")
def test_registration_email_verification_not_necessary(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data)
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional")
def test_registration_email_verification_neccessary(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data)
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_test_registration(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_test_registration_email_verification_not_necessary_email(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_test_registration_email_verification_not_necessary_username(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_email()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_optional_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_email()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_username()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_optional_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_username()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_username_email(self):
""" Tests you canT log in without email verification for username & email auth. """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_optional_username_email(self):
""" Tests you canT log in without email verification for username & email auth. """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_necessary_login_fail_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_necessary_login_fail_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_necessary_login_fail_username_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic({'username': 'admin_man', 'email': '[email protected]', 'password1': 'password12', 'password2': 'password12'})
response = self.client.post(self.login_url, {'username': 'admin_man', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_registration_email_verification_neccessary_verified_login(self, login_data):
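# Flow: register (exactly one confirmation email goes out), login fails while unverified, confirm the email, then login succeeds.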
mail_count = len(mail.outbox)
reg_response = self.common_test_registration_basic(self.reusable_register_user_data1)
self.assertEquals(len(mail.outbox), mail_count + 1)
new_user = get_user_model().objects.latest('id')
login_response = self.client.post(self.login_url, login_data, format='json')
self.assertEquals(login_response.status_code, status.HTTP_400_BAD_REQUEST)
# verify email
email_confirmation = new_user.emailaddress_set.get(email=self.reusable_register_user_data1['email']).emailconfirmation_set.order_by('-created')[0]
verify_response = self.client.post(self.verify_url, {'key': email_confirmation.key}, format='json')
self.assertEquals(verify_response.status_code, status.HTTP_200_OK)
login_response = self.client.post(self.login_url, login_data, format='json')
self.assertEquals(login_response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_verified_login_username(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'username': 'admin1', 'password': 'password12'})
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_verified_login_email(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'email': '[email protected]', 'password': 'password12'})
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_verified_login_username_email(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'username': 'admin1', 'password': 'password12'})
"""
Password Reset Tests
====================
"""
def test_password_reset(self):
""" Test basic functionality of password reset. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
payload = {'email': '[email protected]'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Password reset e-mail has been sent."}')
@override_settings(ACCOUNTS_PASSWORD_RESET_NOTIFY_EMAIL_NOT_IN_SYSTEM=True)
def test_password_reset_fail_no_user_with_email_no_notify_not_in_system(self):
""" Test basic functionality of password reset fails when there is no email on record (notify email not in system). """
payload = {'email': '[email protected]'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"error":"User with email doesn\'t exist. Did not send reset email."}')
@override_settings(ACCOUNTS_PASSWORD_RESET_NOTIFY_EMAIL_NOT_IN_SYSTEM=False)
def test_password_reset_no_user_with_email_no_notify_not_in_system(self):
""" Test basic functionality of password reset fails when there is no email on record. """
payload = {'email': '[email protected]'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Password reset e-mail has been sent."}')
def test_password_reset_confirm_fail_invalid_token(self):
""" Test password reset confirm fails if token is invalid. """
user = get_user_model().objects.create_user('admin', '[email protected]', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': url_kwargs['uid'],
'token': '-wrong-token-'
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"token":["Invalid value"]}')
def test_password_reset_confirm_fail_invalid_uid(self):
""" Test password reset confirm fails if uid is invalid. """
user = get_user_model().objects.create_user('admin', '[email protected]', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': 0,
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"uid":["Invalid value"]}')
def test_password_reset_confirm_fail_passwords_not_the_same(self):
""" Test password reset confirm fails if uid is invalid. """
user = get_user_model().objects.create_user('admin', '[email protected]', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_not_the_same_password',
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"new_password2":["The two password fields didn\'t match."]}')
def test_password_reset_confirm_login(self):
""" Tests password reset confirm works -> can login afterwards. """
user = get_user_model().objects.create_user('admin', '[email protected]', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',<|fim▁hole|> self.assertEquals(response.status_code, status.HTTP_200_OK)
response = self.client.post(self.login_url, {'username': 'admin', 'email': '[email protected]', 'password': 'new_password'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def test_password_reset_confirm_login_fails_with_old_password(self):
""" Tests password reset confirm fails with old password. """
user = get_user_model().objects.create_user('admin', '[email protected]', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
response = self.client.post(self.login_url, {'username': 'admin', 'email': '[email protected]', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
"""
User Detail Tests
=================
"""
def test_user_details_get(self):
""" Test to retrieve user details. """
self.create_user_and_login()
response = self.client.get(self.user_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"admin","email":"[email protected]","first_name":"","last_name":""}')
def test_user_details_put(self):
""" Test to put update user details. """
self.create_user_and_login()
response = self.client.put(self.user_url, {"username":"changed","email":"[email protected]","first_name":"changed","last_name":"name"}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"changed","email":"[email protected]","first_name":"changed","last_name":"name"}')
def test_user_details_patch(self):
""" Test to patch update user details. """
self.create_user_and_login()
response = self.client.patch(self.user_url, {'username': 'changed_username', 'email': '[email protected]'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"changed_username","email":"[email protected]","first_name":"","last_name":""}')
def test_user_details_put_not_authenticated(self):
""" Test to put update user details. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.put(self.user_url, {"username":"changed","email":"[email protected]","first_name":"changed","last_name":"name"}, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_details_patch_not_authenticated(self):
""" Test to patch update user details. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.patch(self.user_url, {'username': 'changed_username', 'email': '[email protected]'}, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_details_get_not_authenticated(self):
""" Test to retrieve user details. """
get_user_model().objects.create_user('admin', '[email protected]', 'password12')
response = self.client.get(self.user_url, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TestAccountsSocial(APITestCase):
""" Tests normal for social login. """
urls = 'accounts.test_social_urls'
def setUp(self):
self.fb_login_url = reverse('fb_login')
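# django-allauth needs a SocialApp for the provider, attached to the current Site, before social login can be exercised.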
social_app = SocialApp.objects.create(
provider='facebook',
name='Facebook',
client_id='123123123',
secret='321321321',
)
site = Site.objects.get_current()
social_app.sites.add(site)
self.graph_api_url = GRAPH_API_URL + '/me'
@responses.activate
def test_social_auth(self):
""" Tests Social Login. """
resp_body = '{"id":"123123123123","first_name":"John","gender":"male","last_name":"Smith","link":"https:\\/\\/www.facebook.com\\/john.smith","locale":"en_US","name":"John Smith","timezone":2,"updated_time":"2014-08-13T10:14:38+0000","username":"john.smith","verified":true}' # noqa
responses.add(
responses.GET,
self.graph_api_url,
body=resp_body,
status=200,
content_type='application/json'
)
users_count = get_user_model().objects.all().count()
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
@responses.activate
def test_social_auth_only_one_user_created(self):
""" Tests Social Login. """
resp_body = '{"id":"123123123123","first_name":"John","gender":"male","last_name":"Smith","link":"https:\\/\\/www.facebook.com\\/john.smith","locale":"en_US","name":"John Smith","timezone":2,"updated_time":"2014-08-13T10:14:38+0000","username":"john.smith","verified":true}' # noqa
responses.add(
responses.GET,
self.graph_api_url,
body=resp_body,
status=200,
content_type='application/json'
)
users_count = get_user_model().objects.all().count()
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
# make sure that second request will not create a new user
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
@responses.activate
def test_failed_social_auth(self):
# fake response
responses.add(
responses.GET,
self.graph_api_url,
body='',
status=400,
content_type='application/json'
)
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)<|fim▁end|>
|
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
|
<|file_name|>entity.module.ts<|end_file_name|><|fim▁begin|>import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
import { GatewayQuoteModule as QuotesQuoteModule } from './quotes/quote/quote.module';
/* jhipster-needle-add-entity-module-import - JHipster will add entity modules imports here */
@NgModule({
// prettier-ignore<|fim▁hole|> QuotesQuoteModule,
/* jhipster-needle-add-entity-module - JHipster will add entity modules here */
],
declarations: [],
entryComponents: [],
providers: [],
schemas: [CUSTOM_ELEMENTS_SCHEMA]
})
export class GatewayEntityModule {}<|fim▁end|>
|
imports: [
|
<|file_name|>player.py<|end_file_name|><|fim▁begin|># This file is part of Gem.
#
# Gem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gem is distributed in the hope that it will be useful,<|fim▁hole|># GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gem. If not, see <http://www.gnu.org/licenses/\>.
from gem.api import Location
from enum import Enum
LOG_TAG = "player"
def player_position_update(player, location, warped):
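# Engine hook: persist the player's new in-game position on their profile.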
profile = player.profile
profile.location = location<|fim▁end|>
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
<|file_name|>CombatEvent.ts<|end_file_name|><|fim▁begin|>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
export {};
declare global {
interface CombatEvent {
fromName: string;
fromFaction: any;
toName: string;
toFaction: any;
damages?: {
sent: number;
received: number;
part: BodyPart;
type: DamageType;
}[];
// damage against an ability's disruption health; high enough disruption damage causes an interrupt
disruption?: {
sent: number;
received: number;
tracksInterrupted?: AbilityTrack;
source: string;
};
heals?: {
sent: number;
received: number;
part: BodyPart;
}[];
// Array of statuses
statuses?: {
name: string;
action: any;
duration: number;
}[];
// Array of body part ids that received a cure, i.e. [1, 1, 2] = 2 cures on body part 1 and 1 cure on body part 2
cures?: BodyPart[];
// resources spent or gained
resources?: {
sent: number;
received: number;
type: any;
}[];
// impulse = knock back or a force applied to your character
impulse?: {
sent: number;
received: number;
};
activeEffects?: {
name: string;
action: ActiveEffectAction;
duration: string;<|fim▁hole|>
errors?: {
msg: string;
}[];
}
}<|fim▁end|>
|
}[];
|
<|file_name|>user.ts<|end_file_name|><|fim▁begin|>/* tslint:disable */
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/startWith';
import 'rxjs/add/operator/switchMap';
import 'rxjs/add/operator/mergeMap';
import 'rxjs/add/operator/toArray';
import { Observable } from 'rxjs/Observable';
import { of } from 'rxjs/observable/of';
import { concat } from 'rxjs/observable/concat';
import { Injectable, Inject } from '@angular/core';
import { Effect, Actions, toPayload } from '@ngrx/effects';
import { Action } from '@ngrx/store';
<|fim▁hole|>import { AccountApi } from '../../sdk';
@Injectable()
export class UserEffects extends BaseEffects {
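// Each handwritten effect below follows one shape: filter on an action type, unwrap the payload, call the SDK, and map the result to a success action or to fail + error actions.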
@Effect()
protected login: Observable<Action> = this.actions$
.ofType(UserActionTypes.LOGIN)
.map(toPayload)
.mergeMap((payload) =>
this.user.login(payload.credentials, payload.include, payload.rememberMe)
.map((response) => new UserActions.loginSuccess(response))
.catch((error) => concat(
of(new UserActions.loginFail(error)),
of(new ErrorActions.error(error))
))
);
@Effect()
protected register: Observable<Action> = this.actions$
.ofType(UserActionTypes.REGISTER)
.map(toPayload)
.mergeMap((payload) =>
this.user.create(payload.credentials)
.map((response) => new UserActions.registerSuccess(response))
.catch((error) => concat(
of(new UserActions.registerFail(error)),
of(new ErrorActions.error(error))
))
);
@Effect()
protected logout: Observable<Action> = this.actions$
.ofType(UserActionTypes.LOGOUT)
.map(toPayload)
.mergeMap((payload) =>
this.user.logout()
.map(() => new UserActions.logoutSuccess())
.catch((error) => concat(
of(new UserActions.logoutFail()),
of(new ErrorActions.error(error))
))
);
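// The remaining CRUD effects are declared as placeholders; BaseEffects appears to wire them up generically from UserActionTypes in the constructor.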
@Effect() protected create: any;
@Effect() protected createMany: any;
@Effect() protected findById: any;
@Effect() protected find: any;
@Effect() protected findOne: any;
@Effect() protected updateAll: any;
@Effect() protected deleteById: any;
@Effect() protected updateAttributes: any;
@Effect() protected upsert: any;
@Effect() protected upsertWithWhere: any;
@Effect() protected replaceOrCreate: any;
@Effect() protected replaceById: any;
@Effect() protected patchOrCreate: any;
@Effect() protected patchAttributes: any;
constructor(
@Inject(Actions) public actions$: Actions,
@Inject(AccountApi) public user: AccountApi
) {
super(actions$, user, 'Account', UserActionTypes);
}
}<|fim▁end|>
|
import { BaseEffects } from './base';
import { UserActionTypes, UserActions } from '../actions/user';
import { ErrorActions } from '../actions/error';
|
<|file_name|>OEP.py<|end_file_name|><|fim▁begin|>"Yang/Wu's OEP implementation, in PyQuante."
from math import sqrt
import settings
from PyQuante.NumWrap import zeros,matrixmultiply,transpose,dot,identity,\
array,solve
from PyQuante.Ints import getbasis, getints, getJ,get2JmK,getK
from PyQuante.LA2 import geigh,mkdens,trace2,simx
from PyQuante.hartree_fock import get_fock
from PyQuante.CGBF import three_center
from PyQuante.optimize import fminBFGS
from PyQuante.fermi_dirac import get_efermi, get_fermi_occs,mkdens_occs,\
get_entropy,mkdens_fermi
import logging
logger = logging.getLogger("pyquante")
gradcall=0
class EXXSolver:
"EXXSolver(solver)"
def __init__(self,solver):
# Solver is a pointer to a HF or a DFT calculation that has
# already converged
self.solver = solver
self.bfs = self.solver.bfs
self.nbf = len(self.bfs)
self.S = self.solver.S
self.h = self.solver.h
self.Ints = self.solver.Ints
self.molecule = self.solver.molecule
self.nel = self.molecule.get_nel()
self.nclosed, self.nopen = self.molecule.get_closedopen()
self.Enuke = self.molecule.get_enuke()
self.norb = self.nbf
self.orbs = self.solver.orbs
self.orbe = self.solver.orbe
self.Gij = []
for g in xrange(self.nbf):
gmat = zeros((self.nbf,self.nbf),'d')
self.Gij.append(gmat)
gbf = self.bfs[g]
for i in xrange(self.nbf):
ibf = self.bfs[i]<|fim▁hole|> gmat[i,j] = gij
gmat[j,i] = gij
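# Seed potential: Fermi-Amaldi, built from the Coulomb matrix of the converged density (the oep() routine below uses the same construction).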
D0 = mkdens(self.orbs,0,self.nclosed)
J0 = getJ(self.Ints,D0)
Vfa = (2.0*(self.nel-1.0)/self.nel)*J0
self.H0 = self.h + Vfa
self.b = zeros(self.nbf,'d')
return
def iterate(self,**kwargs):
self.iter = 0
self.etemp = kwargs.get("etemp",settings.DFTElectronTemperature)
logging.debug("iter Energy <b|b>")
logging.debug("---- ------ -----")
self.b = fminBFGS(self.get_energy,self.b,self.get_gradient,logger=logging)
return
def get_energy(self,b):
self.iter += 1
self.Hoep = get_Hoep(b,self.H0,self.Gij)
self.orbe,self.orbs = geigh(self.Hoep,self.S)
if self.etemp:
self.D,self.entropy = mkdens_fermi(self.nel,self.orbe,self.orbs,
self.etemp)
else:
self.D = mkdens(self.orbs,0,self.nclosed)
self.entropy=0
self.F = get_fock(self.D,self.Ints,self.h)
self.energy = trace2(self.h+self.F,self.D)+self.Enuke + self.entropy
if self.iter == 1 or self.iter % 10 == 0:
logging.debug("%4d %10.5f %10.5f" % (self.iter,self.energy,dot(b,b)))
return self.energy
def get_gradient(self,b):
energy = self.get_energy(b)
Fmo = simx(self.F,self.orbs)
bp = zeros(self.nbf,'d')
for g in xrange(self.nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = simx(self.Gij[g],self.orbs)
# Now sum the appropriate terms to get the b gradient
for i in xrange(self.nclosed):
for a in xrange(self.nclosed,self.norb):
bp[g] = bp[g] + Fmo[i,a]*Gmo[i,a]/(self.orbe[i]-self.orbe[a])
#logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
return bp
class UEXXSolver:
"EXXSolver(solver)"
def __init__(self,solver):
# Solver is a pointer to a UHF calculation that has
# already converged
self.solver = solver
self.bfs = self.solver.bfs
self.nbf = len(self.bfs)
self.S = self.solver.S
self.h = self.solver.h
self.Ints = self.solver.Ints
self.molecule = self.solver.molecule
self.nel = self.molecule.get_nel()
self.nalpha, self.nbeta = self.molecule.get_alphabeta()
self.Enuke = self.molecule.get_enuke()
self.norb = self.nbf
self.orbsa = self.solver.orbsa
self.orbsb = self.solver.orbsb
self.orbea = self.solver.orbea
self.orbeb = self.solver.orbeb
self.Gij = []
for g in xrange(self.nbf):
gmat = zeros((self.nbf,self.nbf),'d')
self.Gij.append(gmat)
gbf = self.bfs[g]
for i in xrange(self.nbf):
ibf = self.bfs[i]
for j in xrange(i+1):
jbf = self.bfs[j]
gij = three_center(ibf,gbf,jbf)
gmat[i,j] = gij
gmat[j,i] = gij
D0 = mkdens(self.orbsa,0,self.nalpha)+mkdens(self.orbsb,0,self.nbeta)
J0 = getJ(self.Ints,D0)
Vfa = ((self.nel-1.)/self.nel)*J0
self.H0 = self.h + Vfa
self.b = zeros(2*self.nbf,'d')
return
def iterate(self,**kwargs):
self.etemp = kwargs.get("etemp",settings.DFTElectronTemperature)
self.iter = 0
logging.debug("iter Energy <b|b>")
logging.debug("---- ------ -----")
self.b = fminBFGS(self.get_energy,self.b,self.get_gradient,logger=logging)
return
def get_energy(self,b):
self.iter += 1
ba = b[:self.nbf]
bb = b[self.nbf:]
self.Hoepa = get_Hoep(ba,self.H0,self.Gij)
self.Hoepb = get_Hoep(bb,self.H0,self.Gij)
self.orbea,self.orbsa = geigh(self.Hoepa,self.S)
self.orbeb,self.orbsb = geigh(self.Hoepb,self.S)
if self.etemp:
self.Da,entropya = mkdens_fermi(2*self.nalpha,self.orbea,self.orbsa,
self.etemp)
self.Db,entropyb = mkdens_fermi(2*self.nbeta,self.orbeb,self.orbsb,
self.etemp)
self.entropy = 0.5*(entropya+entropyb)
else:
self.Da = mkdens(self.orbsa,0,self.nalpha)
self.Db = mkdens(self.orbsb,0,self.nbeta)
self.entropy=0
J = getJ(self.Ints,self.Da+self.Db)
Ka = getK(self.Ints,self.Da)
Kb = getK(self.Ints,self.Db)
self.Fa = self.h + J - Ka
self.Fb = self.h + J - Kb
self.energy = 0.5*(trace2(self.h+self.Fa,self.Da) +
trace2(self.h+self.Fb,self.Db))\
+ self.Enuke + self.entropy
if self.iter == 1 or self.iter % 10 == 0:
logging.debug("%4d %10.5f %10.5f" % (self.iter,self.energy,dot(b,b)))
return self.energy
def get_gradient(self,b):
energy = self.get_energy(b)
Fmoa = simx(self.Fa,self.orbsa)
Fmob = simx(self.Fb,self.orbsb)
bp = zeros(2*self.nbf,'d')
for g in xrange(self.nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = simx(self.Gij[g],self.orbsa)
# Now sum the appropriate terms to get the b gradient
for i in xrange(self.nalpha):
for a in xrange(self.nalpha,self.norb):
bp[g] += Fmoa[i,a]*Gmo[i,a]/(self.orbea[i]-self.orbea[a])
for g in xrange(self.nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = simx(self.Gij[g],self.orbsb)
# Now sum the appropriate terms to get the b gradient
for i in xrange(self.nbeta):
for a in xrange(self.nbeta,self.norb):
bp[self.nbf+g] += Fmob[i,a]*Gmo[i,a]/(self.orbeb[i]-self.orbeb[a])
#logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
return bp
def exx(atoms,orbs,**kwargs):
return oep_hf(atoms,orbs,**kwargs)
def oep_hf(atoms,orbs,**kwargs):
"""oep_hf - Form the optimized effective potential for HF exchange.
See notes on options and other args in oep routine.
"""
return oep(atoms,orbs,get_exx_energy,get_exx_gradient,**kwargs)
def oep(atoms,orbs,energy_func,grad_func=None,**kwargs):
"""oep - Form the optimized effective potential for a given energy expression
oep(atoms,orbs,energy_func,grad_func=None,**kwargs)
atoms A Molecule object containing a list of the atoms
orbs A matrix of guess orbitals
energy_func The function that returns the energy for the given method
grad_func The function that returns the force for the given method
Options
-------
verbose False Output terse information to stdout (default)
True Print out additional information
ETemp False Use ETemp value for finite temperature DFT (default)
float Use (float) for the electron temperature
bfs None The basis functions to use. List of CGBF's
basis_data None The basis data to use to construct bfs
integrals None The one- and two-electron integrals to use
If not None, S,h,Ints
"""
verbose = kwargs.get('verbose')
ETemp = kwargs.get('ETemp',settings.DFTElectronTemperature)
opt_method = kwargs.get('opt_method',settings.OEPOptMethod)
bfs = getbasis(atoms,**kwargs)
# The basis set for the potential can be set different from
# that used for the wave function
pbfs = kwargs.get('pbfs')
if not pbfs: pbfs = bfs
npbf = len(pbfs)
S,h,Ints = getints(bfs,atoms,**kwargs)
nel = atoms.get_nel()
nocc,nopen = atoms.get_closedopen()
Enuke = atoms.get_enuke()
# Form the OEP using Yang/Wu, PRL 89 143002 (2002)
nbf = len(bfs)
norb = nbf
bp = zeros(nbf,'d')
bvec = kwargs.get('bvec')
if bvec:
assert len(bvec) == npbf
b = array(bvec)
else:
b = zeros(npbf,'d')
# Form and store all of the three-center integrals
# we're going to need.
# These are <ibf|gbf|jbf> (where 'bf' indicates basis func,
# as opposed to MO)
# N^3 storage -- obviously you don't want to do this for
# very large systems
Gij = []
for g in xrange(npbf):
gmat = zeros((nbf,nbf),'d')
Gij.append(gmat)
gbf = pbfs[g]
for i in xrange(nbf):
ibf = bfs[i]
for j in xrange(i+1):
jbf = bfs[j]
gij = three_center(ibf,gbf,jbf)
gmat[i,j] = gij
gmat[j,i] = gij
# Compute the Fermi-Amaldi potential based on the LDA density.
# We're going to form this matrix from the Coulombic matrix that
# arises from the input orbitals. D0 and J0 refer to the density
# matrix and corresponding Coulomb matrix
D0 = mkdens(orbs,0,nocc)
J0 = getJ(Ints,D0)
Vfa = (2*(nel-1.)/nel)*J0
H0 = h + Vfa
b = fminBFGS(energy_func,b,grad_func,
(nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij),
logger=logging)
energy,orbe,orbs = energy_func(b,nbf,nel,nocc,ETemp,Enuke,
S,h,Ints,H0,Gij,return_flag=1)
return energy,orbe,orbs
def get_exx_energy(b,nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij,**kwargs):
"""Computes the energy for the OEP/HF functional
Options:
return_flag 0 Just return the energy
1 Return energy, orbe, orbs
2 Return energy, orbe, orbs, F
"""
return_flag = kwargs.get('return_flag')
Hoep = get_Hoep(b,H0,Gij)
orbe,orbs = geigh(Hoep,S)
if ETemp:
efermi = get_efermi(nel,orbe,ETemp)
occs = get_fermi_occs(efermi,orbe,ETemp)
D = mkdens_occs(orbs,occs)
entropy = get_entropy(occs,ETemp)
else:
D = mkdens(orbs,0,nocc)
F = get_fock(D,Ints,h)
energy = trace2(h+F,D)+Enuke
if ETemp: energy += entropy
iref = nel/2
gap = 627.51*(orbe[iref]-orbe[iref-1])
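# 627.51 converts Hartree to kcal/mol, so the HOMO-LUMO gap is logged in kcal/mol.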
logging.debug("EXX Energy, B, Gap: %10.5f %10.5f %10.5f"
% (energy,sqrt(dot(b,b)),gap))
#logging.debug("%s" % orbe)
if return_flag == 1:
return energy,orbe,orbs
elif return_flag == 2:
return energy,orbe,orbs,F
return energy
def get_exx_gradient(b,nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij,**kwargs):
"""Computes the gradient for the OEP/HF functional.
return_flag 0 Just return gradient
1 Return energy,gradient
2 Return energy,gradient,orbe,orbs
"""
# Dump the gradient every 10 steps so we can restart...
global gradcall
gradcall += 1
#if gradcall % 5 == 0: logging.debug("B vector:\n%s" % b)
# Form the new potential and the new orbitals
energy,orbe,orbs,F = get_exx_energy(b,nbf,nel,nocc,ETemp,Enuke,
S,h,Ints,H0,Gij,return_flag=2)
Fmo = matrixmultiply(transpose(orbs),matrixmultiply(F,orbs))
norb = nbf
bp = zeros(nbf,'d') # dE/db
for g in xrange(nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = matrixmultiply(transpose(orbs),matrixmultiply(Gij[g],orbs))
# Now sum the appropriate terms to get the b gradient
for i in xrange(nocc):
for a in xrange(nocc,norb):
bp[g] = bp[g] + Fmo[i,a]*Gmo[i,a]/(orbe[i]-orbe[a])
#logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
return_flag = kwargs.get('return_flag')
if return_flag == 1:
return energy,bp
elif return_flag == 2:
return energy,bp,orbe,orbs
return bp
def get_Hoep(b,H0,Gij):
Hoep = H0
# Add the contributions from the gaussian potential functions
# H[ij] += b[g]*<ibf|g|jbf>
for g in xrange(len(b)):
Hoep = Hoep + b[g]*Gij[g]
return Hoep
# Here's a much faster way to do this. Haven't figured out how to
# do it for more generic functions like OEP-GVB
def oep_hf_an(atoms,orbs,**kwargs):
"""oep_hf - Form the optimized effective potential for HF exchange.
Implementation of Wu and Yang's Approximate Newton Scheme
from J. Theor. Comp. Chem. 2, 627 (2003).
oep_hf(atoms,orbs,**kwargs)
atoms A Molecule object containing a list of the atoms
orbs A matrix of guess orbitals
Options
-------
bfs None The basis functions to use for the wfn
pbfs None The basis functions to use for the pot
basis_data None The basis data to use to construct bfs
integrals None The one- and two-electron integrals to use
If not None, S,h,Ints
"""
maxiter = kwargs.get('maxiter',settings.OEPIters)
tol = kwargs.get('tol',settings.OEPTolerance)
bfs = getbasis(atoms,**kwargs)
# The basis set for the potential can be set different from
# that used for the wave function
pbfs = kwargs.get('pbfs')
if not pbfs: pbfs = bfs
npbf = len(pbfs)
S,h,Ints = getints(bfs,atoms)
nel = atoms.get_nel()
nocc,nopen = atoms.get_closedopen()
Enuke = atoms.get_enuke()
# Form the OEP using Yang/Wu, PRL 89 143002 (2002)
nbf = len(bfs)
norb = nbf
bp = zeros(nbf,'d')
bvec = kwargs.get('bvec')
if bvec:
assert len(bvec) == npbf
b = array(bvec)
else:
b = zeros(npbf,'d')
# Form and store all of the three-center integrals
# we're going to need.
# These are <ibf|gbf|jbf> (where 'bf' indicates basis func,
# as opposed to MO)
# N^3 storage -- obviously you don't want to do this for
# very large systems
Gij = []
for g in xrange(npbf):
gmat = zeros((nbf,nbf),'d')
Gij.append(gmat)
gbf = pbfs[g]
for i in xrange(nbf):
ibf = bfs[i]
for j in xrange(i+1):
jbf = bfs[j]
gij = three_center(ibf,gbf,jbf)
gmat[i,j] = gij
gmat[j,i] = gij
# Compute the Fermi-Amaldi potential based on the LDA density.
# We're going to form this matrix from the Coulombic matrix that
# arises from the input orbitals. D0 and J0 refer to the density
# matrix and corresponding Coulomb matrix
D0 = mkdens(orbs,0,nocc)
J0 = getJ(Ints,D0)
Vfa = (2*(nel-1.)/nel)*J0
H0 = h + Vfa
b = zeros(nbf,'d')
eold = 0
for iter in xrange(maxiter):
Hoep = get_Hoep(b,H0,Gij)
orbe,orbs = geigh(Hoep,S)
D = mkdens(orbs,0,nocc)
Vhf = get2JmK(Ints,D)
energy = trace2(2*h+Vhf,D)+Enuke
if abs(energy-eold) < tol:
break
else:
eold = energy
logging.debug("OEP AN Opt: %d %f" % (iter,energy))
dV_ao = Vhf-Vfa
dV = matrixmultiply(transpose(orbs),matrixmultiply(dV_ao,orbs))
X = zeros((nbf,nbf),'d')
c = zeros(nbf,'d')
Gkt = zeros((nbf,nbf),'d')
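# Approximate-Newton step (Wu & Yang 2003): build c[k] = sum_ia dV[i,a]*Gk[i,a]/(orbe[i]-orbe[a]) and X[k,l] = sum_ia Gk[i,a]*Gl[i,a]/(orbe[i]-orbe[a]), then solve X b = c for the potential coefficients.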
for k in xrange(nbf):
# This didn't work; in fact, it made things worse:
Gk = matrixmultiply(transpose(orbs),matrixmultiply(Gij[k],orbs))
for i in xrange(nocc):
for a in xrange(nocc,norb):
c[k] += dV[i,a]*Gk[i,a]/(orbe[i]-orbe[a])
for l in xrange(nbf):
Gl = matrixmultiply(transpose(orbs),matrixmultiply(Gij[l],orbs))
for i in xrange(nocc):
for a in xrange(nocc,norb):
X[k,l] += Gk[i,a]*Gl[i,a]/(orbe[i]-orbe[a])
# This should actually be a pseudoinverse...
b = solve(X,c)
logger.info("Final OEP energy = %f" % energy)
return energy,orbe,orbs
def oep_uhf_an(atoms,orbsa,orbsb,**kwargs):
"""oep_hf - Form the optimized effective potential for HF exchange.
Implementation of Wu and Yang's Approximate Newton Scheme
from J. Theor. Comp. Chem. 2, 627 (2003).
oep_uhf(atoms,orbs,**kwargs)
atoms A Molecule object containing a list of the atoms
orbs A matrix of guess orbitals
Options
-------
bfs None The basis functions to use for the wfn
pbfs None The basis functions to use for the pot
basis_data None The basis data to use to construct bfs
integrals None The one- and two-electron integrals to use
If not None, S,h,Ints
"""
maxiter = kwargs.get('maxiter',settings.OEPIters)
tol = kwargs.get('tol',settings.OEPTolerance)
ETemp = kwargs.get('ETemp',settings.DFTElectronTemperature)
bfs = getbasis(atoms,**kwargs)
# The basis set for the potential can be set different from
# that used for the wave function
pbfs = kwargs.get('pbfs')
if not pbfs: pbfs = bfs
npbf = len(pbfs)
S,h,Ints = getints(bfs,atoms,**kwargs)
nel = atoms.get_nel()
nclosed,nopen = atoms.get_closedopen()
nalpha,nbeta = nclosed+nopen,nclosed
Enuke = atoms.get_enuke()
# Form the OEP using Yang/Wu, PRL 89 143002 (2002)
nbf = len(bfs)
norb = nbf
ba = zeros(npbf,'d')
bb = zeros(npbf,'d')
# Form and store all of the three-center integrals
# we're going to need.
# These are <ibf|gbf|jbf> (where 'bf' indicates basis func,
# as opposed to MO)
# N^3 storage -- obviously you don't want to do this for
# very large systems
Gij = []
for g in xrange(npbf):
gmat = zeros((nbf,nbf),'d')
Gij.append(gmat)
gbf = pbfs[g]
for i in xrange(nbf):
ibf = bfs[i]
for j in xrange(i+1):
jbf = bfs[j]
gij = three_center(ibf,gbf,jbf)
gmat[i,j] = gij
gmat[j,i] = gij
# Compute the Fermi-Amaldi potential based on the LDA density.
# We're going to form this matrix from the Coulombic matrix that
# arises from the input orbitals. D0 and J0 refer to the density
# matrix and corresponding Coulomb matrix
D0 = mkdens(orbsa,0,nalpha)+mkdens(orbsb,0,nbeta)
J0 = getJ(Ints,D0)
Vfa = ((nel-1.)/nel)*J0
H0 = h + Vfa
eold = 0
for iter in xrange(maxiter):
Hoepa = get_Hoep(ba,H0,Gij)
Hoepb = get_Hoep(ba,H0,Gij)
orbea,orbsa = geigh(Hoepa,S)
orbeb,orbsb = geigh(Hoepb,S)
if ETemp:
efermia = get_efermi(2*nalpha,orbea,ETemp)
occsa = get_fermi_occs(efermia,orbea,ETemp)
Da = mkdens_occs(orbsa,occsa)
efermib = get_efermi(2*nbeta,orbeb,ETemp)
occsb = get_fermi_occs(efermib,orbeb,ETemp)
Db = mkdens_occs(orbsb,occsb)
entropy = 0.5*(get_entropy(occsa,ETemp)+get_entropy(occsb,ETemp))
else:
Da = mkdens(orbsa,0,nalpha)
Db = mkdens(orbsb,0,nbeta)
J = getJ(Ints,Da) + getJ(Ints,Db)
Ka = getK(Ints,Da)
Kb = getK(Ints,Db)
energy = (trace2(2*h+J-Ka,Da)+trace2(2*h+J-Kb,Db))/2\
+Enuke
if ETemp: energy += entropy
if abs(energy-eold) < tol:
break
else:
eold = energy
logging.debug("OEP AN Opt: %d %f" % (iter,energy))
# Do alpha and beta separately
# Alphas
dV_ao = J-Ka-Vfa
dV = matrixmultiply(orbsa,matrixmultiply(dV_ao,transpose(orbsa)))
X = zeros((nbf,nbf),'d')
c = zeros(nbf,'d')
for k in xrange(nbf):
Gk = matrixmultiply(orbsa,matrixmultiply(Gij[k],
transpose(orbsa)))
for i in xrange(nalpha):
for a in xrange(nalpha,norb):
c[k] += dV[i,a]*Gk[i,a]/(orbea[i]-orbea[a])
for l in xrange(nbf):
Gl = matrixmultiply(orbsa,matrixmultiply(Gij[l],
transpose(orbsa)))
for i in xrange(nalpha):
for a in xrange(nalpha,norb):
X[k,l] += Gk[i,a]*Gl[i,a]/(orbea[i]-orbea[a])
# This should actually be a pseudoinverse...
ba = solve(X,c)
# Betas
dV_ao = J-Kb-Vfa
dV = matrixmultiply(orbsb,matrixmultiply(dV_ao,transpose(orbsb)))
X = zeros((nbf,nbf),'d')
c = zeros(nbf,'d')
for k in xrange(nbf):
Gk = matrixmultiply(orbsb,matrixmultiply(Gij[k],
transpose(orbsb)))
for i in xrange(nbeta):
for a in xrange(nbeta,norb):
c[k] += dV[i,a]*Gk[i,a]/(orbeb[i]-orbeb[a])
for l in xrange(nbf):
Gl = matrixmultiply(orbsb,matrixmultiply(Gij[l],
transpose(orbsb)))
for i in xrange(nbeta):
for a in xrange(nbeta,norb):
X[k,l] += Gk[i,a]*Gl[i,a]/(orbeb[i]-orbeb[a])
# This should actually be a pseudoinverse...
bb = solve(X,c)
logger.info("Final OEP energy = %f" % energy)
return energy,(orbea,orbeb),(orbsa,orbsb)
def test_old():
from PyQuante.Molecule import Molecule
from PyQuante.Ints import getbasis,getints
from PyQuante.hartree_fock import rhf
logging.basicConfig(level=logging.DEBUG,format="%(message)s")
#mol = Molecule('HF',[('H',(0.,0.,0.)),('F',(0.,0.,0.898369))],
# units='Angstrom')
mol = Molecule('LiH',[(1,(0,0,1.5)),(3,(0,0,-1.5))],units = 'Bohr')
bfs = getbasis(mol)
S,h,Ints = getints(bfs,mol)
print "after integrals"
E_hf,orbe_hf,orbs_hf = rhf(mol,bfs=bfs,integrals=(S,h,Ints),DoAveraging=True)
print "RHF energy = ",E_hf
E_exx,orbe_exx,orbs_exx = exx(mol,orbs_hf,bfs=bfs,integrals=(S,h,Ints))
return
def test():
from PyQuante import Molecule, HFSolver, DFTSolver, UHFSolver
logging.basicConfig(level=logging.DEBUG,format="%(message)s")
mol = Molecule("He",[(2,(0,0,0))])
solver = HFSolver(mol)
solver.iterate()
print "HF energy = ",solver.energy
dft_solver = DFTSolver(mol)
dft_solver.iterate()
print "DFT energy = ",dft_solver.energy
oep = EXXSolver(solver)
# Testing 0 temp
oep.iterate()
# Testing finite temp
oep.iterate(etemp=40000)
return
def utest():
from PyQuante import Molecule, HFSolver, DFTSolver, UHFSolver
logging.basicConfig(level=logging.DEBUG,format="%(message)s")
mol = Molecule("He",[(2,(0,0,0))])
mol = Molecule("Li",[(3,(0,0,0))],multiplicity=2)
solver = UHFSolver(mol)
solver.iterate()
print "HF energy = ",solver.energy
dft_solver = DFTSolver(mol)
dft_solver.iterate()
print "DFT energy = ",dft_solver.energy
oep = UEXXSolver(solver)
# Testing 0 temp
oep.iterate()
# Testing finite temp
oep.iterate(etemp=10000)
return
if __name__ == '__main__':
test()
utest()<|fim▁end|>
|
for j in xrange(i+1):
jbf = self.bfs[j]
gij = three_center(ibf,gbf,jbf)
|
<|file_name|>FieldExclusionTest.java<|end_file_name|><|fim▁begin|>/**
* The MIT License
*
* Copyright (c) 2019, Mahmoud Ben Hassine ([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jeasy.random;
import static org.jeasy.random.FieldPredicates.*;
import static org.assertj.core.api.Assertions.assertThat;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import org.jeasy.random.api.ContextAwareRandomizer;
import org.jeasy.random.api.RandomizerContext;
import org.jeasy.random.beans.*;
import org.jeasy.random.beans.exclusion.A;
import org.jeasy.random.beans.exclusion.B;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.jeasy.random.beans.exclusion.C;
@ExtendWith(MockitoExtension.class)
class FieldExclusionTest {
private EasyRandom easyRandom;
@BeforeEach
void setUp() {
easyRandom = new EasyRandom();
}
@Test
void excludedFieldsShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("name"));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
//then
assertThat(person).isNotNull();
assertThat(person.getName()).isNull();
}
@Test
void excludedFieldsUsingSkipRandomizerShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("name").and(ofType(String.class)).and(inClass(Human.class)));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getName()).isNull();
}
@Test
void excludedFieldsUsingFieldDefinitionShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(named("name"));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getAddress()).isNotNull();
assertThat(person.getAddress().getStreet()).isNotNull();
// person.name and street.name should be null
assertThat(person.getName()).isNull();
assertThat(person.getAddress().getStreet().getName()).isNull();
}
@Test
void excludedDottedFieldsShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("name").and(inClass(Street.class)));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getAddress()).isNotNull();
assertThat(person.getAddress().getStreet()).isNotNull();
assertThat(person.getAddress().getStreet().getName()).isNull();
}
@Test
void fieldsExcludedWithAnnotationShouldNotBePopulated() {
Person person = easyRandom.nextObject(Person.class);
assertThat(person).isNotNull();
assertThat(person.getExcluded()).isNull();
}
@Test
@SuppressWarnings("deprecation")
void fieldsExcludedWithAnnotationViaFieldDefinitionShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(isAnnotatedWith(Deprecated.class));
easyRandom = new EasyRandom(parameters);
// when
Website website = easyRandom.nextObject(Website.class);
// then
assertThat(website).isNotNull();
assertThat(website.getProvider()).isNull();
}
@Test
void fieldsExcludedFromTypeViaFieldDefinitionShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(inClass(Address.class));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getAddress()).isNotNull();
// all fields declared in class Address must be null
assertThat(person.getAddress().getCity()).isNull();
assertThat(person.getAddress().getStreet()).isNull();
assertThat(person.getAddress().getZipCode()).isNull();
assertThat(person.getAddress().getCountry()).isNull();
}
@Test
void testFirstLevelExclusion() {
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("b2").and(inClass(C.class)));
easyRandom = new EasyRandom(parameters);
C c = easyRandom.nextObject(C.class);
assertThat(c).isNotNull();
// B1 and its "children" must not be null
assertThat(c.getB1()).isNotNull();
assertThat(c.getB1().getA1()).isNotNull();
assertThat(c.getB1().getA1().getS1()).isNotNull();
assertThat(c.getB1().getA1().getS2()).isNotNull();
assertThat(c.getB1().getA2()).isNotNull();
assertThat(c.getB1().getA2().getS1()).isNotNull();
assertThat(c.getB1().getA2().getS2()).isNotNull();
// B2 must be null
assertThat(c.getB2()).isNull();
}
@Test
void testSecondLevelExclusion() { // goal: exclude only b2.a2
EasyRandomParameters parameters = new EasyRandomParameters()
.randomize(ofType(A.class).and(inClass(B.class)), new ContextAwareRandomizer<A>() {
private RandomizerContext context;
@Override
public void setRandomizerContext(RandomizerContext context) {
this.context = context;
}
@Override
public A getRandomValue() {
if (context.getCurrentField().equals("b2.a2")) {
return null;
}
return new EasyRandom().nextObject(A.class);
}
});
easyRandom = new EasyRandom(parameters);
C c = easyRandom.nextObject(C.class);
assertThat(c).isNotNull();
// B1 and its "children" must not be null
assertThat(c.getB1()).isNotNull();
assertThat(c.getB1().getA1()).isNotNull();
assertThat(c.getB1().getA1().getS1()).isNotNull();
assertThat(c.getB1().getA1().getS2()).isNotNull();
assertThat(c.getB1().getA2()).isNotNull();
assertThat(c.getB1().getA2().getS1()).isNotNull();
assertThat(c.getB1().getA2().getS2()).isNotNull();
// Only B2.A2 must be null
assertThat(c.getB2()).isNotNull();
assertThat(c.getB2().getA1()).isNotNull();
assertThat(c.getB2().getA1().getS1()).isNotNull();
assertThat(c.getB2().getA1().getS2()).isNotNull();
assertThat(c.getB2().getA2()).isNull();
}
@Test
void testThirdLevelExclusion() { // goal: exclude only b2.a2.s2
EasyRandomParameters parameters = new EasyRandomParameters()
.randomize(FieldPredicates.named("s2").and(inClass(A.class)), new ContextAwareRandomizer<String>() {
private RandomizerContext context;
@Override
public void setRandomizerContext(RandomizerContext context) {
this.context = context;
}
@Override
public String getRandomValue() {
if (context.getCurrentField().equals("b2.a2.s2")) {
return null;
}
return new EasyRandom().nextObject(String.class);
}
});
easyRandom = new EasyRandom(parameters);
C c = easyRandom.nextObject(C.class);
<|fim▁hole|> assertThat(c.getB1().getA1().getS2()).isNotNull();
assertThat(c.getB1().getA2()).isNotNull();
assertThat(c.getB1().getA2().getS1()).isNotNull();
assertThat(c.getB1().getA2().getS2()).isNotNull();
// Only B2.A2.S2 must be null
assertThat(c.getB2()).isNotNull();
assertThat(c.getB2().getA1()).isNotNull();
assertThat(c.getB2().getA1().getS1()).isNotNull();
assertThat(c.getB2().getA1().getS2()).isNotNull();
assertThat(c.getB2().getA2().getS1()).isNotNull();
assertThat(c.getB2().getA2().getS2()).isNull();
}
@Test
void testFirstLevelCollectionExclusion() {
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(FieldPredicates.named("b3").and(inClass(C.class)));
easyRandom = new EasyRandom(parameters);
C c = easyRandom.nextObject(C.class);
assertThat(c).isNotNull();
// B1 and its "children" must not be null
assertThat(c.getB1()).isNotNull();
assertThat(c.getB1().getA1()).isNotNull();
assertThat(c.getB1().getA1().getS1()).isNotNull();
assertThat(c.getB1().getA1().getS2()).isNotNull();
assertThat(c.getB1().getA2()).isNotNull();
assertThat(c.getB1().getA2().getS1()).isNotNull();
assertThat(c.getB1().getA2().getS2()).isNotNull();
// B1 and its "children" must not be null
assertThat(c.getB2()).isNotNull();
assertThat(c.getB2().getA1()).isNotNull();
assertThat(c.getB2().getA1().getS1()).isNotNull();
assertThat(c.getB2().getA1().getS2()).isNotNull();
assertThat(c.getB2().getA2()).isNotNull();
assertThat(c.getB2().getA2().getS1()).isNotNull();
assertThat(c.getB2().getA2().getS2()).isNotNull();
// B3 must be null
assertThat(c.getB3()).isNull();
}
@Test
void testSecondLevelCollectionExclusion() { // b3.a2 does not make sense, should be ignored
EasyRandomParameters parameters = new EasyRandomParameters()
.randomize(FieldPredicates.named("a2").and(inClass(B.class)), new ContextAwareRandomizer<A>() {
private RandomizerContext context;
@Override
public void setRandomizerContext(RandomizerContext context) {
this.context = context;
}
@Override
public A getRandomValue() {
if (context.getCurrentField().equals("b3.a2")) {
return null;
}
return new EasyRandom().nextObject(A.class);
}
});
easyRandom = new EasyRandom(parameters);
C c = easyRandom.nextObject(C.class);
assertThat(c).isNotNull();
// B1 and its "children" must not be null
assertThat(c.getB1()).isNotNull();
assertThat(c.getB1().getA1()).isNotNull();
assertThat(c.getB1().getA1().getS1()).isNotNull();
assertThat(c.getB1().getA1().getS2()).isNotNull();
assertThat(c.getB1().getA2()).isNotNull();
assertThat(c.getB1().getA2().getS1()).isNotNull();
assertThat(c.getB1().getA2().getS2()).isNotNull();
// B2 and its "children" must not be null
assertThat(c.getB2()).isNotNull();
assertThat(c.getB2().getA1()).isNotNull();
assertThat(c.getB2().getA1().getS1()).isNotNull();
assertThat(c.getB2().getA1().getS2()).isNotNull();
assertThat(c.getB2().getA2()).isNotNull();
assertThat(c.getB2().getA2().getS1()).isNotNull();
assertThat(c.getB2().getA2().getS2()).isNotNull();
// B3 must not be null
assertThat(c.getB3()).isNotNull();
}
@Test
void whenFieldIsExcluded_thenItsInlineInitializationShouldBeUsedAsIs() {
// given
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("myList").and(ofType(List.class)).and(inClass(InlineInitializationBean.class)));
easyRandom = new EasyRandom(parameters);
// when
InlineInitializationBean bean = easyRandom.nextObject(InlineInitializationBean.class);
// then
assertThat(bean).isNotNull();
assertThat(bean.getMyList()).isEmpty();
}
@Test
void whenFieldIsExcluded_thenItsInlineInitializationShouldBeUsedAsIs_EvenIfBeanHasNoPublicConstructor() {
// given
EasyRandomParameters parameters = new EasyRandomParameters()
.excludeField(named("myList").and(ofType(List.class)).and(inClass(InlineInitializationBeanPrivateConstructor.class)));
easyRandom = new EasyRandom(parameters);
// when
InlineInitializationBeanPrivateConstructor bean = easyRandom.nextObject(InlineInitializationBeanPrivateConstructor.class);
// then
assertThat(bean.getMyList()).isEmpty();
}
@Test
void fieldsExcludedWithOneModifierShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(hasModifiers(Modifier.TRANSIENT));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getEmail()).isNull();
}
@Test
void fieldsExcludedWithTwoModifiersShouldNotBePopulated() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(hasModifiers(Modifier.TRANSIENT | Modifier.PROTECTED));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getEmail()).isNull();
}
@Test
void fieldsExcludedWithTwoModifiersShouldBePopulatedIfOneModifierIsNotFit() {
// given
EasyRandomParameters parameters = new EasyRandomParameters().excludeField(hasModifiers(Modifier.TRANSIENT | Modifier.PUBLIC));
easyRandom = new EasyRandom(parameters);
// when
Person person = easyRandom.nextObject(Person.class);
// then
assertThat(person).isNotNull();
assertThat(person.getEmail()).isNotNull();
}
public static class InlineInitializationBean {
private List<String> myList = new ArrayList<>();
public List<String> getMyList() {
return myList;
}
public void setMyList(List<String> myList) {
this.myList = myList;
}
}
public static class InlineInitializationBeanPrivateConstructor {
private List<String> myList = new ArrayList<>();
public List<String> getMyList() {
return myList;
}
private InlineInitializationBeanPrivateConstructor() {}
}
}<|fim▁end|>
|
// B1 and its "children" must not be null
assertThat(c.getB1()).isNotNull();
assertThat(c.getB1().getA1()).isNotNull();
assertThat(c.getB1().getA1().getS1()).isNotNull();
|
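For orientation, the core pattern these tests exercise, using the predicates imported above (the bean types come from the test fixtures):
    EasyRandomParameters parameters = new EasyRandomParameters()
            .excludeField(named("name").and(ofType(String.class)).and(inClass(Person.class)));
    EasyRandom easyRandom = new EasyRandom(parameters);
    Person person = easyRandom.nextObject(Person.class); // getName() stays null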
<|file_name|>test_successor.py<|end_file_name|><|fim▁begin|>from must import MustHavePatterns
from successor import Successor
class TestSuccessor(object):
@classmethod<|fim▁hole|>
def test_successor(self):
try:
self.test_patterns.create(Successor)
raise Exception("Recursive structure did not explode.")
except RuntimeError as re:
assert str(re).startswith("maximum recursion depth")<|fim▁end|>
|
def setup_class(cls):
cls.test_patterns = MustHavePatterns(Successor)
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(duration)]
#![feature(socket_timeout)]
#[cfg(unix)]extern crate libc;
#[cfg(unix)]extern crate unix_socket;
#[macro_use(log, sendlog)] extern crate logger;
extern crate config;
extern crate util;
extern crate parser;
extern crate response;
extern crate database;
extern crate command;
extern crate net2;
use std::time::Duration;
use std::io;
use std::io::{Read, Write};
use std::net::{SocketAddr, ToSocketAddrs, TcpStream};
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{Receiver, Sender, channel};
use std::thread;
#[cfg(unix)] use std::path::Path;
#[cfg(unix)] use std::fs::File;
#[cfg(unix)] use libc::funcs::posix88::unistd::fork;
#[cfg(unix)] use libc::funcs::c95::stdlib::exit;
#[cfg(unix)] use libc::funcs::posix88::unistd::getpid;
use net2::{TcpBuilder, TcpStreamExt};
#[cfg(unix)] use unix_socket::{UnixStream, UnixListener};
use config::Config;
use database::{Database, PubsubEvent};
use logger::Level;
use parser::{OwnedParsedCommand, Parser, ParseError};
use response::{Response, ResponseError};
/// A stream connection.
#[cfg(unix)]
enum Stream {
Tcp(TcpStream),
Unix(UnixStream),
}
#[cfg(not(unix))]
enum Stream {
Tcp(TcpStream),
}
#[cfg(unix)]
impl Stream {
/// Creates a new independently owned handle to the underlying socket.
fn try_clone(&self) -> io::Result<Stream> {
match *self {
Stream::Tcp(ref s) => Ok(Stream::Tcp(try!(s.try_clone()))),
Stream::Unix(ref s) => Ok(Stream::Unix(try!(s.try_clone()))),
}
}
/// Write a buffer into this object, returning how many bytes were written.
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match *self {
Stream::Tcp(ref mut s) => s.write(buf),
Stream::Unix(ref mut s) => s.write(buf),
}
}
/// Sets the keepalive timeout to the timeout specified.
/// It fails silently for UNIX sockets.
fn set_keepalive(&self, duration: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => TcpStreamExt::set_keepalive(s, duration),
Stream::Unix(_) => Ok(()),
}
}
/// Sets the write timeout to the timeout specified.
/// It fails silently for UNIX sockets.
fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => s.set_write_timeout(dur),
// TODO: couldn't figure out how to enable this in unix_socket
Stream::Unix(_) => Ok(()),
}
}
/// Sets the read timeout to the timeout specified.
/// It fails silently for UNIX sockets.
fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => s.set_read_timeout(dur),
// TODO: couldn't figure out how to enable this in unix_socket
Stream::Unix(_) => Ok(()),
}
}
}
#[cfg(unix)]
impl Read for Stream {
/// Pull some bytes from this source into the specified buffer,
/// returning how many bytes were read.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match *self {
Stream::Tcp(ref mut s) => s.read(buf),
Stream::Unix(ref mut s) => s.read(buf),
}
}
}
#[cfg(not(unix))]
impl Stream {
/// Creates a new independently owned handle to the underlying socket.
fn try_clone(&self) -> io::Result<Stream> {
match *self {
Stream::Tcp(ref s) => Ok(Stream::Tcp(try!(s.try_clone()))),
}
}
/// Write a buffer into this object, returning how many bytes were written.
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match *self {
Stream::Tcp(ref mut s) => s.write(buf),
}
}
/// Sets the keepalive timeout to the timeout specified.
    /// Only the TCP variant exists in this build.
fn set_keepalive(&self, duration: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => TcpStreamExt::set_keepalive(s, duration),
}
}
/// Sets the write timeout to the timeout specified.
    /// Only the TCP variant exists in this build.
fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => s.set_write_timeout(dur),
}
}
/// Sets the read timeout to the timeout specified.
    /// Only the TCP variant exists in this build.
fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
match *self {
Stream::Tcp(ref s) => s.set_read_timeout(dur),
}
}
}
#[cfg(not(unix))]
impl Read for Stream {
/// Pull some bytes from this source into the specified buffer,
/// returning how many bytes were read.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match *self {
Stream::Tcp(ref mut s) => s.read(buf),
}
}
}
/// A client connection
struct Client {
/// The socket connection
stream: Stream,
/// A reference to the database
db: Arc<Mutex<Database>>,
/// The client unique identifier
id: usize,
}
/// The database server
pub struct Server {
/// A reference to the database
db: Arc<Mutex<Database>>,
/// A list of channels listening for incoming connections
listener_channels: Vec<Sender<u8>>,
/// A list of threads listening for incoming connections
listener_threads: Vec<thread::JoinHandle<()>>,
/// An incremental id for new clients
pub next_id: Arc<Mutex<usize>>,
}
impl Client {
/// Creates a new TCP socket client
pub fn tcp(stream: TcpStream, db: Arc<Mutex<Database>>, id: usize) -> Client {
return Client {
stream: Stream::Tcp(stream),
db: db,
id: id,
}
}
/// Creates a new UNIX socket client
#[cfg(unix)]
pub fn unix(stream: UnixStream, db: Arc<Mutex<Database>>, id: usize) -> Client {
return Client {
stream: Stream::Unix(stream),
db: db,
id: id,
}
}
/// Creates a thread that writes into the client stream each response received
fn create_writer_thread(&self, sender: Sender<(Level, String)>, rx: Receiver<Option<Response>>) {
let mut stream = self.stream.try_clone().unwrap();
thread::spawn(move || {
loop {
match rx.recv() {
Ok(m) => match m {
Some(msg) => match stream.write(&*msg.as_bytes()) {
Ok(_) => (),
Err(e) => sendlog!(sender, Warning, "Error writing to client: {:?}", e).unwrap(),
},
None => break,
},
Err(_) => break,
};
}
});
}
/// Creates a thread that sends responses for every pubsub event received
fn create_pubsub_thread(&self, tx: Sender<Option<Response>>, pubsub_rx: Receiver<Option<PubsubEvent>>) {
#![allow(unused_must_use)]
thread::spawn(move || {
loop {
match pubsub_rx.recv() {
Ok(m) => match m {
Some(msg) => tx.send(Some(msg.as_response())),
None => break,
},
Err(_) => break,
};
}
tx.send(None);
});
}
    /// Runs all client commands. The function loops until the client
/// disconnects.
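    /// Internally it spawns a writer thread and a pubsub thread for the
    /// client, then parses commands from the socket in a loop, executing each
    /// one against the shared database and pushing responses into the writer
    /// channel.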
pub fn run(&mut self, sender: Sender<(Level, String)>) {
#![allow(unused_must_use)]
let (stream_tx, rx) = channel::<Option<Response>>();
self.create_writer_thread(sender.clone(), rx);
let (pubsub_tx, pubsub_rx) = channel::<Option<PubsubEvent>>();
self.create_pubsub_thread(stream_tx.clone(), pubsub_rx);
let mut client = command::Client::new(pubsub_tx, self.id);
let mut parser = Parser::new();
let mut this_command:Option<OwnedParsedCommand>;
let mut next_command:Option<OwnedParsedCommand> = None;
loop {
if next_command.is_none() {
parser.allocate();
let len = {
let pos = parser.written;
let mut buffer = parser.get_mut();
// read socket
match self.stream.read(&mut buffer[pos..]) {
Ok(r) => r,
Err(err) => {
sendlog!(sender, Verbose, "Reading from client: {:?}", err);
break;
},
}
};
parser.written += len;
// client closed connection
if len == 0 {
sendlog!(sender, Verbose, "Client closed connection");
break;
}
}
// was there an error during the execution?
let mut error = false;
this_command = next_command;
next_command = None;
// try to parse received command
let parsed_command = match this_command {
Some(ref c) => c.get_command(),
None => match parser.next() {
Ok(p) => p,
Err(err) => match err {
// if it's incomplete, keep adding to the buffer
ParseError::Incomplete => { continue; }
ParseError::BadProtocol(s) => {
let _ = stream_tx.send(Some(Response::Error(s)));
break;
},
_ => {
sendlog!(sender, Verbose, "Protocol error from client: {:?}", err);
break;
}
},
}
};
let mut db = match self.db.lock() {
Ok(db) => db,
Err(_) => break,
};
// execute the command
let r = command::command(parsed_command, &mut *db, &mut client);
// unlock the db
drop(db);
// check out the response
match r {
// received a response, send it to the client
Ok(response) => {
match stream_tx.send(Some(response)) {
Ok(_) => (),
Err(_) => error = true,
};
},
// no response
Err(err) => match err {
// There is no reply to send, that's ok
ResponseError::NoReply => (),
// We have to wait until a sender signals us back and then retry
// (Repeating the same command is actually wrong because of the timeout)
ResponseError::Wait(ref receiver) => {
// if we receive a None, send a nil, otherwise execute the command
match receiver.recv().unwrap() {
Some(cmd) => next_command = Some(cmd),
None => match stream_tx.send(Some(Response::Nil)) {
Ok(_) => (),
Err(_) => error = true,
},
}
}
},
}
// if something failed, let's shut down the client
if error {
// kill threads
stream_tx.send(None);
client.pubsub_sender.send(None);
break;
}
}
}
}
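// `handle_listener` is a macro rather than a generic function, presumably
// because `TcpListener` and `UnixListener` expose the same `incoming()` shape
// without sharing a trait; the macro stamps out one accept loop per type.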
macro_rules! handle_listener {
($logger: expr, $listener: expr, $server: expr, $rx: expr, $tcp_keepalive: expr, $timeout: expr, $t: ident) => ({
let db = $server.db.clone();
let sender = $logger.sender();
let next_id = $server.next_id.clone();
thread::spawn(move || {
for stream in $listener.incoming() {
if $rx.try_recv().is_ok() {
// any new message should break
break;
}
match stream {
Ok(stream) => {
sendlog!(sender, Verbose, "Accepted connection to {:?}", stream).unwrap();
let db1 = db.clone();
let mysender = sender.clone();
let id = {
let mut nid = next_id.lock().unwrap();
*nid += 1;
*nid - 1
};
thread::spawn(move || {
let mut client = Client::$t(stream, db1, id);
client.stream.set_keepalive(if $tcp_keepalive > 0 { Some(Duration::from_secs($tcp_keepalive as u64)) } else { None }).unwrap();
client.stream.set_read_timeout(if $timeout > 0 { Some(Duration::new($timeout, 0)) } else { None }).unwrap();
client.stream.set_write_timeout(if $timeout > 0 { Some(Duration::new($timeout, 0)) } else { None }).unwrap();
client.run(mysender);
});
}
Err(e) => sendlog!(sender, Warning, "Accepting client connection: {:?}", e).unwrap(),
}
}
})
})
}
impl Server {
/// Creates a new server
pub fn new(config: Config) -> Server {
let db = Database::new(config);
return Server {
db: Arc::new(Mutex::new(db)),
listener_channels: Vec::new(),
listener_threads: Vec::new(),
next_id: Arc::new(Mutex::new(0)),
}
}
/// Runs the server. If `config.daemonize` is true, it forks and exits.
#[cfg(unix)]
pub fn run(&mut self) {
let (daemonize, pidfile) = {
let db = self.db.lock().unwrap();
(db.config.daemonize.clone(), db.config.pidfile.clone())
};
if daemonize {
unsafe {
match fork() {
-1 => panic!("Fork failed"),
0 => {
if let Ok(mut fp) = File::create(Path::new(&*pidfile)) {
match write!(fp, "{}", getpid()) {
Ok(_) => (),
Err(e) => {
let db = self.db.lock().unwrap();
log!(db.config.logger, Warning, "Error writing pid: {}", e);
},
}
}
self.start();
self.join();
},
_ => exit(0),
};
}
} else {
self.start();
self.join();
}
}
#[cfg(not(unix))]
pub fn run(&mut self) {
let daemonize = {
let db = self.db.lock().unwrap();
db.config.daemonize
};
if daemonize {
panic!("Cannot daemonize in non-unix");
} else {
self.start();
self.join();
}
}
#[cfg(windows)]
fn reuse_address(&self, _: &TcpBuilder) -> io::Result<()> {
Ok(())
}
#[cfg(not(windows))]
fn reuse_address(&self, builder: &TcpBuilder) -> io::Result<()> {
try!(builder.reuse_address(true));
Ok(())
}
/// Join the listener threads.
pub fn join(&mut self) {
#![allow(unused_must_use)]
while self.listener_threads.len() > 0 {
self.listener_threads.pop().unwrap().join();
}
}
/// Listens to a socket address.
fn listen<T: ToSocketAddrs>(&mut self, t: T, tcp_keepalive: u32, timeout: u64, tcp_backlog: i32) -> io::Result<()> {
for addr in try!(t.to_socket_addrs()) {
let (tx, rx) = channel();
let builder = try!(match addr {
SocketAddr::V4(_) => TcpBuilder::new_v4(),
SocketAddr::V6(_) => TcpBuilder::new_v6(),
});
try!(self.reuse_address(&builder));
let listener = try!(try!(
builder.bind(addr))
.listen(tcp_backlog));
self.listener_channels.push(tx);
{
let db = self.db.lock().unwrap();
let th = handle_listener!(db.config.logger, listener, self, rx, tcp_keepalive, timeout, tcp);
self.listener_threads.push(th);
}
}
Ok(())
}
/// Starts threads listening to new connections.
pub fn start(&mut self) {
let (tcp_keepalive, timeout, addresses, tcp_backlog) = {
let db = self.db.lock().unwrap();
(db.config.tcp_keepalive.clone(),
db.config.timeout.clone(),
db.config.addresses().clone(),
db.config.tcp_backlog.clone(),
)
};
for (host, port) in addresses {
match self.listen((&host[..], port), tcp_keepalive, timeout, tcp_backlog) {
Ok(_) => {
let db = self.db.lock().unwrap();
log!(db.config.logger, Notice, "The server is now ready to accept connections on port {}", port);
},
Err(err) => {
let db = self.db.lock().unwrap();
log!(db.config.logger, Warning, "Creating Server TCP listening socket {}:{}: {:?}", host, port, err);
continue;
}
}
}
self.handle_unixsocket();
}
#[cfg(unix)]
fn handle_unixsocket(&mut self) {
let db = self.db.lock().unwrap();
if let Some(ref unixsocket) = db.config.unixsocket {
let tcp_keepalive = db.config.tcp_keepalive;
let timeout = db.config.timeout;
let (tx, rx) = channel();
self.listener_channels.push(tx);
let listener = match UnixListener::bind(unixsocket) {
Ok(l) => l,
Err(err) => {
log!(db.config.logger, Warning, "Creating Server Unix socket {}: {:?}", unixsocket, err);
return;
}
};
let th = handle_listener!(db.config.logger, listener, self, rx, tcp_keepalive, timeout, unix);
self.listener_threads.push(th);
}
}
#[cfg(not(unix))]
fn handle_unixsocket(&mut self) {
let db = self.db.lock().unwrap();
if db.config.unixsocket.is_some() {
let _ = writeln!(&mut std::io::stderr(), "Ignoring unixsocket in non unix environment\n");
}
}
/// Sends a kill signal to the listeners and connects to the incoming
/// connections to break the listening loop.
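    /// (The extra connections are needed because `incoming()` blocks in
    /// `accept`; a throwaway connection wakes each listener thread so it can
    /// observe the kill message and exit.)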
pub fn stop(&mut self) {
#![allow(unused_must_use)]
for sender in self.listener_channels.iter() {
sender.send(0);
let db = self.db.lock().unwrap();
for (host, port) in db.config.addresses() {
for addrs in (&host[..], port).to_socket_addrs().unwrap() {
TcpStream::connect(addrs);
}
}
}
self.join();
}
}
#[cfg(test)]
mod test_networking {
use std::io::{Read, Write};
use std::net::TcpStream;
use std::str::from_utf8;
use std::thread;
use config::Config;
use logger::{Logger, Level};
use super::Server;
#[test]
fn parse_ping() {
let port = 16379;
let mut server = Server::new(Config::mock(port, Logger::new(Level::Warning)));
server.start();
let addr = format!("127.0.0.1:{}", port);
let streamres = TcpStream::connect(&*addr);
assert!(streamres.is_ok());
let mut stream = streamres.unwrap();
let message = b"*2\r\n$4\r\nping\r\n$4\r\npong\r\n";
assert!(stream.write(message).is_ok());
let mut h = [0u8; 4];
assert!(stream.read(&mut h).is_ok());
assert_eq!(from_utf8(&h).unwrap(), "$4\r\n");
let mut c = [0u8; 6];
assert!(stream.read(&mut c).is_ok());
assert_eq!(from_utf8(&c).unwrap(), "pong\r\n");
server.stop();
}
#[test]
fn allow_multiwrite() {
let port = 16380;
let mut server = Server::new(Config::mock(port, Logger::new(Level::Warning)));
server.start();
let addr = format!("127.0.0.1:{}", port);
let streamres = TcpStream::connect(&*addr);
assert!(streamres.is_ok());
let mut stream = streamres.unwrap();
let message = b"*2\r\n$4\r\nping\r\n";
assert!(stream.write(message).is_ok());
let message = b"$4\r\npong\r\n";
assert!(stream.write(message).is_ok());
let mut h = [0u8; 4];
assert!(stream.read(&mut h).is_ok());
assert_eq!(from_utf8(&h).unwrap(), "$4\r\n");
let mut c = [0u8; 6];
assert!(stream.read(&mut c).is_ok());
assert_eq!(from_utf8(&c).unwrap(), "pong\r\n");
server.stop();
}
#[test]
fn allow_stop() {
let port = 16381;
let mut server = Server::new(Config::mock(port, Logger::new(Level::Warning)));
server.start();
{
let addr = format!("127.0.0.1:{}", port);
let streamres = TcpStream::connect(&*addr);
assert!(streamres.is_ok());
}
server.stop();
{
let addr = format!("127.0.0.1:{}", port);
let streamres = TcpStream::connect(&*addr);
assert!(streamres.is_err());
}
server.start();
{
let addr = format!("127.0.0.1:{}", port);
let streamres = TcpStream::connect(&*addr);
assert!(streamres.is_ok());
}
server.stop();
}
#[test]
fn allow_multiple_clients() {
let port = 16382;
let mut server = Server::new(Config::mock(port, Logger::new(Level::Warning)));
server.start();
let addr = format!("127.0.0.1:{}", port);
let _ = TcpStream::connect(&*addr);
thread::sleep_ms(100);
assert_eq!(*server.next_id.lock().unwrap(), 1);
let _ = TcpStream::connect(&*addr);
thread::sleep_ms(100);
assert_eq!(*server.next_id.lock().unwrap(), 2);
server.stop();
}<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>search-audience-bidding.d.ts<|end_file_name|><|fim▁begin|>declare namespace GoogleAdsScripts {
namespace AdsApp {
interface SearchAudienceBidding {
/** Clears the bid modifier value. */
clearBidModifier(): void;
/** Returns the current bid modifier value. */
getBidModifier(): number;<|fim▁hole|> }
}
}<|fim▁end|>
|
/** Sets the bid modifier value for this audience to the specified value. */
setBidModifier(modifier: number): void;
|
<|file_name|>photom_b4.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals, division, print_function
from . import model_base
__all__ = ['PhotomModelB4']
class PhotomModelB4(model_base.DataModel):
"""
A data model for photom reference files.
"""
schema_url = "photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(PhotomModelB4, self).__init__(init=init, **kwargs)
if phot_table is not None:
self.phot_table = phot_table
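# The subclasses below repeat the same pattern: each instrument swaps in its
# own schema_url while reusing the PhotomModelB4 construction logic.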
class NircamPhotomModelB4(PhotomModelB4):
"""
A data model for NIRCam photom reference files.
"""
schema_url = "nircam_photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(NircamPhotomModelB4, self).__init__(init=init, **kwargs)
if phot_table is not None:
self.phot_table = phot_table
class NirissPhotomModelB4(PhotomModelB4):
"""
A data model for NIRISS photom reference files.
"""
schema_url = "niriss_photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(NirissPhotomModelB4, self).__init__(init=init, **kwargs)
if phot_table is not None:
self.phot_table = phot_table
class NirspecPhotomModelB4(PhotomModelB4):
"""
A data model for NIRSpec photom reference files.
"""
schema_url = "nirspec_photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(NirspecPhotomModelB4, self).__init__(init=init, **kwargs)
if phot_table is not None:
self.phot_table = phot_table
class MiriImgPhotomModelB4(PhotomModelB4):
"""
A data model for MIRI imaging photom reference files.
"""
schema_url = "mirimg_photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(MiriImgPhotomModelB4, self).__init__(init=init, **kwargs)<|fim▁hole|>
class MiriMrsPhotomModelB4(PhotomModelB4):
"""
A data model for MIRI MRS photom reference files.
"""
schema_url = "mirmrs_photomb4.schema.yaml"
def __init__(self, init=None, phot_table=None, **kwargs):
super(MiriMrsPhotomModelB4, self).__init__(init=init, **kwargs)
if phot_table is not None:
self.phot_table = phot_table<|fim▁end|>
|
if phot_table is not None:
self.phot_table = phot_table
|
<|file_name|>ScheduledTransferRemoteasDestination.java<|end_file_name|><|fim▁begin|>import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.ofte.services.MetaDataCreations;
/**
* Servlet implementation class ScheduledTransferRemoteasDestination
*/
@SuppressWarnings("serial")
@WebServlet("/ScheduledTransferRemoteasDestination")
public class ScheduledTransferRemoteasDestination extends HttpServlet {
// private static final long serialVersionUID = 1L;
HashMap<String, String> hashMap = new HashMap<String, String>();
// com.ofte.services.MetaDataCreations metaDataCreations = new
// MetaDataCreations();
MetaDataCreations metaDataCreations = new MetaDataCreations();
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
* response)
*/
protected void doPost(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
// read form fields
String schedulername = request.getParameter("schedulername");
// String jobName = request.getParameter("jname");
String sourceDirectory = request.getParameter("sd");
String sourceTriggerPattern = request.getParameter("stp");
String sourceFilePattern = request.getParameter("sfp");
String destinationDirectory = request.getParameter("dd");
String destinationFilePattern = request.getParameter("dfp");<|fim▁hole|> String port = request.getParameter("port");
String pollUnits = request.getParameter("pu");
String pollInterval = request.getParameter("pi");
// String XMLFilePath = request.getParameter("xmlfilename");
HashMap<String, String> hashMap = new HashMap<>();
hashMap.put("-sn", schedulername);
// hashMap.put("-jn", jobName);
hashMap.put("-sd", sourceDirectory);
hashMap.put("-tr", sourceTriggerPattern);
hashMap.put("-sfp", sourceFilePattern);
hashMap.put("-sftp-d", destinationDirectory);
hashMap.put("-trd", destinationTriggerPattern);
hashMap.put("-hi", hostIp);
hashMap.put("-un", userName);
hashMap.put("-pw", password);
hashMap.put("-po", port);
hashMap.put("-pu", pollUnits);
hashMap.put("-pi", pollInterval);
hashMap.put("-dfp", destinationFilePattern);
// hashMap.put("-gt", XMLFilePath);
// System.out.println(hashMap);
// System.out.println("username: " + username);
// System.out.println("password: " + password);
// String str[] = {"-mn "+monitorName,"-jn "+jobName,"-sd
// "+sourceDirectory,"-tr "+sourceTriggerPattern,"-sfp
// "+sourceFilePattern,"-dd
// "+destinationDirectory,destinationFilePattern,"-trd
// "+destinationTriggerPattern,"-pu "+pollUnits,"-pi "+pollInterval,"-gt
// "+XMLFilePath};
// for(int i=0;i<str.length;i++) {
// System.out.println(str[i]);
// }
try {
// metaDataCreations.fetchingUIDetails(hashMap);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// String string = "-mn "+monitorName+",-jn "+jobName+",-pi
// "+pollInterval+",-pu "+pollUnits+",-dd "+destinationDirectory+" "+
// sourceDirectory+",-tr "+sourceTriggerPattern+",-trd
// "+destinationTriggerPattern+",-gt "+XMLFilePath+",-sfp
// "+sourceFilePattern;
// FileWriter fileWriter = new FileWriter("D:\\UIDetails.txt");
// fileWriter.write(string);
// fileWriter.close();
Runnable r = new Runnable() {
public void run() {
// runYourBackgroundTaskHere();
try {
metaDataCreations.fetchingUIDetails(hashMap);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
};
new Thread(r).start();
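        // The metadata creation runs on a background thread so the servlet can
        // answer the request immediately instead of blocking on the scheduler.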
// Example example = new Example();
// hashMap.put("monitorName", monitorName);
// example.result("-mn "+monitorName+" -jn "+jobName+" -pi
// "+pollInterval+" -pu "+pollUnits+" -dd "+destinationDirectory+" "+
// sourceDirectory+" -tr "+sourceTriggerPattern+" -trd
// "+destinationTriggerPattern+" -gt "+XMLFilePath+" -sfp
// "+sourceFilePattern);
// do some processing here...
// get response writer
// PrintWriter writer = response.getWriter();
// build HTML code
// String htmlRespone = "<html>";
// htmlRespone += "<h2>Your username is: " + username + "<br/>";
// htmlRespone += "Your password is: " + password + "</h2>";
// htmlRespone += "</html>";
// return response
// writer.println(htmlRespone);
PrintWriter out = response.getWriter();
response.setContentType("text/html");
out.println("<script type=\"text/javascript\">");
out.println("alert('successfully submited');");
out.println(
"window.open('http://localhost:8080/TestingUI/Open_OFTE_Scheduled_Transfers_Page.html','_self')");
out.println("</script>");
}
}<|fim▁end|>
|
String destinationTriggerPattern = request.getParameter("dtp");
String hostIp = request.getParameter("hostip");
String userName = request.getParameter("username");
String password = request.getParameter("password");
|
<|file_name|>plot_degree_circle.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Plot degree values for a given set of nodes in a simple circle plot.
'''
import numpy as np
import matplotlib.pyplot as plt
import mne
from jumeg import get_jumeg_path
from jumeg.connectivity import plot_degree_circle
import bct
orig_labels_fname = get_jumeg_path() + '/data/desikan_label_names.yaml'
yaml_fname = get_jumeg_path() + '/data/desikan_aparc_cortex_based_grouping.yaml'
con_fname = get_jumeg_path() + '/data/sample,aparc-con.npy'
con = np.load(con_fname)
con_ = con[0, :, :, 2] + con[0, :, :, 2].T
# compute the degree
degrees = mne.connectivity.degree(con_, threshold_prop=0.2)<|fim▁hole|><|fim▁end|>
|
fig, ax = plot_degree_circle(degrees, yaml_fname, orig_labels_fname)
|
<|file_name|>other.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
fn other() -> &'static str { "other" }
|
<|file_name|>default.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown import seo<|fim▁hole|>from django.conf import settings
class DefaultMetadata(seo.Metadata):
""" A very basic default class for those who do not wish to write their own.
"""
title = seo.Tag(head=True, max_length=68)
keywords = seo.MetaTag()
description = seo.MetaTag(max_length=155)
heading = seo.Tag(name="h1")
class Meta:
verbose_name = "Metadata"
verbose_name_plural = "Metadata"
use_sites = False
# This default class is automatically created when SEO_MODELS is
# defined, so we'll take our model list from there.
seo_models = getattr(settings, 'SEO_MODELS', [])
class HelpText:
title = "This is the page title, that appears in the title bar."
keywords = "Comma-separated keywords for search engines."
description = "A short description, displayed in search results."
heading = "This is the page heading, appearing in the <h1> tag."<|fim▁end|>
| |
<|file_name|>choicelists.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
# Copyright 2019-2020 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from django.db import models
from lino_xl.lib.ledger.choicelists import VoucherStates
from lino.api import dd, _
class OrderStates(VoucherStates):<|fim▁hole|>add('20', _("Active"), 'active', is_editable=True)
add('30', _("Urgent"), 'urgent', is_editable=True)
add('40', _("Done"), 'registered')
add('50', _("Cancelled"), 'cancelled')
OrderStates.draft.add_transition(required_states="active urgent registered cancelled")
OrderStates.active.add_transition(required_states="draft urgent registered cancelled")
OrderStates.urgent.add_transition(required_states="draft active registered cancelled")
OrderStates.registered.add_transition(required_states="draft active urgent cancelled")
OrderStates.cancelled.add_transition(required_states="draft active urgent registered")<|fim▁end|>
|
pass
add = OrderStates.add_item
add('10', _("Waiting"), 'draft', is_editable=True)
|
<|file_name|>move.py<|end_file_name|><|fim▁begin|>from engine.constants import BOARD_INDEX, C_PERM_INDEX, WK_SQ_INDEX, BK_SQ_INDEX, EN_PAS_INDEX, NORTH, SOUTH, \
RANK2, RANK7, WKC_INDEX, WQC_INDEX, BKC_INDEX, BQC_INDEX, CASTLE_VOIDED, CASTLED, A1, A8, E1, E8, C1, C8, G1, \
G8, H1, H8, WHITE, BLACK, HALF_MOVE_INDEX, FULL_MOVE_INDEX, TURN_INDEX, B8, B1, D1, D8, F1, F8
from engine.utils import update
from engine import board_hash
import logging
def move_at_state(state, move, live_move=False):
board = state[BOARD_INDEX]
castle_perm = state[C_PERM_INDEX]
white_king_sq = state[WK_SQ_INDEX]
black_king_sq = state[BK_SQ_INDEX]
from_tile_n = move[0]
to_tile_n = move[1]
if state[EN_PAS_INDEX] == to_tile_n and board[from_tile_n] == 'P':
if abs(from_tile_n - to_tile_n) == 11 or abs(from_tile_n - to_tile_n) == 9:
board = update(board, to_tile_n + SOUTH, 'o')
elif state[EN_PAS_INDEX] == to_tile_n and board[from_tile_n] == 'p':
if abs(from_tile_n - to_tile_n) == 11 or abs(from_tile_n - to_tile_n) == 9:
board = update(board, to_tile_n + NORTH, 'o')
en_pass_sq = -1
if board[from_tile_n] == 'P':
if from_tile_n >= RANK2:
if abs(to_tile_n - from_tile_n) == 20:
en_pass_sq = from_tile_n + NORTH
if board[to_tile_n + NORTH] == 'x':
board = update(board, from_tile_n, 'Q')
elif board[from_tile_n] == 'p':
if from_tile_n <= RANK7:<|fim▁hole|> en_pass_sq = from_tile_n + SOUTH
if board[to_tile_n + SOUTH] == 'x':
board = update(board, from_tile_n, 'q')
# King move case
elif board[from_tile_n] == 'K':
white_king_sq = to_tile_n
castle_perm = update(castle_perm, WKC_INDEX, CASTLE_VOIDED)
castle_perm = update(castle_perm, WQC_INDEX, CASTLE_VOIDED)
elif board[from_tile_n] == 'k':
black_king_sq = to_tile_n
castle_perm = update(castle_perm, BQC_INDEX, CASTLE_VOIDED)
        castle_perm = update(castle_perm, BKC_INDEX, CASTLE_VOIDED)
elif board[from_tile_n] == 'R':
if from_tile_n == H1: # king side
castle_perm = update(castle_perm, WKC_INDEX, CASTLE_VOIDED)
elif from_tile_n == A1:
castle_perm = update(castle_perm, WQC_INDEX, CASTLE_VOIDED)
elif board[from_tile_n] == 'r':
if from_tile_n == H8: # king side
castle_perm = update(castle_perm, BKC_INDEX, CASTLE_VOIDED)
elif from_tile_n == A8:
castle_perm = update(castle_perm, BQC_INDEX, CASTLE_VOIDED)
# Check if attacking black king side rook
if to_tile_n == A1:
castle_perm = update(castle_perm, WQC_INDEX, CASTLE_VOIDED)
elif to_tile_n == H1:
castle_perm = update(castle_perm, WKC_INDEX, CASTLE_VOIDED)
elif to_tile_n == A8:
castle_perm = update(castle_perm, BQC_INDEX, CASTLE_VOIDED)
elif to_tile_n == H8:
castle_perm = update(castle_perm, BKC_INDEX, CASTLE_VOIDED)
if from_tile_n == E1 and to_tile_n == G1 and board[from_tile_n] == 'K': # and castle_perm[0] == 1:
board = update(board, E1, 'o')
board = update(board, F1, 'R')
board = update(board, G1, 'K')
board = update(board, H1, 'o')
white_king_sq = G1
castle_perm = update(castle_perm, WKC_INDEX, CASTLED)
castle_perm = update(castle_perm, WQC_INDEX, CASTLED)
elif from_tile_n == E1 and to_tile_n == C1 and board[from_tile_n] == 'K': # queen side castle
board = update(board, A1, 'o')
board = update(board, B1, 'o')
board = update(board, C1, 'K')
board = update(board, D1, 'R')
board = update(board, E1, 'o')
white_king_sq = C1
castle_perm = update(castle_perm, WKC_INDEX, CASTLED)
castle_perm = update(castle_perm, WQC_INDEX, CASTLED)
elif from_tile_n == E8 and to_tile_n == G8 and board[from_tile_n] == 'k': # king side castle
board = update(board, E8, 'o')
board = update(board, F8, 'r')
board = update(board, G8, 'k')
board = update(board, H8, 'o')
black_king_sq = G8
castle_perm = update(castle_perm, BKC_INDEX, CASTLED)
castle_perm = update(castle_perm, BQC_INDEX, CASTLED)
elif from_tile_n == E8 and to_tile_n == C8 and board[from_tile_n] == 'k': # queen side castle
board = update(board, A8, 'o')
board = update(board, B8, 'o')
        board = update(board, C8, 'k')
        board = update(board, D8, 'r')
board = update(board, E8, 'o')
black_king_sq = C8
castle_perm = update(castle_perm, BKC_INDEX, CASTLED)
castle_perm = update(castle_perm, BQC_INDEX, CASTLED)
else:
if live_move:
if board[to_tile_n] != 'o':
logging.debug('cleared board hash!!!')
print("cleared board hash", board[to_tile_n])
board_hash = {}
board = update(board, to_tile_n, board[from_tile_n])
board = update(board, from_tile_n, 'o')
# Change Turns
turn = BLACK if state[TURN_INDEX] == WHITE else WHITE
return [board, turn, en_pass_sq, state[HALF_MOVE_INDEX], state[FULL_MOVE_INDEX], castle_perm, white_king_sq, black_king_sq]<|fim▁end|>
|
if abs(to_tile_n - from_tile_n) == 20:
|
<|file_name|>tsp.rs<|end_file_name|><|fim▁begin|>extern crate time;
extern crate getopts;
extern crate rand;
// TODO use terminal colors for nicer colored output
// extern crate term;
use getopts::{Options, Matches};
use std::env::args;
use rand::{SeedableRng, StdRng};
use time::precise_time_ns;
use std::str::FromStr;
use graph::Graph;
use population::Population;
pub mod edge;
pub mod graph;
pub mod nodept;
pub mod population;
pub mod tour;
// pub mod graphviz_conv;
static DEFAULT_ITERS: usize = 800;
static DEFAULT_MUT_RATE: f64 = 0.02;
static DEFAULT_POP_SIZE: usize = 200;
static DEFAULT_TOURNAMENT_SIZE: usize = 15;
fn usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options]", program);
print!("{}", opts.usage(&brief[..]));
}
fn parse_opt<T: FromStr>(matches: &Matches, opt: &str, default: T) -> T {
match matches.opt_str(opt) {
Some(o) => o.parse::<T>().unwrap_or(default),
None => default,
}
}
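// Note: the fallback applies both when the flag is absent and when its value
// fails to parse, e.g. `parse_opt::<f64>(&matches, "m", DEFAULT_MUT_RATE)`.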
fn main() {
    let args: Vec<String> = args().collect();
    let program = args[0].clone();
let mut opts = Options::new();
opts.optflag("h", "help", "print this help menu");
opts.optopt("m",
"mutation_rate",
"change the mutation rate (default: 0.015)",
"MUTRATE");
opts.optopt("i",
"iters",
"change the number of GA iterations (default: 50)",
"ITERS");
opts.optopt("p",
"pop_size",
"change the population size (default: 5000)",
"POPSIZE");
opts.optflag("v",
"verbose",
"print a lot of information, including timing.");
opts.optopt("r", "read", "read graph from a .tsp file", "READ");
opts.optopt("t",
"tournament_size",
"change the number of specimens used for tournament selection",
"TSIZE");
    let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(_) => panic!("Failed matching options"),
};
if matches.opt_present("h") {
usage(&program, opts);
return;
}
let v_flag = matches.opt_present("v");
let node_count = 15;
let tournament_size = parse_opt::<usize>(&matches, "t", DEFAULT_TOURNAMENT_SIZE);
let scale = 200.0;
let mutation_rate = parse_opt::<f64>(&matches, "m", DEFAULT_MUT_RATE);
let iter_count = parse_opt::<usize>(&matches, "i", DEFAULT_ITERS);
let population_size = parse_opt::<usize>(&matches, "p", DEFAULT_POP_SIZE);
let graph;
if matches.opt_present("r") {
let file_path = parse_opt::<String>(&matches, "r", String::new());
if file_path.is_empty() {
panic!("failed to parse file path")
}
graph = Graph::from_file(&file_path).unwrap();
} else {
// make a seeded RNG for the random graph generation for consistent testing
let seed: &[_] = &[12, 13, 14, 15];
let mut s_rng: StdRng = SeedableRng::from_seed(seed);
graph = Graph::random_graph(&mut s_rng, node_count, scale, scale);
}
if v_flag {
println!("Running TSP-GA on a graph with |N| = {}, |E| = {}",
graph.num_nodes,<|fim▁hole|> println!("GA parameters:");
println!("\tMutation rate = {}", mutation_rate);
println!("\tPopulation size = {}", population_size);
println!("\tNumber of iterations = {}", iter_count);
println!("\tTournament size = {}", tournament_size);
}
// RNG for the GA
let rng: StdRng = match StdRng::new() {
Ok(r) => r,
Err(_) => panic!("failed to acquire RNG"),
};
let mut pop = Population::new(population_size,
Box::new(graph),
mutation_rate,
tournament_size,
rng);
let first_result = pop.fittest().total_weight;
let mut best_result = pop.fittest();
if v_flag {
println!("Fittest at start: {}", first_result)
}
// Evolve the population
let t0 = precise_time_ns();
for _ in 0..iter_count {
pop = pop.evolve();
let r = pop.fittest();
if r.total_weight < best_result.total_weight {
best_result = r;
}
}
let t1 = precise_time_ns();
// Show the end result and the time it took.
println!("Resulting tour: {:?}\nwith weight {}",
best_result.nodes,
best_result.total_weight);
if v_flag {
let dt = ((t1 - t0) as f64) / 1e6;
println!("t_avg = {} ms, t_overall = {} s",
dt / iter_count as f64,
dt / 1000.0);
println!("Improvement factor from first solution: {}",
(first_result / best_result.total_weight));
}
}<|fim▁end|>
|
graph.all_edges().len());
|
<|file_name|>otter_ai_to_our_format.py<|end_file_name|><|fim▁begin|>import os
import sys
def main():
if len(sys.argv) != 2:
print("Usage: Pass the file name for the source transcript txt file.")
sys.exit(-1)
file = sys.argv[1]
out_file = os.path.expanduser(
os.path.join(
'~/Desktop',
os.path.basename(file)<|fim▁hole|> print("Files:")
print("Reading source file: ", file)
print("Exported version at: ", out_file)
fin = open(file, 'r', encoding='utf-8')
fout = open(out_file, 'w', encoding='utf-8')
with fin, fout:
time = "0:00"
for line in fin:
if is_time(line):
time = get_time_text(line)
elif line and line.strip():
text = f"{time} {line.strip()}\n\n"
fout.write(text)
# print(text)
def is_time(line: str) -> bool:
if not line or not line.strip():
return False
parts = line.split(':')
if not parts:
return False
return all(p.strip().isnumeric() for p in parts)
def get_time_text(line: str) -> str:
if ':' not in line:
raise Exception(f"Text doesn't seem to be a time: {line}")
parts = line.split(':')
hour_text = "0"
min_text = "0"
sec_text = "0"
if len(parts) == 3:
hour_text = parts[0].strip()
min_text = parts[1].strip()
sec_text = parts[2].strip()
elif len(parts) == 2:
min_text = parts[0].strip()
sec_text = parts[1].strip()
elif len(parts) == 1:
sec_text = parts[0].strip()
return f"{hour_text.zfill(2)}:{min_text.zfill(2)}:{sec_text.zfill(2)}"
if __name__ == '__main__':
main()<|fim▁end|>
|
)
)
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># coding: utf-8
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from songwriter import views
urlpatterns = [
url(r'^$', views.api_root,
name="root"),
url(r'^songs/list/$', views.SongList.as_view(),
name="songs_list"),
url(r'^songs/list/paginate/$', views.SongListPaginate.as_view(),
name="songs_list_paginate"),<|fim▁hole|> url(r'^songs/(?P<pk>[0-9]+)/$', views.SongDetail.as_view(),
name="songs_detail"),
url(r'^songs/fast/data/(?P<song_id>[0-9]+)/$', views.get_song_details,
name="songs_fast_data"),
url(r'^song/convert/to/tex/(?P<song_id>[0-9]+)/$', views.convert_to_tex,
name="song_convert_to_tex"),
url(r'^song/edit/tex/(?P<song_id>[0-9]+)/$', views.edit_tex,
name="song_convert_to_tex"),
url(r'^song/edit/multiple/tex/(?P<songs_ids>[\/0-9]+|all)/$', views.edit_multiple_songs_tex,
name="song_edit_multiple_songs_tex"),
url(r'^song/compile/tex/(?P<song_id>[0-9]+)/$', views.compile_tex,
name="song_compile_tex"),
url(r'^songs/guess/pages/(?P<songs_ids>[\/0-9]+|all)/$', views.guess_pages_numbers,
name="songs_guess_pages_numbers"),
url(r'^song/new/with/verses/$', views.add_song_with_verses,
name="song_add_with_verses"),
url(r'^songs/without/author/$', views.get_songs_without_author,
name="songs_without_author"),
url(r'^songs/without/editor/$', views.get_songs_without_editor,
name="songs_without_editor"),
url(r'^songs/with/latex/code/$', views.get_songs_with_latex_code,
name="songs_with_latex_code"),
url(r'^songs/without/page/number/$', views.get_songs_without_page_number,
name="songs_without_page_number"),
url(r'^copyrights/extract/(?P<songs_ids>[\/0-9]+|all)/$', views.find_copyrights_data,
name="find_copyrights_data"),
url(r'^book/elements/sort/$', views.update_book_elements_list,
name="book_elements_sort"),
url(r'^book/elements/list/$', views.book_elements_list,
name="book_elements_list"),
url(r'^groups/fast/list/$', views.SongsGroupFastList.as_view(),
name="groups_fast_list"),
url(r'^groups/list/$', views.SongsGroupList.as_view(),
name="groups_list"),
url(r'^groups/(?P<pk>[0-9]+)/$', views.SongsGroupDetail.as_view(),
name="groups_detail"),
url(r'^authors/list/$', views.AuthorList.as_view(),
name="authors_list"),
url(r'^authors/(?P<pk>[0-9]+)/$', views.AuthorDetail.as_view(),
name="authors_detail"),
url(r'^editors/list/$', views.EditorList.as_view(),
name="editors_list"),
url(r'^editors/(?P<pk>[0-9]+)/$', views.EditorDetail.as_view(),
name="editors_detail"),
url(r'^themes/list/$', views.ThemeList.as_view(),
name="themes_list"),
url(r'^themes/(?P<pk>[0-9]+)/$', views.ThemeDetail.as_view(),
name="themes_detail"),
url(r'^paragraphs/list/$', views.ParagraphList.as_view(),
name="paragraphs_list"),
url(r'^paragraphs/(?P<pk>[0-9]+)/$', views.ParagraphDetail.as_view(),
name="paragraphs_detail"),
url(r'^paragraphs/invert/(?P<paragraph_id_top>[0-9]+)/and/(?P<paragraph_id_bottom>[0-9]+)/$',
views.invert_paragraphs, name="paragraphs_invert"),
url(r'^verses/list/$', views.VerseList.as_view(),
name="verses_list"),
url(r'^verses/(?P<pk>[0-9]+)/$', views.VerseDetail.as_view(),
name="verses_detail"),
url(r'^verses/invert/(?P<verse_id_top>[0-9]+)/and/(?P<verse_id_bottom>[0-9]+)/$',
views.invert_verses, name="verses_invert"),
url(r'^harmonization/list/$', views.HarmonizationList.as_view(),
name="harmonization_list"),
url(r'^harmonization/list/song/(?P<song_id>[0-9]+)/$', views.get_song_harmonizations,
name="get_song_harmonizations"),
url(r'^harmonization/(?P<pk>[0-9]+)/$', views.HarmonizationDetail.as_view(),
name="harmonization_detail"),
url(r'^author/list/songs/(?P<author_id>[0-9]+)/$', views.get_author_songs,
name="get_author_songs"),
url(r'^editor/list/songs/(?P<editor_id>[0-9]+)/$', views.get_editor_songs,
name="get_editor_songs"),
url(r'^theme/list/songs/(?P<theme_id>[0-9]+)/$', views.get_theme_songs,
name="get_theme_songs"),
url(r'^chords/list/$', views.ChordList.as_view(),
name="chords_list"),
url(r'^chords/(?P<pk>[0-9]+)/$', views.ChordDetail.as_view(),
name="chords_detail"),
url(r'^get/whole/tex/$', views.get_whole_tex_code,
name="compile_latexcode"),
url(r'^latexcode/list/$', views.SongLaTeXCodeList.as_view(),
name="latexcode_list"),
url(r'^latexcode/(?P<pk>[0-9]+)/$', views.SongLaTeXCodeDetail.as_view(),
name="latexcode_detail"),
url(r'^additional/latexcode/list/$', views.AdditionalLaTeXContentList.as_view(),
name="additional_latexcode_list"),
url(r'^additional/latexcode/(?P<pk>[0-9]+)/$', views.AdditionalLaTeXContentDetail.as_view(),
name="additional_latexcode_detail"),
]
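# format_suffix_patterns lets every route above also answer with an optional
# format suffix (e.g. /songs/list.json) in addition to the plain paths.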
urlpatterns = format_suffix_patterns(urlpatterns)<|fim▁end|>
| |
<|file_name|>import_onnx.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
from .... import symbol
from .... import ndarray as nd
from ....base import string_types
from .import_helper import _convert_map as convert_map
class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._num_input = 0
self._num_param = 0
def _convert_operator(self, node_name, op_name, attrs, inputs):
"""Convert from onnx operator to mxnet operator.
    The converter must specify conversions explicitly for incompatible names, and
apply handlers to operator attributes.
Parameters
----------
        node_name : str
            name of the node to be translated.
        op_name : str
            Operator name, such as Convolution, FullyConnected
        attrs : dict
            Dict of operator attributes
        inputs : list
            list of inputs to the operator
Returns
-------
        mxnet_sym
Converted mxnet symbol
"""
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name, string_types):
new_op = getattr(symbol, op_name, None)
if not new_op:
raise RuntimeError("Unable to map op_name {} to sym".format(op_name))
if node_name is None:
mxnet_sym = new_op(*inputs, **new_attrs)
else:
mxnet_sym = new_op(name=node_name, *inputs, **new_attrs)
return mxnet_sym
return op_name
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :symbol.Symbol
The returned mxnet symbol
params : dict
A dict of name: nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input<|fim▁hole|> else:
self._nodes[i.name] = symbol.Variable(name=i.name)
# For storing arg and aux params for the graph.
auxDict = {}
argDict = {}
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
inputs = [self._nodes[i] for i in node.input]
mxnet_sym = self._convert_operator(node_name, op_name, onnx_attr, inputs)
for k, i in zip(list(node.output), range(len(mxnet_sym.list_outputs()))):
self._nodes[k] = mxnet_sym[i]
# splitting params into args and aux params
for args in mxnet_sym.list_arguments():
if args in self._params:
argDict.update({args: nd.array(self._params[args])})
for aux in mxnet_sym.list_auxiliary_states():
if aux in self._params:
auxDict.update({aux: nd.array(self._params[aux])})
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = symbol.Group(out)
else:
out = out[0]
return out, argDict, auxDict
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. "
+ "Instructions to install - https://github.com/onnx/onnx")
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
# Needed for supporting python version > 3.5
if isinstance(attrs[a.name], bytes):
attrs[a.name] = attrs[a.name].decode(encoding='utf-8')
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs<|fim▁end|>
|
self._nodes[i.name] = symbol.Variable(name=i.name,
shape=self._params[i.name].shape)
|
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package codedeploy
import (
"github.com/aws/aws-sdk-go/private/protocol"
)
const (
// ErrCodeAlarmsLimitExceededException for service response error code
// "AlarmsLimitExceededException".
//
// The maximum number of alarms for a deployment group (10) was exceeded.
ErrCodeAlarmsLimitExceededException = "AlarmsLimitExceededException"
// ErrCodeApplicationAlreadyExistsException for service response error code
// "ApplicationAlreadyExistsException".
//
// An application with the specified name with the IAM user or AWS account already
// exists.
ErrCodeApplicationAlreadyExistsException = "ApplicationAlreadyExistsException"
// ErrCodeApplicationDoesNotExistException for service response error code
// "ApplicationDoesNotExistException".
//
// The application does not exist with the IAM user or AWS account.
ErrCodeApplicationDoesNotExistException = "ApplicationDoesNotExistException"
// ErrCodeApplicationLimitExceededException for service response error code
// "ApplicationLimitExceededException".
//
// More applications were attempted to be created than are allowed.
ErrCodeApplicationLimitExceededException = "ApplicationLimitExceededException"
// ErrCodeApplicationNameRequiredException for service response error code
// "ApplicationNameRequiredException".
//
// The minimum number of required application names was not specified.
ErrCodeApplicationNameRequiredException = "ApplicationNameRequiredException"
// ErrCodeArnNotSupportedException for service response error code
// "ArnNotSupportedException".
//
// The specified ARN is not supported. For example, it might be an ARN for a
// resource that is not expected.
ErrCodeArnNotSupportedException = "ArnNotSupportedException"
// ErrCodeBatchLimitExceededException for service response error code
// "BatchLimitExceededException".
//
// The maximum number of names or IDs allowed for this request (100) was exceeded.
ErrCodeBatchLimitExceededException = "BatchLimitExceededException"
// ErrCodeBucketNameFilterRequiredException for service response error code
// "BucketNameFilterRequiredException".
//
// A bucket name is required, but was not provided.
ErrCodeBucketNameFilterRequiredException = "BucketNameFilterRequiredException"
// ErrCodeDeploymentAlreadyCompletedException for service response error code
// "DeploymentAlreadyCompletedException".
//
// The deployment is already complete.
ErrCodeDeploymentAlreadyCompletedException = "DeploymentAlreadyCompletedException"
// ErrCodeDeploymentConfigAlreadyExistsException for service response error code
// "DeploymentConfigAlreadyExistsException".
//
// A deployment configuration with the specified name with the IAM user or AWS
// account already exists.
ErrCodeDeploymentConfigAlreadyExistsException = "DeploymentConfigAlreadyExistsException"
// ErrCodeDeploymentConfigDoesNotExistException for service response error code
// "DeploymentConfigDoesNotExistException".
//
// The deployment configuration does not exist with the IAM user or AWS account.
ErrCodeDeploymentConfigDoesNotExistException = "DeploymentConfigDoesNotExistException"
// ErrCodeDeploymentConfigInUseException for service response error code
// "DeploymentConfigInUseException".
//
// The deployment configuration is still in use.
ErrCodeDeploymentConfigInUseException = "DeploymentConfigInUseException"
// ErrCodeDeploymentConfigLimitExceededException for service response error code
// "DeploymentConfigLimitExceededException".
//
// The deployment configurations limit was exceeded.
ErrCodeDeploymentConfigLimitExceededException = "DeploymentConfigLimitExceededException"
// ErrCodeDeploymentConfigNameRequiredException for service response error code
// "DeploymentConfigNameRequiredException".
//
// The deployment configuration name was not specified.
ErrCodeDeploymentConfigNameRequiredException = "DeploymentConfigNameRequiredException"
// ErrCodeDeploymentDoesNotExistException for service response error code
// "DeploymentDoesNotExistException".
//
// The deployment with the IAM user or AWS account does not exist.
ErrCodeDeploymentDoesNotExistException = "DeploymentDoesNotExistException"
// ErrCodeDeploymentGroupAlreadyExistsException for service response error code
// "DeploymentGroupAlreadyExistsException".
//
// A deployment group with the specified name with the IAM user or AWS account
// already exists.
ErrCodeDeploymentGroupAlreadyExistsException = "DeploymentGroupAlreadyExistsException"
// ErrCodeDeploymentGroupDoesNotExistException for service response error code
// "DeploymentGroupDoesNotExistException".
//
// The named deployment group with the IAM user or AWS account does not exist.
ErrCodeDeploymentGroupDoesNotExistException = "DeploymentGroupDoesNotExistException"
// ErrCodeDeploymentGroupLimitExceededException for service response error code
// "DeploymentGroupLimitExceededException".
//
// The deployment groups limit was exceeded.
ErrCodeDeploymentGroupLimitExceededException = "DeploymentGroupLimitExceededException"
// ErrCodeDeploymentGroupNameRequiredException for service response error code
// "DeploymentGroupNameRequiredException".
//
// The deployment group name was not specified.
ErrCodeDeploymentGroupNameRequiredException = "DeploymentGroupNameRequiredException"
// ErrCodeDeploymentIdRequiredException for service response error code
// "DeploymentIdRequiredException".
//
// At least one deployment ID must be specified.
ErrCodeDeploymentIdRequiredException = "DeploymentIdRequiredException"
// ErrCodeDeploymentIsNotInReadyStateException for service response error code
// "DeploymentIsNotInReadyStateException".
//
// The deployment does not have a status of Ready and can't continue yet.
ErrCodeDeploymentIsNotInReadyStateException = "DeploymentIsNotInReadyStateException"
// ErrCodeDeploymentLimitExceededException for service response error code
// "DeploymentLimitExceededException".
//
// The number of allowed deployments was exceeded.
ErrCodeDeploymentLimitExceededException = "DeploymentLimitExceededException"
// ErrCodeDeploymentNotStartedException for service response error code
// "DeploymentNotStartedException".
//
// The specified deployment has not started.
ErrCodeDeploymentNotStartedException = "DeploymentNotStartedException"
// ErrCodeDeploymentTargetDoesNotExistException for service response error code
// "DeploymentTargetDoesNotExistException".
//
// The provided target ID does not belong to the attempted deployment.
ErrCodeDeploymentTargetDoesNotExistException = "DeploymentTargetDoesNotExistException"
// ErrCodeDeploymentTargetIdRequiredException for service response error code
// "DeploymentTargetIdRequiredException".
//
// A deployment target ID was not provided.
ErrCodeDeploymentTargetIdRequiredException = "DeploymentTargetIdRequiredException"
// ErrCodeDeploymentTargetListSizeExceededException for service response error code
// "DeploymentTargetListSizeExceededException".
//
// The maximum number of targets that can be associated with an Amazon ECS or
// AWS Lambda deployment was exceeded. The target list of both types of deployments
// must have exactly one item. This exception does not apply to EC2/On-premises
// deployments.
ErrCodeDeploymentTargetListSizeExceededException = "DeploymentTargetListSizeExceededException"
// ErrCodeDescriptionTooLongException for service response error code
// "DescriptionTooLongException".
//
// The description is too long.
ErrCodeDescriptionTooLongException = "DescriptionTooLongException"
// ErrCodeECSServiceMappingLimitExceededException for service response error code
// "ECSServiceMappingLimitExceededException".
//
	// The Amazon ECS service is associated with more than one deployment group.
	// An Amazon ECS service can be associated with only one deployment group.
ErrCodeECSServiceMappingLimitExceededException = "ECSServiceMappingLimitExceededException"
// ErrCodeGitHubAccountTokenDoesNotExistException for service response error code
// "GitHubAccountTokenDoesNotExistException".
//
	// No GitHub account connection exists with the name specified in the call.
ErrCodeGitHubAccountTokenDoesNotExistException = "GitHubAccountTokenDoesNotExistException"
// ErrCodeGitHubAccountTokenNameRequiredException for service response error code
// "GitHubAccountTokenNameRequiredException".
//
// The call is missing a required GitHub account connection name.
ErrCodeGitHubAccountTokenNameRequiredException = "GitHubAccountTokenNameRequiredException"
// ErrCodeIamArnRequiredException for service response error code
// "IamArnRequiredException".
//
// No IAM ARN was included in the request. You must use an IAM session ARN or
// IAM user ARN in the request.
ErrCodeIamArnRequiredException = "IamArnRequiredException"
// ErrCodeIamSessionArnAlreadyRegisteredException for service response error code
// "IamSessionArnAlreadyRegisteredException".
//
// The request included an IAM session ARN that has already been used to register
// a different instance.
ErrCodeIamSessionArnAlreadyRegisteredException = "IamSessionArnAlreadyRegisteredException"
// ErrCodeIamUserArnAlreadyRegisteredException for service response error code
// "IamUserArnAlreadyRegisteredException".
//
// The specified IAM user ARN is already registered with an on-premises instance.
ErrCodeIamUserArnAlreadyRegisteredException = "IamUserArnAlreadyRegisteredException"
// ErrCodeIamUserArnRequiredException for service response error code
// "IamUserArnRequiredException".
//
// An IAM user ARN was not specified.
ErrCodeIamUserArnRequiredException = "IamUserArnRequiredException"
// ErrCodeInstanceDoesNotExistException for service response error code
// "InstanceDoesNotExistException".
//
// The specified instance does not exist in the deployment group.
ErrCodeInstanceDoesNotExistException = "InstanceDoesNotExistException"
// ErrCodeInstanceIdRequiredException for service response error code
// "InstanceIdRequiredException".
//
// The instance ID was not specified.
ErrCodeInstanceIdRequiredException = "InstanceIdRequiredException"
// ErrCodeInstanceLimitExceededException for service response error code
// "InstanceLimitExceededException".
//
// The maximum number of allowed on-premises instances in a single call was
// exceeded.
ErrCodeInstanceLimitExceededException = "InstanceLimitExceededException"
// ErrCodeInstanceNameAlreadyRegisteredException for service response error code
// "InstanceNameAlreadyRegisteredException".
//
// The specified on-premises instance name is already registered.
ErrCodeInstanceNameAlreadyRegisteredException = "InstanceNameAlreadyRegisteredException"
// ErrCodeInstanceNameRequiredException for service response error code
// "InstanceNameRequiredException".
//
// An on-premises instance name was not specified.
ErrCodeInstanceNameRequiredException = "InstanceNameRequiredException"
// ErrCodeInstanceNotRegisteredException for service response error code
// "InstanceNotRegisteredException".
//
// The specified on-premises instance is not registered.
ErrCodeInstanceNotRegisteredException = "InstanceNotRegisteredException"
// ErrCodeInvalidAlarmConfigException for service response error code
// "InvalidAlarmConfigException".
//
// The format of the alarm configuration is invalid. Possible causes include:
//
// * The alarm list is null.
//
// * The alarm object is null.
//
// * The alarm name is empty or null or exceeds the limit of 255 characters.<|fim▁hole|> // * The alarm configuration is enabled, but the alarm list is empty.
ErrCodeInvalidAlarmConfigException = "InvalidAlarmConfigException"
// ErrCodeInvalidApplicationNameException for service response error code
// "InvalidApplicationNameException".
//
// The application name was specified in an invalid format.
ErrCodeInvalidApplicationNameException = "InvalidApplicationNameException"
// ErrCodeInvalidArnException for service response error code
// "InvalidArnException".
//
// The specified ARN is not in a valid format.
ErrCodeInvalidArnException = "InvalidArnException"
// ErrCodeInvalidAutoRollbackConfigException for service response error code
// "InvalidAutoRollbackConfigException".
//
// The automatic rollback configuration was specified in an invalid format.
// For example, automatic rollback is enabled, but an invalid triggering event
// type or no event types were listed.
ErrCodeInvalidAutoRollbackConfigException = "InvalidAutoRollbackConfigException"
// ErrCodeInvalidAutoScalingGroupException for service response error code
// "InvalidAutoScalingGroupException".
//
// The Auto Scaling group was specified in an invalid format or does not exist.
ErrCodeInvalidAutoScalingGroupException = "InvalidAutoScalingGroupException"
// ErrCodeInvalidBlueGreenDeploymentConfigurationException for service response error code
// "InvalidBlueGreenDeploymentConfigurationException".
//
// The configuration for the blue/green deployment group was provided in an
// invalid format. For information about deployment configuration format, see
// CreateDeploymentConfig.
ErrCodeInvalidBlueGreenDeploymentConfigurationException = "InvalidBlueGreenDeploymentConfigurationException"
// ErrCodeInvalidBucketNameFilterException for service response error code
// "InvalidBucketNameFilterException".
//
// The bucket name either doesn't exist or was specified in an invalid format.
ErrCodeInvalidBucketNameFilterException = "InvalidBucketNameFilterException"
// ErrCodeInvalidComputePlatformException for service response error code
// "InvalidComputePlatformException".
//
// The computePlatform is invalid. The computePlatform should be Lambda, Server,
// or ECS.
ErrCodeInvalidComputePlatformException = "InvalidComputePlatformException"
// ErrCodeInvalidDeployedStateFilterException for service response error code
// "InvalidDeployedStateFilterException".
//
// The deployed state filter was specified in an invalid format.
ErrCodeInvalidDeployedStateFilterException = "InvalidDeployedStateFilterException"
// ErrCodeInvalidDeploymentConfigNameException for service response error code
// "InvalidDeploymentConfigNameException".
//
// The deployment configuration name was specified in an invalid format.
ErrCodeInvalidDeploymentConfigNameException = "InvalidDeploymentConfigNameException"
// ErrCodeInvalidDeploymentGroupNameException for service response error code
// "InvalidDeploymentGroupNameException".
//
// The deployment group name was specified in an invalid format.
ErrCodeInvalidDeploymentGroupNameException = "InvalidDeploymentGroupNameException"
// ErrCodeInvalidDeploymentIdException for service response error code
// "InvalidDeploymentIdException".
//
// At least one of the deployment IDs was specified in an invalid format.
ErrCodeInvalidDeploymentIdException = "InvalidDeploymentIdException"
// ErrCodeInvalidDeploymentInstanceTypeException for service response error code
// "InvalidDeploymentInstanceTypeException".
//
// An instance type was specified for an in-place deployment. Instance types
// are supported for blue/green deployments only.
ErrCodeInvalidDeploymentInstanceTypeException = "InvalidDeploymentInstanceTypeException"
// ErrCodeInvalidDeploymentStatusException for service response error code
// "InvalidDeploymentStatusException".
//
// The specified deployment status doesn't exist or cannot be determined.
ErrCodeInvalidDeploymentStatusException = "InvalidDeploymentStatusException"
// ErrCodeInvalidDeploymentStyleException for service response error code
// "InvalidDeploymentStyleException".
//
// An invalid deployment style was specified. Valid deployment types include
// "IN_PLACE" and "BLUE_GREEN." Valid deployment options include "WITH_TRAFFIC_CONTROL"
// and "WITHOUT_TRAFFIC_CONTROL."
ErrCodeInvalidDeploymentStyleException = "InvalidDeploymentStyleException"
// ErrCodeInvalidDeploymentTargetIdException for service response error code
// "InvalidDeploymentTargetIdException".
//
// The target ID provided was not valid.
ErrCodeInvalidDeploymentTargetIdException = "InvalidDeploymentTargetIdException"
// ErrCodeInvalidDeploymentWaitTypeException for service response error code
// "InvalidDeploymentWaitTypeException".
//
// The wait type is invalid.
ErrCodeInvalidDeploymentWaitTypeException = "InvalidDeploymentWaitTypeException"
// ErrCodeInvalidEC2TagCombinationException for service response error code
// "InvalidEC2TagCombinationException".
//
// A call was submitted that specified both Ec2TagFilters and Ec2TagSet, but
// only one of these data types can be used in a single call.
ErrCodeInvalidEC2TagCombinationException = "InvalidEC2TagCombinationException"
// ErrCodeInvalidEC2TagException for service response error code
// "InvalidEC2TagException".
//
// The tag was specified in an invalid format.
ErrCodeInvalidEC2TagException = "InvalidEC2TagException"
// ErrCodeInvalidECSServiceException for service response error code
// "InvalidECSServiceException".
//
// The Amazon ECS service identifier is not valid.
ErrCodeInvalidECSServiceException = "InvalidECSServiceException"
// ErrCodeInvalidExternalIdException for service response error code
// "InvalidExternalIdException".
//
// The external ID was specified in an invalid format.
ErrCodeInvalidExternalIdException = "InvalidExternalIdException"
// ErrCodeInvalidFileExistsBehaviorException for service response error code
// "InvalidFileExistsBehaviorException".
//
// An invalid fileExistsBehavior option was specified to determine how AWS CodeDeploy
// handles files or directories that already exist in a deployment target location,
// but weren't part of the previous successful deployment. Valid values include
// "DISALLOW," "OVERWRITE," and "RETAIN."
ErrCodeInvalidFileExistsBehaviorException = "InvalidFileExistsBehaviorException"
// ErrCodeInvalidGitHubAccountTokenException for service response error code
// "InvalidGitHubAccountTokenException".
//
// The GitHub token is not valid.
ErrCodeInvalidGitHubAccountTokenException = "InvalidGitHubAccountTokenException"
// ErrCodeInvalidGitHubAccountTokenNameException for service response error code
// "InvalidGitHubAccountTokenNameException".
//
// The format of the specified GitHub account connection name is invalid.
ErrCodeInvalidGitHubAccountTokenNameException = "InvalidGitHubAccountTokenNameException"
// ErrCodeInvalidIamSessionArnException for service response error code
// "InvalidIamSessionArnException".
//
// The IAM session ARN was specified in an invalid format.
ErrCodeInvalidIamSessionArnException = "InvalidIamSessionArnException"
// ErrCodeInvalidIamUserArnException for service response error code
// "InvalidIamUserArnException".
//
// The IAM user ARN was specified in an invalid format.
ErrCodeInvalidIamUserArnException = "InvalidIamUserArnException"
// ErrCodeInvalidIgnoreApplicationStopFailuresValueException for service response error code
// "InvalidIgnoreApplicationStopFailuresValueException".
//
// The IgnoreApplicationStopFailures value is invalid. For AWS Lambda deployments,
// false is expected. For EC2/On-premises deployments, true or false is expected.
ErrCodeInvalidIgnoreApplicationStopFailuresValueException = "InvalidIgnoreApplicationStopFailuresValueException"
// ErrCodeInvalidInputException for service response error code
// "InvalidInputException".
//
// The input was specified in an invalid format.
ErrCodeInvalidInputException = "InvalidInputException"
// ErrCodeInvalidInstanceNameException for service response error code
// "InvalidInstanceNameException".
//
// The on-premises instance name was specified in an invalid format.
ErrCodeInvalidInstanceNameException = "InvalidInstanceNameException"
// ErrCodeInvalidInstanceStatusException for service response error code
// "InvalidInstanceStatusException".
//
// The specified instance status does not exist.
ErrCodeInvalidInstanceStatusException = "InvalidInstanceStatusException"
// ErrCodeInvalidInstanceTypeException for service response error code
// "InvalidInstanceTypeException".
//
// An invalid instance type was specified for instances in a blue/green deployment.
// Valid values include "Blue" for an original environment and "Green" for a
// replacement environment.
ErrCodeInvalidInstanceTypeException = "InvalidInstanceTypeException"
// ErrCodeInvalidKeyPrefixFilterException for service response error code
// "InvalidKeyPrefixFilterException".
//
// The specified key prefix filter was specified in an invalid format.
ErrCodeInvalidKeyPrefixFilterException = "InvalidKeyPrefixFilterException"
// ErrCodeInvalidLifecycleEventHookExecutionIdException for service response error code
// "InvalidLifecycleEventHookExecutionIdException".
//
// A lifecycle event hook is invalid. Review the hooks section in your AppSpec
// file to ensure the lifecycle events and hooks functions are valid.
ErrCodeInvalidLifecycleEventHookExecutionIdException = "InvalidLifecycleEventHookExecutionIdException"
// ErrCodeInvalidLifecycleEventHookExecutionStatusException for service response error code
// "InvalidLifecycleEventHookExecutionStatusException".
//
// The result of a Lambda validation function that verifies a lifecycle event
// is invalid. It should return Succeeded or Failed.
ErrCodeInvalidLifecycleEventHookExecutionStatusException = "InvalidLifecycleEventHookExecutionStatusException"
// ErrCodeInvalidLoadBalancerInfoException for service response error code
// "InvalidLoadBalancerInfoException".
//
// An invalid load balancer name, or no load balancer name, was specified.
ErrCodeInvalidLoadBalancerInfoException = "InvalidLoadBalancerInfoException"
// ErrCodeInvalidMinimumHealthyHostValueException for service response error code
// "InvalidMinimumHealthyHostValueException".
//
// The minimum healthy instance value was specified in an invalid format.
ErrCodeInvalidMinimumHealthyHostValueException = "InvalidMinimumHealthyHostValueException"
// ErrCodeInvalidNextTokenException for service response error code
// "InvalidNextTokenException".
//
// The next token was specified in an invalid format.
ErrCodeInvalidNextTokenException = "InvalidNextTokenException"
// ErrCodeInvalidOnPremisesTagCombinationException for service response error code
// "InvalidOnPremisesTagCombinationException".
//
// A call was submitted that specified both OnPremisesTagFilters and OnPremisesTagSet,
// but only one of these data types can be used in a single call.
ErrCodeInvalidOnPremisesTagCombinationException = "InvalidOnPremisesTagCombinationException"
// ErrCodeInvalidOperationException for service response error code
// "InvalidOperationException".
//
// An invalid operation was detected.
ErrCodeInvalidOperationException = "InvalidOperationException"
// ErrCodeInvalidRegistrationStatusException for service response error code
// "InvalidRegistrationStatusException".
//
// The registration status was specified in an invalid format.
ErrCodeInvalidRegistrationStatusException = "InvalidRegistrationStatusException"
// ErrCodeInvalidRevisionException for service response error code
// "InvalidRevisionException".
//
// The revision was specified in an invalid format.
ErrCodeInvalidRevisionException = "InvalidRevisionException"
// ErrCodeInvalidRoleException for service response error code
// "InvalidRoleException".
//
// The service role ARN was specified in an invalid format. Or, if an Auto Scaling
// group was specified, the specified service role does not grant the appropriate
// permissions to Amazon EC2 Auto Scaling.
ErrCodeInvalidRoleException = "InvalidRoleException"
// ErrCodeInvalidSortByException for service response error code
// "InvalidSortByException".
//
// The column name to sort by is either not present or was specified in an invalid
// format.
ErrCodeInvalidSortByException = "InvalidSortByException"
// ErrCodeInvalidSortOrderException for service response error code
// "InvalidSortOrderException".
//
// The sort order was specified in an invalid format.
ErrCodeInvalidSortOrderException = "InvalidSortOrderException"
// ErrCodeInvalidTagException for service response error code
// "InvalidTagException".
//
// The tag was specified in an invalid format.
ErrCodeInvalidTagException = "InvalidTagException"
// ErrCodeInvalidTagFilterException for service response error code
// "InvalidTagFilterException".
//
// The tag filter was specified in an invalid format.
ErrCodeInvalidTagFilterException = "InvalidTagFilterException"
// ErrCodeInvalidTagsToAddException for service response error code
// "InvalidTagsToAddException".
//
// The specified tags are not valid.
ErrCodeInvalidTagsToAddException = "InvalidTagsToAddException"
// ErrCodeInvalidTargetFilterNameException for service response error code
// "InvalidTargetFilterNameException".
//
// The target filter name is invalid.
ErrCodeInvalidTargetFilterNameException = "InvalidTargetFilterNameException"
// ErrCodeInvalidTargetGroupPairException for service response error code
// "InvalidTargetGroupPairException".
//
// A target group pair associated with this deployment is not valid.
ErrCodeInvalidTargetGroupPairException = "InvalidTargetGroupPairException"
// ErrCodeInvalidTargetInstancesException for service response error code
// "InvalidTargetInstancesException".
//
// The target instance configuration is invalid. Possible causes include:
//
// * Configuration data for target instances was entered for an in-place
// deployment.
//
// * The limit of 10 tags for a tag type was exceeded.
//
// * The combined length of the tag names exceeded the limit.
//
// * A specified tag is not currently applied to any instances.
ErrCodeInvalidTargetInstancesException = "InvalidTargetInstancesException"
// ErrCodeInvalidTimeRangeException for service response error code
// "InvalidTimeRangeException".
//
// The specified time range was specified in an invalid format.
ErrCodeInvalidTimeRangeException = "InvalidTimeRangeException"
// ErrCodeInvalidTrafficRoutingConfigurationException for service response error code
// "InvalidTrafficRoutingConfigurationException".
//
// The configuration that specifies how traffic is routed during a deployment
// is invalid.
ErrCodeInvalidTrafficRoutingConfigurationException = "InvalidTrafficRoutingConfigurationException"
// ErrCodeInvalidTriggerConfigException for service response error code
// "InvalidTriggerConfigException".
//
// The trigger was specified in an invalid format.
ErrCodeInvalidTriggerConfigException = "InvalidTriggerConfigException"
// ErrCodeInvalidUpdateOutdatedInstancesOnlyValueException for service response error code
// "InvalidUpdateOutdatedInstancesOnlyValueException".
//
// The UpdateOutdatedInstancesOnly value is invalid. For AWS Lambda deployments,
// false is expected. For EC2/On-premises deployments, true or false is expected.
ErrCodeInvalidUpdateOutdatedInstancesOnlyValueException = "InvalidUpdateOutdatedInstancesOnlyValueException"
// ErrCodeLifecycleEventAlreadyCompletedException for service response error code
// "LifecycleEventAlreadyCompletedException".
//
// An attempt to return the status of an already completed lifecycle event occurred.
ErrCodeLifecycleEventAlreadyCompletedException = "LifecycleEventAlreadyCompletedException"
// ErrCodeLifecycleHookLimitExceededException for service response error code
// "LifecycleHookLimitExceededException".
//
// The limit for lifecycle hooks was exceeded.
ErrCodeLifecycleHookLimitExceededException = "LifecycleHookLimitExceededException"
// ErrCodeMultipleIamArnsProvidedException for service response error code
// "MultipleIamArnsProvidedException".
//
// Both an IAM user ARN and an IAM session ARN were included in the request.
// Use only one ARN type.
ErrCodeMultipleIamArnsProvidedException = "MultipleIamArnsProvidedException"
// ErrCodeOperationNotSupportedException for service response error code
// "OperationNotSupportedException".
//
// The API used does not support the deployment.
ErrCodeOperationNotSupportedException = "OperationNotSupportedException"
// ErrCodeResourceArnRequiredException for service response error code
// "ResourceArnRequiredException".
//
// The ARN of a resource is required, but was not found.
ErrCodeResourceArnRequiredException = "ResourceArnRequiredException"
// ErrCodeResourceValidationException for service response error code
// "ResourceValidationException".
//
// The specified resource could not be validated.
ErrCodeResourceValidationException = "ResourceValidationException"
// ErrCodeRevisionDoesNotExistException for service response error code
// "RevisionDoesNotExistException".
//
// The named revision does not exist with the IAM user or AWS account.
ErrCodeRevisionDoesNotExistException = "RevisionDoesNotExistException"
// ErrCodeRevisionRequiredException for service response error code
// "RevisionRequiredException".
//
// The revision ID was not specified.
ErrCodeRevisionRequiredException = "RevisionRequiredException"
// ErrCodeRoleRequiredException for service response error code
// "RoleRequiredException".
//
// The role ID was not specified.
ErrCodeRoleRequiredException = "RoleRequiredException"
// ErrCodeTagLimitExceededException for service response error code
// "TagLimitExceededException".
//
// The maximum allowed number of tags was exceeded.
ErrCodeTagLimitExceededException = "TagLimitExceededException"
// ErrCodeTagRequiredException for service response error code
// "TagRequiredException".
//
// A tag was not specified.
ErrCodeTagRequiredException = "TagRequiredException"
// ErrCodeTagSetListLimitExceededException for service response error code
// "TagSetListLimitExceededException".
//
// The number of tag groups included in the tag set list exceeded the maximum
// allowed limit of 3.
ErrCodeTagSetListLimitExceededException = "TagSetListLimitExceededException"
// ErrCodeThrottlingException for service response error code
// "ThrottlingException".
//
// An API function was called too frequently.
ErrCodeThrottlingException = "ThrottlingException"
// ErrCodeTriggerTargetsLimitExceededException for service response error code
// "TriggerTargetsLimitExceededException".
//
// The maximum allowed number of triggers was exceeded.
ErrCodeTriggerTargetsLimitExceededException = "TriggerTargetsLimitExceededException"
// ErrCodeUnsupportedActionForDeploymentTypeException for service response error code
// "UnsupportedActionForDeploymentTypeException".
//
// A call was submitted that is not supported for the specified deployment type.
ErrCodeUnsupportedActionForDeploymentTypeException = "UnsupportedActionForDeploymentTypeException"
)
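// Illustrative lookup (not part of the generated file; respMeta is a
// hypothetical protocol.ResponseMetadata value): the map below resolves a
// service error code string to the constructor for its typed error.
//
//	if newFn, ok := exceptionFromCode["ThrottlingException"]; ok {
//		err := newFn(respMeta)
//		_ = err
//	}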
var exceptionFromCode = map[string]func(protocol.ResponseMetadata) error{
"AlarmsLimitExceededException": newErrorAlarmsLimitExceededException,
"ApplicationAlreadyExistsException": newErrorApplicationAlreadyExistsException,
"ApplicationDoesNotExistException": newErrorApplicationDoesNotExistException,
"ApplicationLimitExceededException": newErrorApplicationLimitExceededException,
"ApplicationNameRequiredException": newErrorApplicationNameRequiredException,
"ArnNotSupportedException": newErrorArnNotSupportedException,
"BatchLimitExceededException": newErrorBatchLimitExceededException,
"BucketNameFilterRequiredException": newErrorBucketNameFilterRequiredException,
"DeploymentAlreadyCompletedException": newErrorDeploymentAlreadyCompletedException,
"DeploymentConfigAlreadyExistsException": newErrorDeploymentConfigAlreadyExistsException,
"DeploymentConfigDoesNotExistException": newErrorDeploymentConfigDoesNotExistException,
"DeploymentConfigInUseException": newErrorDeploymentConfigInUseException,
"DeploymentConfigLimitExceededException": newErrorDeploymentConfigLimitExceededException,
"DeploymentConfigNameRequiredException": newErrorDeploymentConfigNameRequiredException,
"DeploymentDoesNotExistException": newErrorDeploymentDoesNotExistException,
"DeploymentGroupAlreadyExistsException": newErrorDeploymentGroupAlreadyExistsException,
"DeploymentGroupDoesNotExistException": newErrorDeploymentGroupDoesNotExistException,
"DeploymentGroupLimitExceededException": newErrorDeploymentGroupLimitExceededException,
"DeploymentGroupNameRequiredException": newErrorDeploymentGroupNameRequiredException,
"DeploymentIdRequiredException": newErrorDeploymentIdRequiredException,
"DeploymentIsNotInReadyStateException": newErrorDeploymentIsNotInReadyStateException,
"DeploymentLimitExceededException": newErrorDeploymentLimitExceededException,
"DeploymentNotStartedException": newErrorDeploymentNotStartedException,
"DeploymentTargetDoesNotExistException": newErrorDeploymentTargetDoesNotExistException,
"DeploymentTargetIdRequiredException": newErrorDeploymentTargetIdRequiredException,
"DeploymentTargetListSizeExceededException": newErrorDeploymentTargetListSizeExceededException,
"DescriptionTooLongException": newErrorDescriptionTooLongException,
"ECSServiceMappingLimitExceededException": newErrorECSServiceMappingLimitExceededException,
"GitHubAccountTokenDoesNotExistException": newErrorGitHubAccountTokenDoesNotExistException,
"GitHubAccountTokenNameRequiredException": newErrorGitHubAccountTokenNameRequiredException,
"IamArnRequiredException": newErrorIamArnRequiredException,
"IamSessionArnAlreadyRegisteredException": newErrorIamSessionArnAlreadyRegisteredException,
"IamUserArnAlreadyRegisteredException": newErrorIamUserArnAlreadyRegisteredException,
"IamUserArnRequiredException": newErrorIamUserArnRequiredException,
"InstanceDoesNotExistException": newErrorInstanceDoesNotExistException,
"InstanceIdRequiredException": newErrorInstanceIdRequiredException,
"InstanceLimitExceededException": newErrorInstanceLimitExceededException,
"InstanceNameAlreadyRegisteredException": newErrorInstanceNameAlreadyRegisteredException,
"InstanceNameRequiredException": newErrorInstanceNameRequiredException,
"InstanceNotRegisteredException": newErrorInstanceNotRegisteredException,
"InvalidAlarmConfigException": newErrorInvalidAlarmConfigException,
"InvalidApplicationNameException": newErrorInvalidApplicationNameException,
"InvalidArnException": newErrorInvalidArnException,
"InvalidAutoRollbackConfigException": newErrorInvalidAutoRollbackConfigException,
"InvalidAutoScalingGroupException": newErrorInvalidAutoScalingGroupException,
"InvalidBlueGreenDeploymentConfigurationException": newErrorInvalidBlueGreenDeploymentConfigurationException,
"InvalidBucketNameFilterException": newErrorInvalidBucketNameFilterException,
"InvalidComputePlatformException": newErrorInvalidComputePlatformException,
"InvalidDeployedStateFilterException": newErrorInvalidDeployedStateFilterException,
"InvalidDeploymentConfigNameException": newErrorInvalidDeploymentConfigNameException,
"InvalidDeploymentGroupNameException": newErrorInvalidDeploymentGroupNameException,
"InvalidDeploymentIdException": newErrorInvalidDeploymentIdException,
"InvalidDeploymentInstanceTypeException": newErrorInvalidDeploymentInstanceTypeException,
"InvalidDeploymentStatusException": newErrorInvalidDeploymentStatusException,
"InvalidDeploymentStyleException": newErrorInvalidDeploymentStyleException,
"InvalidDeploymentTargetIdException": newErrorInvalidDeploymentTargetIdException,
"InvalidDeploymentWaitTypeException": newErrorInvalidDeploymentWaitTypeException,
"InvalidEC2TagCombinationException": newErrorInvalidEC2TagCombinationException,
"InvalidEC2TagException": newErrorInvalidEC2TagException,
"InvalidECSServiceException": newErrorInvalidECSServiceException,
"InvalidExternalIdException": newErrorInvalidExternalIdException,
"InvalidFileExistsBehaviorException": newErrorInvalidFileExistsBehaviorException,
"InvalidGitHubAccountTokenException": newErrorInvalidGitHubAccountTokenException,
"InvalidGitHubAccountTokenNameException": newErrorInvalidGitHubAccountTokenNameException,
"InvalidIamSessionArnException": newErrorInvalidIamSessionArnException,
"InvalidIamUserArnException": newErrorInvalidIamUserArnException,
"InvalidIgnoreApplicationStopFailuresValueException": newErrorInvalidIgnoreApplicationStopFailuresValueException,
"InvalidInputException": newErrorInvalidInputException,
"InvalidInstanceNameException": newErrorInvalidInstanceNameException,
"InvalidInstanceStatusException": newErrorInvalidInstanceStatusException,
"InvalidInstanceTypeException": newErrorInvalidInstanceTypeException,
"InvalidKeyPrefixFilterException": newErrorInvalidKeyPrefixFilterException,
"InvalidLifecycleEventHookExecutionIdException": newErrorInvalidLifecycleEventHookExecutionIdException,
"InvalidLifecycleEventHookExecutionStatusException": newErrorInvalidLifecycleEventHookExecutionStatusException,
"InvalidLoadBalancerInfoException": newErrorInvalidLoadBalancerInfoException,
"InvalidMinimumHealthyHostValueException": newErrorInvalidMinimumHealthyHostValueException,
"InvalidNextTokenException": newErrorInvalidNextTokenException,
"InvalidOnPremisesTagCombinationException": newErrorInvalidOnPremisesTagCombinationException,
"InvalidOperationException": newErrorInvalidOperationException,
"InvalidRegistrationStatusException": newErrorInvalidRegistrationStatusException,
"InvalidRevisionException": newErrorInvalidRevisionException,
"InvalidRoleException": newErrorInvalidRoleException,
"InvalidSortByException": newErrorInvalidSortByException,
"InvalidSortOrderException": newErrorInvalidSortOrderException,
"InvalidTagException": newErrorInvalidTagException,
"InvalidTagFilterException": newErrorInvalidTagFilterException,
"InvalidTagsToAddException": newErrorInvalidTagsToAddException,
"InvalidTargetFilterNameException": newErrorInvalidTargetFilterNameException,
"InvalidTargetGroupPairException": newErrorInvalidTargetGroupPairException,
"InvalidTargetInstancesException": newErrorInvalidTargetInstancesException,
"InvalidTimeRangeException": newErrorInvalidTimeRangeException,
"InvalidTrafficRoutingConfigurationException": newErrorInvalidTrafficRoutingConfigurationException,
"InvalidTriggerConfigException": newErrorInvalidTriggerConfigException,
"InvalidUpdateOutdatedInstancesOnlyValueException": newErrorInvalidUpdateOutdatedInstancesOnlyValueException,
"LifecycleEventAlreadyCompletedException": newErrorLifecycleEventAlreadyCompletedException,
"LifecycleHookLimitExceededException": newErrorLifecycleHookLimitExceededException,
"MultipleIamArnsProvidedException": newErrorMultipleIamArnsProvidedException,
"OperationNotSupportedException": newErrorOperationNotSupportedException,
"ResourceArnRequiredException": newErrorResourceArnRequiredException,
"ResourceValidationException": newErrorResourceValidationException,
"RevisionDoesNotExistException": newErrorRevisionDoesNotExistException,
"RevisionRequiredException": newErrorRevisionRequiredException,
"RoleRequiredException": newErrorRoleRequiredException,
"TagLimitExceededException": newErrorTagLimitExceededException,
"TagRequiredException": newErrorTagRequiredException,
"TagSetListLimitExceededException": newErrorTagSetListLimitExceededException,
"ThrottlingException": newErrorThrottlingException,
"TriggerTargetsLimitExceededException": newErrorTriggerTargetsLimitExceededException,
"UnsupportedActionForDeploymentTypeException": newErrorUnsupportedActionForDeploymentTypeException,
}<|fim▁end|>
|
//
// * Two alarms with the same name have been specified.
//
|
<|file_name|>polifill.js<|end_file_name|><|fim▁begin|>'use strict'
const reduce = Function.bind.call(Function.call, Array.prototype.reduce);
const isEnumerable = Function.bind.call(Function.call, Object.prototype.propertyIsEnumerable);
const concat = Function.bind.call(Function.call, Array.prototype.concat);
const keys = Reflect.ownKeys;
if (!Object.values) {
Object.values = (O) => reduce(keys(O), (v, k) => concat(v, typeof k === 'string' && isEnumerable(O, k) ? [O[k]] : []), []);
}
if (!Object.entries) {
Object.entries = (O) => reduce(keys(O), (e, k) => concat(e, typeof k === 'string' && isEnumerable(O, k) ? [
[k, O[k]]
] : []), []);
}
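// Illustrative check of the polyfills above (the object literal is arbitrary):
//
//   Object.values({a: 1, b: 2});  // -> [1, 2]
//   Object.entries({a: 1, b: 2}); // -> [['a', 1], ['b', 2]]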
//from
//https://medium.com/@_jh3y/throttling-and-debouncing-in-javascript-b01cad5c8edf#.jlqokoxtu
//or
//https://remysharp.com/2010/07/21/throttling-function-calls
function debounce(callback, delay) {
let timeout;
return function() {
const context = this,
args = arguments;
clearTimeout(timeout);
timeout = setTimeout(() => callback.apply(context, args), delay);
};
};
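// Illustrative usage of debounce (event name and delay are arbitrary):
// the callback fires once, 250ms after the last resize event.
//
//   window.addEventListener('resize', debounce(function() {
//     console.log('resize settled');
//   }, 250));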
function throttle(func, limit) {
let inThrottle,
lastFunc,
throttleTimer;
return function() {<|fim▁hole|> if (inThrottle) {
clearTimeout(lastFunc);
return lastFunc = setTimeout(function() {
func.apply(context, args);
inThrottle = false;
}, limit);
} else {
func.apply(context, args);
inThrottle = true;
return throttleTimer = setTimeout(() => inThrottle = false, limit);
}
};
};
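// Illustrative usage of throttle (event name and limit are arbitrary):
// the callback fires at most once every 100ms while scrolling.
//
//   window.addEventListener('scroll', throttle(function() {
//     console.log('scroll tick');
//   }, 100));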
/*END POLIFILL*/<|fim▁end|>
|
const context = this,
args = arguments;
|
<|file_name|>homeCtrl.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>angular.module('home', ['ngMessages'])
.controller('homeCtrl', [function() {
}]);<|fim▁end|>
| |
<|file_name|>serviceClientsAngular2.extensions.ts<|end_file_name|><|fim▁begin|>export class MyBaseClass {
protected transformOptions(options: any) {
return options;
}
<|fim▁hole|> protected transformResult(url: string, response: any, processor: (response: any) => any) {
return processor(response);
}
}<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:96c89faf399ad903c813617aca830b28b3330c35d8af37d08743722e06d9323d<|fim▁hole|>size 84<|fim▁end|>
| |
<|file_name|>reuse_test.py<|end_file_name|><|fim▁begin|>import tensorflow as tf
def f():<|fim▁hole|>
f()<|fim▁end|>
|
with tf.variable_scope('A') as scope:
print scope.reuse
|
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|>import uuid
import base64
import re
def generate_key():
"""
    Generates a uuid, encodes it with base32 and strips its padding.
    This reduces the string size from 32 to 26 chars; the final slice then
    truncates the key to 12 chars.
"""
return base64.b32encode(uuid.uuid4().bytes).strip('=').lower()[0:12]
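# Illustrative output (keys are random; this value is made up):
#
#   >>> generate_key()
#   'mfrggzdfmztw'  # 12 lowercase base32 characters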
def thousand_separator(x=0, sep='.', dot=','):
"""
    Returns the number as a string, grouped with the selected thousands
    separator and decimal mark.
"""
num, _, frac = str(x).partition(dot)
num = re.sub(r'(\d{3})(?=\d)', r'\1'+sep, num[::-1])[::-1]
if frac:
num += dot + frac
return num
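# Illustrative examples (values are arbitrary):
#
#   >>> thousand_separator(1234567)
#   '1.234.567'
#   >>> thousand_separator('1234567,89')
#   '1.234.567,89'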
def new_parser(passed_object, request_data):
"""
    Maps the request object passed from the client onto the expected object.
    Use this to create a new object by passing an instantiated empty object
    as the passed_object argument.
"""
for item, value in request_data.values.iteritems():
if hasattr(passed_object, item) and value is not None:
try:
setattr(passed_object, item, value)
except:
setattr(passed_object, item, convert_to_date(value))
passed_object.id = generate_key()
return passed_object
def edit_parser(passed_object, request_data):
"""
    Maps values from the passed JSON object onto an existing object for
    editing. Pass the object returned by a query as the passed_object
    argument.
"""
for item in request_data.values:
if item != "id" and hasattr(passed_object, item) and request_data.values.get(item) != None:<|fim▁hole|> return passed_object
def convert_to_date(date_string):
from datetime import date
input = date_string.split("-")
return date(int(input[0]),int(input[1]),int(input[2]))
def multikeysort(items, columns):
from operator import itemgetter
comparers = [ ((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
def comparer(left, right):
        for fn, mult in comparers:
            result = cmp(fn(left), fn(right))
            if result:
                return mult * result
        # only reached when every key compares equal
        return 0
    return sorted(items, cmp=comparer)<|fim▁end|>
|
setattr(passed_object, item, request_data.values.get(item))
|
<|file_name|>html.ts<|end_file_name|><|fim▁begin|>import * as cheerio from "cheerio";
<|fim▁hole|>import { createDestUrl } from "./path";
export class Html {
private _$: CheerioStatic;
constructor(source: string) {
this._$ = cheerio.load(source);
}
public finalize(): void {
const that: Html = this;
this._$("a").each(function(i, elem) {
const $this = that._$(this);
const href = $this.attr("href");
if (href) {
$this.attr("href", createDestUrl(href));
}
});
if (this._$(".language-math").length !== 0) {
this._$("*").each(function(i, elem) {
const $this = that._$(this);
$this.before(`
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
inlineMath: [['$','$'], ["\\\\(","\\\\)"]],
displayMath: [ ['$$','$$'], ["\\\\[","\\\\]"] ]
}
});
</script>
<script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML"></script>
<meta http-equiv="X-UA-Compatible" CONTENT="IE=EmulateIE7" />
`);
return false;
});
}
}
public getPageOptions(): PageOptions {
        // title is the first h1
const options: PageOptions = {};
const h1 = this._$("h1").first();
options.title = (h1 && h1.text()) || "";
return options;
}
public toHtml(): string {
return this._$.html();
}
}<|fim▁end|>
|
import { PageOptions } from "../../options";
|
<|file_name|>div_mod.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::rounding_modes::RoundingMode;
use malachite_base_test_util::generators::{
signed_gen, signed_gen_var_6, signed_pair_gen_var_4, unsigned_gen, unsigned_gen_var_1,
unsigned_pair_gen_var_12,
};
use std::panic::catch_unwind;
#[test]
fn test_div_mod_and_div_rem_unsigned() {
fn test<T: PrimitiveUnsigned>(n: T, d: T, q: T, r: T) {
assert_eq!(n.div_mod(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.div_assign_mod(d), r);
assert_eq!(mut_n, q);
assert_eq!(n.div_rem(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.div_assign_rem(d), r);
assert_eq!(mut_n, q);
}
test::<u8>(0, 1, 0, 0);
test::<u16>(0, 123, 0, 0);
test::<u32>(1, 1, 1, 0);
test::<u64>(123, 1, 123, 0);
test::<usize>(123, 123, 1, 0);
test::<u128>(123, 456, 0, 123);
test::<u16>(456, 123, 3, 87);
test::<u32>(u32::MAX, 1, u32::MAX, 0);
test::<usize>(0xffffffff, 0xffffffff, 1, 0);
test::<u64>(1000000000000, 1, 1000000000000, 0);
test::<u64>(1000000000000, 3, 333333333333, 1);
test::<u64>(1000000000000, 123, 8130081300, 100);
test::<u64>(1000000000000, 0xffffffff, 232, 3567587560);
test::<u128>(1000000000000000000000000, 1, 1000000000000000000000000, 0);
test::<u128>(1000000000000000000000000, 3, 333333333333333333333333, 1);
test::<u128>(1000000000000000000000000, 123, 8130081300813008130081, 37);
test::<u128>(
1000000000000000000000000,
0xffffffff,
232830643708079,
3167723695,
);
test::<u128>(
1000000000000000000000000,
1234567890987,
810000006723,
530068894399,
);
test::<u128>(
253640751230376270397812803167,
2669936877441,
94998781946290113,
1520301762334,
);
test::<u64>(3768477692975601, 11447376614057827956, 0, 3768477692975601);
test::<u64>(3356605361737854, 3081095617839357, 1, 275509743898497);
test::<u128>(
1098730198198174614195,
953382298040157850476,
1,
145347900158016763719,
);
test::<u128>(
69738658860594537152875081748,
69738658860594537152875081748,
1,
0,
);
test::<u128>(1000000000000000000000000, 1000000000000000000000000, 1, 0);
test::<u128>(0, 1000000000000000000000000, 0, 0);
test::<u128>(123, 1000000000000000000000000, 0, 123);
}
fn div_mod_and_div_rem_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_pair_gen_var_12::<T, T>().test_properties(|(x, y)| {
let mut mut_x = x;
let r = mut_x.div_assign_mod(y);
let q = mut_x;
assert_eq!(x.div_mod(y), (q, r));
let mut mut_x = x;
let r_alt = mut_x.div_assign_rem(y);
let q_alt = mut_x;
assert_eq!((q_alt, r_alt), (q, r));
assert_eq!(x.div_rem(y), (q, r));
assert_eq!((x / y, x % y), (q, r));
assert!(r < y);
assert_eq!(q * y + r, x);
});
unsigned_gen::<T>().test_properties(|x| {
assert_eq!(x.div_mod(T::ONE), (x, T::ZERO));
});
unsigned_gen_var_1::<T>().test_properties(|x| {
assert_eq!(x.div_mod(x), (T::ONE, T::ZERO));
assert_eq!(T::ZERO.div_mod(x), (T::ZERO, T::ZERO));
if x > T::ONE {
assert_eq!(T::ONE.div_mod(x), (T::ZERO, T::ONE));
}
});
}
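// Illustrative note (not part of the original tests): `div_mod` floors the
// quotient, so the remainder takes the sign of the divisor, while `div_rem`
// truncates toward zero, so the remainder takes the sign of the dividend:
//
//     assert_eq!((-7i32).div_mod(2), (-4, 1));  // floor: -4 * 2 + 1 == -7
//     assert_eq!((-7i32).div_rem(2), (-3, -1)); // trunc: -3 * 2 - 1 == -7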
#[test]
fn test_div_mod_signed() {
fn test<T: PrimitiveSigned>(n: T, d: T, q: T, r: T) {
assert_eq!(n.div_mod(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.div_assign_mod(d), r);
assert_eq!(mut_n, q);
}
test::<i8>(0, 1, 0, 0);
test::<i16>(0, 123, 0, 0);
test::<i32>(1, 1, 1, 0);
test::<i64>(123, 1, 123, 0);
test::<i128>(123, 123, 1, 0);
test::<isize>(123, 456, 0, 123);
test::<i16>(456, 123, 3, 87);
test::<i64>(0xffffffff, 1, 0xffffffff, 0);
test::<i64>(0xffffffff, 0xffffffff, 1, 0);
test::<i64>(1000000000000, 1, 1000000000000, 0);
test::<i64>(1000000000000, 3, 333333333333, 1);
test::<i64>(1000000000000, 123, 8130081300, 100);
test::<i64>(1000000000000, 0xffffffff, 232, 3567587560);
test::<i128>(1000000000000000000000000, 1, 1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, 3, 333333333333333333333333, 1);
test::<i128>(1000000000000000000000000, 123, 8130081300813008130081, 37);
test::<i128>(
1000000000000000000000000,
0xffffffff,
232830643708079,
3167723695,
);
test::<i128>(
1000000000000000000000000,
1234567890987,
810000006723,
530068894399,
);
test::<i128>(
253640751230376270397812803167,
2669936877441,
94998781946290113,
1520301762334,
);
test::<i128>(3768477692975601, 11447376614057827956, 0, 3768477692975601);
test::<i64>(3356605361737854, 3081095617839357, 1, 275509743898497);
test::<i128>(
1098730198198174614195,
953382298040157850476,
1,
145347900158016763719,
);
test::<i128>(
69738658860594537152875081748,
69738658860594537152875081748,
1,
0,
);
test::<i128>(1000000000000000000000000, 1000000000000000000000000, 1, 0);
test::<i128>(0, 1000000000000000000000000, 0, 0);
test::<i128>(123, 1000000000000000000000000, 0, 123);
test::<i8>(0, -1, 0, 0);
test::<i16>(0, -123, 0, 0);
test::<i32>(1, -1, -1, 0);
test::<i64>(123, -1, -123, 0);
test::<i128>(123, -123, -1, 0);
test::<isize>(123, -456, -1, -333);
test::<i16>(456, -123, -4, -36);
test::<i64>(0xffffffff, -1, -0xffffffff, 0);
test::<i64>(0xffffffff, -0xffffffff, -1, 0);
test::<i64>(1000000000000, -1, -1000000000000, 0);
test::<i64>(1000000000000, -3, -333333333334, -2);
test::<i64>(1000000000000, -123, -8130081301, -23);
test::<i64>(1000000000000, -0xffffffff, -233, -727379735);
test::<i128>(1000000000000000000000000, -1, -1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, -3, -333333333333333333333334, -2);
test::<i128>(
1000000000000000000000000,
-123,
-8130081300813008130082,
-86,
);
test::<i128>(
1000000000000000000000000,
-0xffffffff,
-232830643708080,
-1127243600,
);
test::<i128>(
1000000000000000000000000,
-1234567890987,
-810000006724,
-704498996588,
);
test::<i128>(
253640751230376270397812803167,
-2669936877441,
-94998781946290114,
-1149635115107,
);
test::<i128>(
3768477692975601,
-11447376614057827956,
-1,
-11443608136364852355,
);
test::<i64>(3356605361737854, -3081095617839357, -2, -2805585873940860);
test::<i128>(
1098730198198174614195,
-953382298040157850476,
-2,
-808034397882141086757,
);
test::<i128>(
69738658860594537152875081748,
-69738658860594537152875081748,
-1,
0,
);
test::<i128>(1000000000000000000000000, -1000000000000000000000000, -1, 0);
test::<i128>(0, -1000000000000000000000000, 0, 0);
test::<i128>(
123,
-1000000000000000000000000,
-1,
-999999999999999999999877,
);
test::<i8>(-1, 1, -1, 0);
test::<i16>(-123, 1, -123, 0);
test::<i32>(-123, 123, -1, 0);
test::<i64>(-123, 456, -1, 333);
test::<isize>(-456, 123, -4, 36);
test::<i64>(-0xffffffff, -1, 0xffffffff, 0);
test::<i64>(-0xffffffff, 0xffffffff, -1, 0);
test::<i64>(-1000000000000, 1, -1000000000000, 0);
test::<i64>(-1000000000000, 3, -333333333334, 2);
test::<i64>(-1000000000000, 123, -8130081301, 23);
test::<i64>(-1000000000000, 0xffffffff, -233, 727379735);
test::<i128>(-1000000000000000000000000, 1, -1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, 3, -333333333333333333333334, 2);
test::<i128>(-1000000000000000000000000, 123, -8130081300813008130082, 86);
test::<i128>(
-1000000000000000000000000,
0xffffffff,
-232830643708080,
1127243600,
);
test::<i128>(
-1000000000000000000000000,
1234567890987,
-810000006724,
704498996588,
);
test::<i128>(
-253640751230376270397812803167,
2669936877441,
-94998781946290114,
1149635115107,
);
test::<i128>(
-3768477692975601,
11447376614057827956,
-1,
11443608136364852355,
);
test::<i64>(-3356605361737854, 3081095617839357, -2, 2805585873940860);
test::<i128>(
-1098730198198174614195,
953382298040157850476,
-2,
808034397882141086757,
);
test::<i128>(
-69738658860594537152875081748,
69738658860594537152875081748,
-1,
0,
);
test::<i128>(-1000000000000000000000000, 1000000000000000000000000, -1, 0);
test::<i128>(
-123,
1000000000000000000000000,
-1,
999999999999999999999877,
);
test::<i8>(-1, -1, 1, 0);
test::<i16>(-123, -1, 123, 0);
test::<i32>(-123, -123, 1, 0);
test::<i64>(-123, -456, 0, -123);
test::<isize>(-456, -123, 3, -87);
test::<i128>(-0xffffffff, -1, 0xffffffff, 0);
test::<i64>(-0xffffffff, -0xffffffff, 1, 0);
test::<i64>(-1000000000000, -1, 1000000000000, 0);
test::<i64>(-1000000000000, -3, 333333333333, -1);
test::<i64>(-1000000000000, -123, 8130081300, -100);
test::<i64>(-1000000000000, -0xffffffff, 232, -3567587560);
test::<i128>(-1000000000000000000000000, -1, 1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, -3, 333333333333333333333333, -1);
test::<i128>(
-1000000000000000000000000,
-123,
8130081300813008130081,
-37,
);
test::<i128>(
-1000000000000000000000000,
-0xffffffff,
232830643708079,
-3167723695,
);
test::<i128>(
-1000000000000000000000000,
-1234567890987,
810000006723,
-530068894399,
);
test::<i128>(
-253640751230376270397812803167,
-2669936877441,
94998781946290113,
-1520301762334,
);
test::<i128>(
-3768477692975601,
-11447376614057827956,
0,
-3768477692975601,
);
test::<i64>(-3356605361737854, -3081095617839357, 1, -275509743898497);
test::<i128>(
-1098730198198174614195,
-953382298040157850476,
1,
-145347900158016763719,
);
test::<i128>(
-69738658860594537152875081748,
-69738658860594537152875081748,
1,
0,
);
test::<i128>(-1000000000000000000000000, -1000000000000000000000000, 1, 0);
test::<i128>(-123, -1000000000000000000000000, 0, -123);
}
fn div_mod_fail_helper<T: PrimitiveInt>() {
assert_panic!(T::ONE.div_mod(T::ZERO));
assert_panic!({
let mut n = T::ONE;
n.div_assign_mod(T::ZERO);
});
}
fn div_mod_signed_fail_helper<T: PrimitiveSigned>() {
assert_panic!(T::MIN.div_mod(T::NEGATIVE_ONE));
assert_panic!({
let mut n = T::MIN;
n.div_assign_mod(T::NEGATIVE_ONE);
});
}
#[test]
pub fn div_mod_fail() {
apply_fn_to_primitive_ints!(div_mod_fail_helper);
apply_fn_to_signeds!(div_mod_signed_fail_helper);
}
fn div_mod_properties_helper_signed<T: PrimitiveSigned>() {
signed_pair_gen_var_4::<T>().test_properties(|(x, y)| {
let mut mut_x = x;
let r = mut_x.div_assign_mod(y);
let q = mut_x;
assert_eq!(x.div_mod(y), (q, r));
let (q_alt, r_alt) = (x.div_round(y, RoundingMode::Floor), x.mod_op(y));
assert_eq!(q_alt, q);
assert_eq!(r_alt, r);
assert!(r.lt_abs(&y));
assert!(r == T::ZERO || (r > T::ZERO) == (y > T::ZERO));
if let Some(product) = q.checked_mul(y) {
assert_eq!(product + r, x);
} else if q > T::ZERO {
assert_eq!((q - T::ONE) * y + r + y, x);
} else {
assert_eq!((q + T::ONE) * y + r - y, x);
}
if x != T::MIN {
let (neg_q, neg_r) = (-x).div_mod(y);
assert_eq!(x.ceiling_div_mod(y), (-neg_q, -neg_r));
}
if y != T::MIN && (x != T::MIN || y != T::ONE) {
let (neg_q, r) = x.div_mod(-y);
assert_eq!(x.ceiling_div_mod(y), (-neg_q, r));
}
});
signed_gen::<T>().test_properties(|x| {
let (q, r) = x.div_mod(T::ONE);
assert_eq!(q, x);
assert_eq!(r, T::ZERO);
if x != T::MIN {
let (q, r) = x.div_mod(T::NEGATIVE_ONE);
assert_eq!(q, -x);
assert_eq!(r, T::ZERO);
}
});
signed_gen_var_6::<T>().test_properties(|x| {
assert_eq!(x.div_mod(T::ONE), (x, T::ZERO));
assert_eq!(x.div_mod(x), (T::ONE, T::ZERO));
assert_eq!(T::ZERO.div_mod(x), (T::ZERO, T::ZERO));
if x != T::MIN {
assert_eq!(x.div_mod(T::NEGATIVE_ONE), (-x, T::ZERO));
assert_eq!(x.div_mod(-x), (T::NEGATIVE_ONE, T::ZERO));
}
if x > T::ONE {
assert_eq!(T::ONE.div_mod(x), (T::ZERO, T::ONE));
assert_eq!(T::NEGATIVE_ONE.div_mod(x), (T::NEGATIVE_ONE, x - T::ONE));
}
});
}
#[test]
fn div_mod_properties() {
apply_fn_to_unsigneds!(div_mod_and_div_rem_properties_helper_unsigned);
apply_fn_to_signeds!(div_mod_properties_helper_signed);
}
#[test]
fn test_div_rem_signed() {
fn test<T: PrimitiveSigned>(n: T, d: T, q: T, r: T) {
assert_eq!(n.div_rem(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.div_assign_rem(d), r);
assert_eq!(mut_n, q);
}
test::<i8>(0, 1, 0, 0);
test::<i16>(0, 123, 0, 0);
test::<i32>(1, 1, 1, 0);
test::<i64>(123, 1, 123, 0);
test::<i128>(123, 123, 1, 0);
test::<isize>(123, 456, 0, 123);
test::<i16>(456, 123, 3, 87);
test::<i64>(0xffffffff, 1, 0xffffffff, 0);
test::<i64>(0xffffffff, 0xffffffff, 1, 0);
test::<i64>(1000000000000, 1, 1000000000000, 0);
test::<i64>(1000000000000, 3, 333333333333, 1);
test::<i64>(1000000000000, 123, 8130081300, 100);
test::<i64>(1000000000000, 0xffffffff, 232, 3567587560);
test::<i128>(1000000000000000000000000, 1, 1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, 3, 333333333333333333333333, 1);
test::<i128>(1000000000000000000000000, 123, 8130081300813008130081, 37);
test::<i128>(
1000000000000000000000000,
0xffffffff,
232830643708079,
3167723695,
);
test::<i128>(
1000000000000000000000000,
1234567890987,
810000006723,
530068894399,
);
test::<i128>(
253640751230376270397812803167,
2669936877441,
94998781946290113,
1520301762334,
);
test::<i128>(3768477692975601, 11447376614057827956, 0, 3768477692975601);
test::<i64>(3356605361737854, 3081095617839357, 1, 275509743898497);
test::<i128>(
1098730198198174614195,
953382298040157850476,
1,
145347900158016763719,
);
test::<i128>(
69738658860594537152875081748,
69738658860594537152875081748,
1,
0,
);
test::<i128>(1000000000000000000000000, 1000000000000000000000000, 1, 0);
test::<i128>(0, 1000000000000000000000000, 0, 0);
test::<i128>(123, 1000000000000000000000000, 0, 123);
test::<i8>(0, -1, 0, 0);
test::<i16>(0, -123, 0, 0);
test::<i32>(1, -1, -1, 0);
test::<i64>(123, -1, -123, 0);
test::<i128>(123, -123, -1, 0);
test::<isize>(123, -456, 0, 123);
test::<i16>(456, -123, -3, 87);
test::<i64>(0xffffffff, -1, -0xffffffff, 0);
test::<i64>(0xffffffff, -0xffffffff, -1, 0);
test::<i64>(1000000000000, -1, -1000000000000, 0);
test::<i64>(1000000000000, -3, -333333333333, 1);
test::<i64>(1000000000000, -123, -8130081300, 100);
test::<i64>(1000000000000, -0xffffffff, -232, 3567587560);
test::<i128>(1000000000000000000000000, -1, -1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, -3, -333333333333333333333333, 1);
test::<i128>(1000000000000000000000000, -123, -8130081300813008130081, 37);
test::<i128>(
1000000000000000000000000,
-0xffffffff,
-232830643708079,
3167723695,
);
test::<i128>(
1000000000000000000000000,
-1234567890987,
-810000006723,
530068894399,
);
test::<i128>(
253640751230376270397812803167,
-2669936877441,
-94998781946290113,
1520301762334,
);
test::<i128>(3768477692975601, -11447376614057827956, 0, 3768477692975601);
test::<i64>(3356605361737854, -3081095617839357, -1, 275509743898497);
test::<i128>(
1098730198198174614195,
-953382298040157850476,
-1,
145347900158016763719,
);
test::<i128>(
69738658860594537152875081748,
-69738658860594537152875081748,
-1,
0,
);
test::<i128>(1000000000000000000000000, -1000000000000000000000000, -1, 0);
test::<i128>(0, -1000000000000000000000000, 0, 0);
test::<i128>(123, -1000000000000000000000000, 0, 123);
test::<i8>(-1, 1, -1, 0);
test::<i16>(-123, 1, -123, 0);
test::<i32>(-123, 123, -1, 0);
test::<i64>(-123, 456, 0, -123);
test::<isize>(-456, 123, -3, -87);
test::<i64>(-0xffffffff, 1, -0xffffffff, 0);
test::<i64>(-0xffffffff, 0xffffffff, -1, 0);
test::<i64>(-1000000000000, 1, -1000000000000, 0);
test::<i64>(-1000000000000, 3, -333333333333, -1);
test::<i64>(-1000000000000, 123, -8130081300, -100);
test::<i64>(-1000000000000, 0xffffffff, -232, -3567587560);
test::<i128>(-1000000000000000000000000, 1, -1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, 3, -333333333333333333333333, -1);
test::<i128>(
-1000000000000000000000000,
123,
-8130081300813008130081,
-37,
);
test::<i128>(
-1000000000000000000000000,
0xffffffff,
-232830643708079,
-3167723695,
);
test::<i128>(
-1000000000000000000000000,
1234567890987,
-810000006723,
-530068894399,
);
test::<i128>(
-253640751230376270397812803167,
2669936877441,
-94998781946290113,
-1520301762334,
);
test::<i128>(
-3768477692975601,
11447376614057827956,
0,
-3768477692975601,
);
test::<i64>(-3356605361737854, 3081095617839357, -1, -275509743898497);
test::<i128>(
-1098730198198174614195,
953382298040157850476,
-1,
-145347900158016763719,
);
test::<i128>(
-69738658860594537152875081748,
69738658860594537152875081748,
-1,
0,
);
test::<i128>(-1000000000000000000000000, 1000000000000000000000000, -1, 0);
test::<i128>(-123, 1000000000000000000000000, 0, -123);
test::<i8>(-1, -1, 1, 0);
test::<i16>(-123, -1, 123, 0);
test::<i32>(-123, -123, 1, 0);
test::<i64>(-123, -456, 0, -123);
test::<isize>(-456, -123, 3, -87);
test::<i64>(-0xffffffff, -1, 0xffffffff, 0);
test::<i64>(-0xffffffff, -0xffffffff, 1, 0);
test::<i64>(-1000000000000, -1, 1000000000000, 0);
test::<i64>(-1000000000000, -3, 333333333333, -1);
test::<i64>(-1000000000000, -123, 8130081300, -100);
test::<i64>(-1000000000000, -0xffffffff, 232, -3567587560);
test::<i128>(-1000000000000000000000000, -1, 1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, -3, 333333333333333333333333, -1);
test::<i128>(
-1000000000000000000000000,
-123,
8130081300813008130081,
-37,
);
test::<i128>(
-1000000000000000000000000,
-0xffffffff,
232830643708079,
-3167723695,
);
test::<i128>(
-1000000000000000000000000,
-1234567890987,
810000006723,
-530068894399,
);
test::<i128>(
-253640751230376270397812803167,
-2669936877441,
94998781946290113,
-1520301762334,
);
test::<i128>(
-3768477692975601,
-11447376614057827956,
0,
-3768477692975601,
);
test::<i64>(-3356605361737854, -3081095617839357, 1, -275509743898497);
test::<i128>(
-1098730198198174614195,
-953382298040157850476,
1,
-145347900158016763719,
);
test::<i128>(
-69738658860594537152875081748,
-69738658860594537152875081748,
1,
0,
);
test::<i128>(-1000000000000000000000000, -1000000000000000000000000, 1, 0);
test::<i128>(-123, -1000000000000000000000000, 0, -123);
}
fn div_rem_fail_helper<T: PrimitiveInt>() {
assert_panic!(T::ONE.div_rem(T::ZERO));
assert_panic!({
let mut n = T::ONE;
n.div_assign_rem(T::ZERO);
});
}
fn div_rem_signed_fail_helper<T: PrimitiveSigned>() {
assert_panic!(T::MIN.div_rem(T::NEGATIVE_ONE));
assert_panic!({
let mut n = T::MIN;
n.div_assign_rem(T::NEGATIVE_ONE);
});
}
#[test]
pub fn div_rem_fail() {
apply_fn_to_primitive_ints!(div_rem_fail_helper);
apply_fn_to_signeds!(div_rem_signed_fail_helper);
}
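// Property tests for truncating division: the quotient rounds toward zero and
// the remainder, when nonzero, takes the sign of the dividend.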
fn div_rem_properties_helper_signed<T: PrimitiveSigned>() {
signed_pair_gen_var_4::<T>().test_properties(|(x, y)| {
let mut mut_x = x;
let r = mut_x.div_assign_rem(y);
let q = mut_x;
assert_eq!(x.div_rem(y), (q, r));
assert_eq!((x / y, x % y), (q, r));
assert!(r.lt_abs(&y));
assert!(r == T::ZERO || (r > T::ZERO) == (x > T::ZERO));
assert_eq!(q * y + r, x);
if x != T::MIN {
assert_eq!((-x).div_rem(y), (-q, -r));
}
if y != T::MIN && (x != T::MIN || y != T::ONE) {
assert_eq!(x.div_rem(-y), (-q, r));
}
});
signed_gen::<T>().test_properties(|x| {
let (q, r) = x.div_rem(T::ONE);
assert_eq!(q, x);
assert_eq!(r, T::ZERO);
if x != T::MIN {
let (q, r) = x.div_rem(T::NEGATIVE_ONE);
assert_eq!(q, -x);
assert_eq!(r, T::ZERO);
}
});
signed_gen_var_6::<T>().test_properties(|x| {
assert_eq!(x.div_rem(T::ONE), (x, T::ZERO));
assert_eq!(x.div_rem(x), (T::ONE, T::ZERO));
assert_eq!(T::ZERO.div_rem(x), (T::ZERO, T::ZERO));
if x != T::MIN {
assert_eq!(x.div_rem(T::NEGATIVE_ONE), (-x, T::ZERO));
assert_eq!(x.div_rem(-x), (T::NEGATIVE_ONE, T::ZERO));
}
if x > T::ONE {
assert_eq!(T::ONE.div_rem(x), (T::ZERO, T::ONE));
assert_eq!(T::NEGATIVE_ONE.div_rem(x), (T::ZERO, T::NEGATIVE_ONE));
}
});
}
#[test]
fn div_rem_properties() {
apply_fn_to_signeds!(div_rem_properties_helper_signed);
}
#[test]
fn test_ceiling_div_neg_mod() {
fn test<T: PrimitiveUnsigned>(n: T, d: T, q: T, r: T) {
assert_eq!(n.ceiling_div_neg_mod(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.ceiling_div_assign_neg_mod(d), r);
assert_eq!(mut_n, q);
}
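    // For each case, q == ceil(n / d) and r == q * d - n, with 0 <= r < d.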
test::<u8>(0, 1, 0, 0);
test::<u16>(0, 123, 0, 0);
test::<u32>(1, 1, 1, 0);
test::<u64>(123, 1, 123, 0);
test::<u128>(123, 123, 1, 0);
test::<usize>(123, 456, 1, 333);
test::<u16>(456, 123, 4, 36);
test::<u64>(0xffffffff, 1, 0xffffffff, 0);
test::<u64>(0xffffffff, 0xffffffff, 1, 0);
test::<u64>(1000000000000, 1, 1000000000000, 0);
test::<u64>(1000000000000, 3, 333333333334, 2);
test::<u64>(1000000000000, 123, 8130081301, 23);
test::<u64>(1000000000000, 0xffffffff, 233, 727379735);
test::<u128>(1000000000000000000000000, 1, 1000000000000000000000000, 0);
test::<u128>(1000000000000000000000000, 3, 333333333333333333333334, 2);
test::<u128>(1000000000000000000000000, 123, 8130081300813008130082, 86);
test::<u128>(
1000000000000000000000000,
0xffffffff,
232830643708080,
1127243600,
);
test::<u128>(
1000000000000000000000000,
1234567890987,
810000006724,
704498996588,
);
test::<u128>(
253640751230376270397812803167,
2669936877441,
94998781946290114,
1149635115107,
);
test::<u64>(
3768477692975601,
11447376614057827956,
1,
11443608136364852355,
);
test::<u64>(3356605361737854, 3081095617839357, 2, 2805585873940860);
test::<u128>(
1098730198198174614195,
953382298040157850476,
2,
808034397882141086757,
);
test::<u128>(
69738658860594537152875081748,
69738658860594537152875081748,
1,
0,
);
test::<u128>(1000000000000000000000000, 1000000000000000000000000, 1, 0);
test::<u128>(0, 1000000000000000000000000, 0, 0);
test::<u128>(123, 1000000000000000000000000, 1, 999999999999999999999877);
}
fn ceiling_div_neg_mod_fail_helper<T: PrimitiveUnsigned>() {
assert_panic!(T::ONE.ceiling_div_neg_mod(T::ZERO));
assert_panic!({
let mut n = T::ONE;
n.ceiling_div_assign_neg_mod(T::ZERO);
});
}
#[test]
pub fn ceiling_div_neg_mod_fail() {
apply_fn_to_unsigneds!(ceiling_div_neg_mod_fail_helper);
}
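// Property tests for unsigned ceiling division: x == q * y - r with 0 <= r < y.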
fn ceiling_div_neg_mod_properties_helper<T: PrimitiveUnsigned>() {
unsigned_pair_gen_var_12::<T, T>().test_properties(|(x, y)| {
let mut mut_x = x;
let r = mut_x.ceiling_div_assign_neg_mod(y);
let q = mut_x;
assert_eq!(x.ceiling_div_neg_mod(y), (q, r));
let (q_alt, r_alt) = (x.div_round(y, RoundingMode::Ceiling), x.neg_mod(y));
assert_eq!(q_alt, q);
assert_eq!(r_alt, r);
assert!(r < y);
if let Some(product) = q.checked_mul(y) {
assert_eq!(product - r, x);
} else {
assert_eq!((q - T::ONE) * y - r + y, x);
}
});
unsigned_gen::<T>().test_properties(|x| {
assert_eq!(x.ceiling_div_neg_mod(T::ONE), (x, T::ZERO));
});
unsigned_gen_var_1::<T>().test_properties(|x| {
assert_eq!(x.ceiling_div_neg_mod(x), (T::ONE, T::ZERO));
assert_eq!(T::ZERO.ceiling_div_neg_mod(x), (T::ZERO, T::ZERO));
if x > T::ONE {
assert_eq!(T::ONE.ceiling_div_neg_mod(x), (T::ONE, x - T::ONE));
}
});
}
#[test]
fn ceiling_div_neg_mod_properties() {
apply_fn_to_unsigneds!(ceiling_div_neg_mod_properties_helper);
}
#[test]
fn test_ceiling_div_mod() {
fn test<T: PrimitiveSigned>(n: T, d: T, q: T, r: T) {
assert_eq!(n.ceiling_div_mod(d), (q, r));
let mut mut_n = n;
assert_eq!(mut_n.ceiling_div_assign_mod(d), r);
assert_eq!(mut_n, q);
}
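    // For each case, q == ceil(n / d) and r == n - q * d, so r is zero or has
    // the opposite sign of d.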
test::<i8>(0, 1, 0, 0);
test::<i16>(0, 123, 0, 0);
test::<i32>(1, 1, 1, 0);
test::<i64>(123, 1, 123, 0);
test::<i128>(123, 123, 1, 0);
test::<isize>(123, 456, 1, -333);
test::<i16>(456, 123, 4, -36);
test::<i64>(0xffffffff, 1, 0xffffffff, 0);
test::<i64>(0xffffffff, 0xffffffff, 1, 0);
test::<i64>(1000000000000, 1, 1000000000000, 0);
test::<i64>(1000000000000, 3, 333333333334, -2);
test::<i64>(1000000000000, 123, 8130081301, -23);
test::<i64>(1000000000000, 0xffffffff, 233, -727379735);
test::<i128>(1000000000000000000000000, 1, 1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, 3, 333333333333333333333334, -2);
test::<i128>(1000000000000000000000000, 123, 8130081300813008130082, -86);
test::<i128>(
1000000000000000000000000,
0xffffffff,
232830643708080,
-1127243600,
);
test::<i128>(
1000000000000000000000000,
1234567890987,
810000006724,
-704498996588,
);
test::<i128>(
253640751230376270397812803167,
2669936877441,
94998781946290114,
-1149635115107,
);
test::<i128>(
3768477692975601,
11447376614057827956,
1,
-11443608136364852355,
);
test::<i64>(3356605361737854, 3081095617839357, 2, -2805585873940860);
test::<i128>(
1098730198198174614195,
953382298040157850476,
2,
-808034397882141086757,
);
test::<i128>(
69738658860594537152875081748,
69738658860594537152875081748,
1,
0,
);
test::<i128>(1000000000000000000000000, 1000000000000000000000000, 1, 0);
test::<i128>(0, 1000000000000000000000000, 0, 0);
test::<i128>(123, 1000000000000000000000000, 1, -999999999999999999999877);
test::<i8>(0, -1, 0, 0);
test::<i16>(0, -123, 0, 0);
test::<i32>(1, -1, -1, 0);
test::<i64>(123, -1, -123, 0);
test::<i128>(123, -123, -1, 0);
test::<isize>(123, -456, 0, 123);
test::<i16>(456, -123, -3, 87);
test::<i64>(0xffffffff, -1, -0xffffffff, 0);
test::<i64>(0xffffffff, -0xffffffff, -1, 0);
test::<i64>(1000000000000, -1, -1000000000000, 0);
test::<i64>(1000000000000, -3, -333333333333, 1);
test::<i64>(1000000000000, -123, -8130081300, 100);
test::<i64>(1000000000000, -0xffffffff, -232, 3567587560);
test::<i128>(1000000000000000000000000, -1, -1000000000000000000000000, 0);
test::<i128>(1000000000000000000000000, -3, -333333333333333333333333, 1);
test::<i128>(1000000000000000000000000, -123, -8130081300813008130081, 37);
test::<i128>(
1000000000000000000000000,
-0xffffffff,
-232830643708079,
3167723695,
);
test::<i128>(
1000000000000000000000000,
-1234567890987,
-810000006723,
530068894399,
);
test::<i128>(
253640751230376270397812803167,
-2669936877441,
-94998781946290113,
1520301762334,
);
test::<i128>(3768477692975601, -11447376614057827956, 0, 3768477692975601);
test::<i64>(3356605361737854, -3081095617839357, -1, 275509743898497);
test::<i128>(
1098730198198174614195,
-953382298040157850476,
-1,
145347900158016763719,
);
test::<i128>(
69738658860594537152875081748,
-69738658860594537152875081748,
-1,
0,
);
test::<i128>(1000000000000000000000000, -1000000000000000000000000, -1, 0);
test::<i128>(0, -1000000000000000000000000, 0, 0);
test::<i128>(123, -1000000000000000000000000, 0, 123);
test::<i8>(-1, 1, -1, 0);
test::<i16>(-123, 1, -123, 0);
test::<i32>(-123, 123, -1, 0);
test::<i64>(-123, 456, 0, -123);
test::<i128>(-456, 123, -3, -87);
test::<isize>(-0xffffffff, 1, -0xffffffff, 0);
test::<i64>(-0xffffffff, 0xffffffff, -1, 0);
test::<i64>(-1000000000000, 1, -1000000000000, 0);
test::<i64>(-1000000000000, 3, -333333333333, -1);
test::<i64>(-1000000000000, 123, -8130081300, -100);
test::<i64>(-1000000000000, 0xffffffff, -232, -3567587560);
test::<i128>(-1000000000000000000000000, 1, -1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, 3, -333333333333333333333333, -1);
test::<i128>(
-1000000000000000000000000,
123,
-8130081300813008130081,
-37,
);
test::<i128>(
-1000000000000000000000000,
0xffffffff,
-232830643708079,
-3167723695,
);
test::<i128>(
-1000000000000000000000000,
1234567890987,
-810000006723,
-530068894399,
);
test::<i128>(
-253640751230376270397812803167,
2669936877441,
-94998781946290113,
-1520301762334,
);
test::<i128>(
-3768477692975601,
11447376614057827956,
0,
-3768477692975601,
);
test::<i64>(-3356605361737854, 3081095617839357, -1, -275509743898497);
test::<i128>(
-1098730198198174614195,
953382298040157850476,
-1,
-145347900158016763719,
);
test::<i128>(
-69738658860594537152875081748,
69738658860594537152875081748,
-1,
0,
);
test::<i128>(-1000000000000000000000000, 1000000000000000000000000, -1, 0);
test::<i128>(0, 1000000000000000000000000, 0, 0);
test::<i128>(-123, 1000000000000000000000000, 0, -123);
test::<i8>(-1, -1, 1, 0);
test::<i16>(-123, -1, 123, 0);
test::<i32>(-123, -123, 1, 0);
test::<i64>(-123, -456, 1, 333);
test::<i128>(-456, -123, 4, 36);
test::<isize>(-0xffffffff, -1, 0xffffffff, 0);
test::<i64>(-0xffffffff, -0xffffffff, 1, 0);
test::<i64>(-1000000000000, -1, 1000000000000, 0);
test::<i64>(-1000000000000, -3, 333333333334, 2);
test::<i64>(-1000000000000, -123, 8130081301, 23);
test::<i64>(-1000000000000, -0xffffffff, 233, 727379735);
test::<i128>(-1000000000000000000000000, -1, 1000000000000000000000000, 0);
test::<i128>(-1000000000000000000000000, -3, 333333333333333333333334, 2);
test::<i128>(-1000000000000000000000000, -123, 8130081300813008130082, 86);
test::<i128>(
-1000000000000000000000000,
-0xffffffff,
232830643708080,
1127243600,
);
test::<i128>(
-1000000000000000000000000,
-1234567890987,
810000006724,
704498996588,
);
test::<i128>(
-253640751230376270397812803167,
-2669936877441,
94998781946290114,
1149635115107,
);
test::<i128>(
-3768477692975601,
-11447376614057827956,
1,
11443608136364852355,
);
test::<i64>(-3356605361737854, -3081095617839357, 2, 2805585873940860);
test::<i128>(
-1098730198198174614195,
-953382298040157850476,
2,
808034397882141086757,
);
test::<i128>(
-69738658860594537152875081748,
-69738658860594537152875081748,
1,
0,
);
test::<i128>(-1000000000000000000000000, -1000000000000000000000000, 1, 0);
test::<i128>(0, -1000000000000000000000000, 0, 0);
test::<i128>(
-123,
-1000000000000000000000000,
1,
999999999999999999999877,
);
}
fn ceiling_div_mod_fail_helper<T: PrimitiveSigned>() {
assert_panic!(T::ONE.ceiling_div_mod(T::ZERO));
assert_panic!({
let mut n = T::ONE;
n.ceiling_div_assign_mod(T::ZERO);
});
assert_panic!(T::MIN.ceiling_div_mod(T::NEGATIVE_ONE));
assert_panic!({
let mut n = T::MIN;
n.ceiling_div_assign_mod(T::NEGATIVE_ONE);
});
}
#[test]
pub fn ceiling_div_mod_fail() {
apply_fn_to_signeds!(ceiling_div_mod_fail_helper);
}
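// Property tests for signed ceiling division: the quotient rounds toward
// positive infinity, so a nonzero remainder has the opposite sign of the divisor.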
fn ceiling_div_mod_properties_helper<T: PrimitiveSigned>() {
signed_pair_gen_var_4::<T>().test_properties(|(x, y)| {
let mut mut_x = x;
let r = mut_x.ceiling_div_assign_mod(y);
let q = mut_x;
assert_eq!(x.ceiling_div_mod(y), (q, r));
let (q_alt, r_alt) = (x.div_round(y, RoundingMode::Ceiling), x.ceiling_mod(y));
assert_eq!(q_alt, q);
assert_eq!(r_alt, r);
assert!(r.lt_abs(&y));
assert!(r == T::ZERO || (r > T::ZERO) != (y > T::ZERO));
if let Some(product) = q.checked_mul(y) {
assert_eq!(product + r, x);
} else if q > T::ZERO {
assert_eq!((q - T::ONE) * y + r + y, x);
} else {
assert_eq!((q + T::ONE) * y + r - y, x);
}
if x != T::MIN {
let (neg_q, neg_r) = (-x).ceiling_div_mod(y);
assert_eq!(x.div_mod(y), (-neg_q, -neg_r));
}
if y != T::MIN && (x != T::MIN || y != T::ONE) {
let (neg_q, r) = x.ceiling_div_mod(-y);
assert_eq!(x.div_mod(y), (-neg_q, r));
}
});
signed_gen::<T>().test_properties(|x| {
let (q, r) = x.ceiling_div_mod(T::ONE);
assert_eq!(q, x);
assert_eq!(r, T::ZERO);
if x != T::MIN {
let (q, r) = x.ceiling_div_mod(T::NEGATIVE_ONE);
assert_eq!(q, -x);
assert_eq!(r, T::ZERO);
}
});
signed_gen_var_6::<T>().test_properties(|x| {
assert_eq!(x.ceiling_div_mod(T::ONE), (x, T::ZERO));
if x != T::MIN {
assert_eq!(x.ceiling_div_mod(T::NEGATIVE_ONE), (-x, T::ZERO));
}
        assert_eq!(x.ceiling_div_mod(x), (T::ONE, T::ZERO));
        if x != T::MIN {
assert_eq!(x.ceiling_div_mod(-x), (T::NEGATIVE_ONE, T::ZERO));
}
assert_eq!(T::ZERO.ceiling_div_mod(x), (T::ZERO, T::ZERO));
});
}
#[test]
fn ceiling_div_mod_properties() {
apply_fn_to_signeds!(ceiling_div_mod_properties_helper);
}<|fim▁end|>
| |
<|file_name|>payments.rs<|end_file_name|><|fim▁begin|>extern crate futures;
extern crate indy_sys;
use indy::{IndyError, ErrorCode};
use indy::payments;
use self::futures::Future;
use self::indy_sys::payments as payments_sys;
use std::collections::VecDeque;
use std::ffi::CString;
use super::libc::c_char;
use std::sync::{Once, Mutex};
use indy::{WalletHandle, CommandHandle};
use crate::utils::callback;
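// Generates a mock handler module for a payment-method callback: `handle` pops
// the next injected `(error, response)` pair that a test queued via `inject_mock`.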
#[macro_export]
macro_rules! mocked_handler {
($first_param_name: ident: $first_param_type: ty $(, $param_name: ident: $param_type: ty)*) => (
use super::*;
lazy_static! {
static ref INJECTIONS: Mutex<VecDeque<(i32, CString)>> = Default::default();
}
        pub extern fn handle(cmd_handle: CommandHandle,
                             $first_param_name: $first_param_type,
                             $($param_name: $param_type,)*
                             cb: Option<IndyPaymentCallback>) -> i32 {
            let cb = cb.unwrap_or_else(|| {
panic!("Null passed as callback!")
});
if let Ok(mut injections) = INJECTIONS.lock() {
if let Some((err, res)) = injections.pop_front() {
return (cb)(cmd_handle, err, res.as_ptr());
}
} else {
panic!("Can't lock injections mutex");
}
panic!("No injections left!");
}
pub fn inject_mock(err: ErrorCode, res: &str) {
if let Ok(mut injections) = INJECTIONS.lock() {
let res = CString::new(res).unwrap();
injections.push_back((err as i32, res))
} else {
panic!("Can't lock injections mutex");
}
}
pub fn clear_mocks() {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.clear();
} else {
panic!("Can't lock injections mutex");
}
}
)
}
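// Like `mocked_handler`, but for callbacks that return a byte buffer
// (pointer and length) instead of a C string.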
macro_rules! mocked_handler_slice {
($first_param_name: ident: $first_param_type: ty $(, $param_name: ident: $param_type: ty)*) => (
use super::*;
lazy_static! {
static ref INJECTIONS: Mutex<VecDeque<(i32, Vec<u8>)>> = Default::default();
}
pub extern fn handle(cmd_handle: CommandHandle,
$first_param_name: $first_param_type,
$($param_name: $param_type,)*
cb: Option<extern fn(command_handle_: CommandHandle, err_: i32, raw: *const u8, len: u32)>) -> i32 {
let cb = cb.unwrap_or_else(|| {
panic!("Null passed as callback!")
});
if let Ok(mut injections) = INJECTIONS.lock() {
if let Some((err, r)) = injections.pop_front() {
(cb)(cmd_handle, err, r.as_slice().as_ptr() as *const u8, r.len() as u32);
return err;
}
} else {
panic!("Can't lock injections mutex");
}
panic!("No injections left!");
}
pub fn inject_mock(err: ErrorCode, r: Vec<u8>) {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.push_back((err as i32, r))
} else {
panic!("Can't lock injections mutex");
}
}
pub fn clear_mocks() {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.clear();
} else {
panic!("Can't lock injections mutex");
}
}
)
}
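// Like `mocked_handler`, but for callbacks that return a boolean verdict.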
macro_rules! mocked_handler_bool {
($first_param_name: ident: $first_param_type: ty $(, $param_name: ident: $param_type: ty)*) => (
use super::*;
lazy_static! {
static ref INJECTIONS: Mutex<VecDeque<(i32, bool)>> = Default::default();
}
pub extern fn handle(cmd_handle: CommandHandle,
$first_param_name: $first_param_type,
$($param_name: $param_type,)*
cb: Option<extern fn(command_handle_: CommandHandle, err_: i32, valid: bool)>) -> i32 {
let cb = cb.unwrap_or_else(|| {
panic!("Null passed as callback!")
});
if let Ok(mut injections) = INJECTIONS.lock() {
if let Some((err, res)) = injections.pop_front() {
(cb)(cmd_handle, err, res);
return err;
}
} else {
panic!("Can't lock injections mutex");
}
panic!("No injections left!");
}
pub fn inject_mock(err: ErrorCode, r: bool) {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.push_back((err as i32, r))
} else {
panic!("Can't lock injections mutex");
}
}
pub fn clear_mocks() {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.clear();
} else {
panic!("Can't lock injections mutex");
}
}
)
}
type IndyPaymentCallback = extern fn(command_handle_: CommandHandle,
err: i32,
payment_address: *const c_char) -> i32;
type ParsePaymentSourcesCallback = extern fn(command_handle_: CommandHandle,
err: i32,
payment_address: *const c_char,
next: i64) -> i32;
lazy_static! {
static ref CREATE_PAYMENT_METHOD_INIT: Once = Once::new();
}
pub mod mock_method {
use super::*;
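    // Registers the mock "null" payment method exactly once per process; all
    // of its callbacks delegate to the injectable handler modules below.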
pub fn init() {
CREATE_PAYMENT_METHOD_INIT.call_once(|| {
let (receiver, cmd_handle, cb) = callback::_closure_to_cb_ec();
let payment_method_name = CString::new("null").unwrap();
unsafe {
payments_sys::indy_register_payment_method(cmd_handle,
payment_method_name.as_ptr(),
Some(create_payment_address::handle),
Some(add_request_fees::handle),
Some(parse_response_with_fees::handle),
Some(build_get_payment_sources_request::handle),
Some(parse_get_payment_sources_response::handle),
Some(build_payment_req::handle),
Some(parse_payment_response::handle),
Some(build_mint_req::handle),
Some(build_set_txn_fees_req::handle),
Some(build_get_txn_fees_req::handle),
Some(parse_get_txn_fees_response::handle),
Some(build_verify_payment_req::handle),
Some(parse_verify_payment_response::handle),
Some(sign_with_address::handle),
Some(verify_with_address::handle),
cb,
);
}
receiver.recv().unwrap();
});
}
pub mod create_payment_address {
mocked_handler!(_wallet_handle: WalletHandle, _config: *const c_char);
}
pub mod add_request_fees {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _req_json: *const c_char, _inputs_json: *const c_char, _outputs_json: *const c_char, _extra: *const c_char);
}
pub mod parse_response_with_fees {
mocked_handler!(_resp_json: *const c_char);
}
pub mod build_get_payment_sources_request {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _payment_address: *const c_char, _from: i64);
}
pub mod parse_get_payment_sources_response {
use super::*;
lazy_static! {
static ref INJECTIONS: Mutex<VecDeque<(i32, CString, i64)>> = Default::default();
}
pub extern fn handle(cmd_handle: CommandHandle,
_response: *const c_char,
cb: Option<ParsePaymentSourcesCallback>) -> i32 {
let cb = cb.unwrap_or_else(|| {
panic!("Null passed as callback!")
});
if let Ok(mut injections) = INJECTIONS.lock() {
if let Some((err, res, num)) = injections.pop_front() {
return (cb)(cmd_handle, err, res.as_ptr(), num);
}
} else {
panic!("Can't lock injections mutex");
}
panic!("No injections left!");
}
pub fn inject_mock(err: ErrorCode, res: &str, num: i64) {
if let Ok(mut injections) = INJECTIONS.lock() {
let res = CString::new(res).unwrap();
injections.push_back((err as i32, res, num))
} else {
panic!("Can't lock injections mutex");
}
}
pub fn clear_mocks() {
if let Ok(mut injections) = INJECTIONS.lock() {
injections.clear();
} else {
panic!("Can't lock injections mutex");
}
}
}
pub mod build_payment_req {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _inputs_json: *const c_char, _outputs_json: *const c_char, _extra: *const c_char);
}
pub mod parse_payment_response {
mocked_handler!(_resp_json: *const c_char);
}
pub mod build_mint_req {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _outputs_json: *const c_char, _extra: *const c_char);
}
pub mod build_set_txn_fees_req {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _fees_json: *const c_char);
}
pub mod build_get_txn_fees_req {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char);
}
pub mod parse_get_txn_fees_response {
mocked_handler!(_resp_json: *const c_char);
}
pub mod build_verify_payment_req {
mocked_handler!(_wallet_handle: WalletHandle, _submitter_did: *const c_char, _receipt: *const c_char);
}
pub mod parse_verify_payment_response {
mocked_handler!(_resp_json: *const c_char);
}
pub mod sign_with_address {
mocked_handler_slice!(_wallet_handle: WalletHandle, _address: *const c_char, _message_raw: *const u8, _message_len: u32);
}
pub mod verify_with_address {
mocked_handler_bool!(_address: *const c_char, _message_raw: *const u8, _message_len: u32, _signature: *const u8, _signature_len: u32);
}
}
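// Blocking wrapper around `indy_register_payment_method`: registers the given
// callbacks under `payment_method_name` and waits for the registration result.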
pub fn register_payment_method(payment_method_name: &str,
create_payment_address: Option<payments_sys::CreatePaymentAddressCB>,
add_request_fees: Option<payments_sys::AddRequestFeesCB>,
parse_response_with_fees: Option<payments_sys::ParseResponseWithFeesCB>,
build_get_payment_sources_request: Option<payments_sys::BuildGetPaymentSourcesRequestCB>,
parse_get_payment_sources_response: Option<payments_sys::ParseGetPaymentSourcesResponseCB>,
build_payment_req: Option<payments_sys::BuildPaymentReqCB>,
parse_payment_response: Option<payments_sys::ParsePaymentResponseCB>,
build_mint_req: Option<payments_sys::BuildMintReqCB>,
build_set_txn_fees_req: Option<payments_sys::BuildSetTxnFeesReqCB>,
build_get_txn_fees_req: Option<payments_sys::BuildGetTxnFeesReqCB>,
parse_get_txn_fees_response: Option<payments_sys::ParseGetTxnFeesResponseCB>,
build_verify_payment_req: Option<payments_sys::BuildVerifyPaymentReqCB>,
parse_verify_payment_response: Option<payments_sys::ParseVerifyPaymentResponseCB>,
sign_with_address: Option<payments_sys::SignWithAddressCB>,
verify_with_address: Option<payments_sys::VerifyWithAddressCB>
) -> Result<(), ErrorCode> {
let (receiver, cmd_handle, cb) = callback::_closure_to_cb_ec();
let payment_method_name = CString::new(payment_method_name).unwrap();
let err = unsafe {
payments_sys::indy_register_payment_method(cmd_handle,
payment_method_name.as_ptr(),
create_payment_address,
add_request_fees,
parse_response_with_fees,
build_get_payment_sources_request,
parse_get_payment_sources_response,
build_payment_req,
parse_payment_response,
build_mint_req,
build_set_txn_fees_req,
build_get_txn_fees_req,
parse_get_txn_fees_response,
build_verify_payment_req,
parse_verify_payment_response,
sign_with_address,
verify_with_address,
cb,
)
};
super::results::result_to_empty(err, receiver)
}
pub fn create_payment_address(wallet_handle: WalletHandle, config: &str, payment_method: &str) -> Result<String, IndyError> {
payments::create_payment_address(wallet_handle, payment_method, config).wait()
}
pub fn list_payment_addresses(wallet_handle: WalletHandle) -> Result<String, IndyError> {
payments::list_payment_addresses(wallet_handle).wait()
}
pub fn add_request_fees(wallet_handle: WalletHandle, submitter_did: Option<&str>, req_json: &str, inputs_json: &str, outputs_json: &str, extra: Option<&str>) -> Result<(String, String), IndyError> {
payments::add_request_fees(wallet_handle, submitter_did, req_json, inputs_json, outputs_json, extra).wait()
}
#[allow(deprecated)]
pub fn build_get_payment_sources_request(wallet_handle: WalletHandle, submitter_did: Option<&str>, payment_address: &str) -> Result<(String, String), IndyError> {
payments::build_get_payment_sources_request(wallet_handle, submitter_did, payment_address).wait()
}
pub fn build_get_payment_sources_with_from_request(wallet_handle: WalletHandle, submitter_did: Option<&str>, payment_address: &str, from: Option<i64>) -> Result<(String, String), IndyError> {
payments::build_get_payment_sources_with_from_request(wallet_handle, submitter_did, payment_address, from).wait()
}
pub fn build_payment_req(wallet_handle: WalletHandle, submitter_did: Option<&str>, inputs_json: &str, outputs_json: &str, extra: Option<&str>) -> Result<(String, String), IndyError> {
payments::build_payment_req(wallet_handle, submitter_did, inputs_json, outputs_json, extra).wait()
}
pub fn parse_response_with_fees(payment_method: &str, resp_json: &str) -> Result<String, IndyError> {
payments::parse_response_with_fees(payment_method, resp_json).wait()
}
#[allow(deprecated)]
pub fn parse_get_payment_sources_response(payment_method: &str, resp_json: &str) -> Result<String, IndyError> {
payments::parse_get_payment_sources_response(payment_method, resp_json).wait()
}
pub fn parse_get_payment_sources_with_from_response(payment_method: &str, resp_json: &str) -> Result<(String, Option<i64>), IndyError> {
payments::parse_get_payment_sources_with_from_response(payment_method, resp_json).wait()
}
pub fn parse_payment_response(payment_method: &str, resp_json: &str) -> Result<String, IndyError> {
payments::parse_payment_response(payment_method, resp_json).wait()
}
pub fn prepare_extra_with_acceptance_data(extra: Option<&str>,
text: Option<&str>,
version: Option<&str>,
taa_digest: Option<&str>,
acc_mech_type: &str,
time_of_acceptance: u64) -> Result<String, IndyError> {
payments::prepare_extra_with_acceptance_data(extra, text, version, taa_digest, acc_mech_type, time_of_acceptance).wait()
}
pub fn build_mint_req(wallet_handle: WalletHandle, submitter_did: Option<&str>, outputs_json: &str, extra: Option<&str>) -> Result<(String, String), IndyError> {
payments::build_mint_req(wallet_handle, submitter_did, outputs_json, extra).wait()
}
pub fn build_set_txn_fees_req(wallet_handle: WalletHandle, submitter_did: Option<&str>, payment_method: &str, fees_json: &str) -> Result<String, IndyError> {
payments::build_set_txn_fees_req(wallet_handle, submitter_did, payment_method, fees_json).wait()
}
pub fn build_get_txn_fees_req(wallet_handle: WalletHandle, submitter_did: Option<&str>, payment_method: &str) -> Result<String, IndyError> {
payments::build_get_txn_fees_req(wallet_handle, submitter_did, payment_method).wait()
}
pub fn parse_get_txn_fees_response(payment_method: &str, resp_json: &str) -> Result<String, IndyError> {
payments::parse_get_txn_fees_response(payment_method, resp_json).wait()
}
pub fn build_verify_payment_req(wallet_handle: WalletHandle, submitter_did: Option<&str>, receipt: &str) -> Result<(String, String), IndyError> {
payments::build_verify_payment_req(wallet_handle, submitter_did, receipt).wait()
}
pub fn parse_verify_payment_response(payment_method: &str, resp_json: &str) -> Result<String, IndyError> {
payments::parse_verify_payment_response(payment_method, resp_json).wait()
}
pub fn get_request_info(get_auth_rule_resp_json: &str, requester_info_json: &str, fees_json: &str) -> Result<String, IndyError> {
payments::get_request_info(get_auth_rule_resp_json, requester_info_json, fees_json).wait()
}
pub fn sign_with_address(wallet_handle: WalletHandle, address: &str, message: &[u8]) -> Result<Vec<u8>, IndyError> {
payments::sign_with_address(wallet_handle, address, message).wait()
}
pub fn verify_with_address(address: &str, message: &[u8], signature: &[u8]) -> Result<bool, IndyError> {
payments::verify_with_address(address, message, signature).wait()
}<|fim▁end|>
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
 */
#![warn(clippy::all)]
mod client;
mod error_reporting;
mod lsp;
mod server;
use lsp_server::Connection;
use std::error::Error;
use env_logger::Env;
use log::info;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
env_logger::from_env(Env::default().default_filter_or("info, warn, error, debug")).init();
let (connection, io_handles) = Connection::stdio();
info!("Initialized stdio transport layer");
let params = server::initialize(&connection)?;
info!("JSON-RPC handshake completed");
server::run(connection, params).await?;
io_handles.join()?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::client;
use super::server;
use lsp_server::Connection;
use lsp_types::{ClientCapabilities, InitializeParams};
use std::error::Error;
#[test]
fn initialize() -> Result<(), Box<dyn Error + Sync + Send>> {
// Test with an in-memory connection pair
let (connection, client) = Connection::memory();
// Mock set of client parameters. The `root_path` field is deprecated, but
// still required to construct the params, so we allow deprecated fields here.
#[allow(deprecated)]
let init_params = InitializeParams {
process_id: Some(1),
root_path: None,
root_uri: None,
initialization_options: None,
capabilities: ClientCapabilities::default(),
trace: None,
workspace_folders: None,
client_info: None,
};
client::initialize(&client, &init_params, 0);
let params = server::initialize(&connection)?;
assert_eq!(params, init_params);
Ok(())
}
}<|fim▁end|>
|
<|file_name|>nulldummy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
from test_framework.script import CScript
from io import BytesIO
import time
NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
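# Replace the CHECKMULTISIG dummy stack element, which NULLDUMMY requires to be
# the empty vector, with a 0x51 byte so the transaction violates the rule.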
def trueDummy(tx):
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
assert(len(i) == 0)
newscript.append(b'\x51')
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
tx.rehash()
class NULLDUMMYTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
self.setup_clean_chain = True
def setup_network(self):
# Must set the blockversion for this test
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
extra_args=[['-whitelist=127.0.0.1', '-walletprematurewitness']])
def run_test(self):
self.address = self.nodes[0].getnewaddress()
self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])
self.wit_address = self.nodes[0].addwitnessaddress(self.address)
self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address)
NetworkThread().start() # Start up network handling in another thread
self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
coinbase_txid = []
for i in self.coinbase_blocks:
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
self.nodes[0].generate(427) # Block 429
self.lastblockhash = self.nodes[0].getbestblockhash()
self.tip = int("0x" + self.lastblockhash, 0)
self.lastblockheight = 429
self.lastblocktime = int(time.time()) + 429
self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
self.block_submit(self.nodes[0], test1txs, False, True)
self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
trueDummy(test2tx)
assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
self.block_submit(self.nodes[0], [test2tx], False, True)
self.log.info ("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
test6txs=[CTransaction(test4tx)]
trueDummy(test4tx)
assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test4tx])
print ("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
test6txs.append(CTransaction(test5tx))
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test5tx], True)
self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
for i in test6txs:
self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
self.block_submit(self.nodes[0], test6txs, True, True)
def create_transaction(self, node, txid, to_address, amount):
inputs = [{ "txid" : txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def block_submit(self, node, txs, witness = False, accept = False):
block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
block.nVersion = 4
for tx in txs:
tx.rehash()
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
witness and add_witness_commitment(block)
block.rehash()
block.solve()
node.submitblock(bytes_to_hex_str(block.serialize(True)))
if (accept):
assert_equal(node.getbestblockhash(), block.hash)
self.tip = block.sha256
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
else:
assert_equal(node.getbestblockhash(), self.lastblockhash)
if __name__ == '__main__':
NULLDUMMYTest().main()<|fim▁end|>
|
<|file_name|>test_parse_segv.py<|end_file_name|><|fim▁begin|>import unittest, tempfile, sys, os.path
datadir = os.environ.get('APPORT_DATA_DIR', '/usr/share/apport')
sys.path.insert(0, os.path.join(datadir, 'general-hooks'))
import parse_segv
# Default global registers, maps, and disassembly for testing
regs = '''eax 0xffffffff -1
ecx 0xbfc6af40 -1077498048
edx 0x1 1
ebx 0x26eff4 2551796
esp 0xbfc6af24 0xbfc6af24
ebp 0xbfc6af28 0xbfc6af28
esi 0x826bb60 136756064
edi 0x8083480 134755456
eip 0x808354e 0x808354e <main+14>
eflags 0x200286 [ PF SF IF ID ]
cs 0x73 115
ss 0x7b 123
ds 0x7b 123
es 0x7b 123
fs 0x4 4
gs 0x33 51
'''
regs64 = '''rax 0xffffffffffffffff -1
rbx 0x26eff4 2551796
rcx 0xffffffffffffffff -1
rdx 0xffffffffff600180 -10485376
rsi 0x0 0
rdi 0x7fffffffe3b0 140737488348080
rsp 0x0000bfc6af24 0x0000bfc6af24
r8 0x0 0
r9 0x0 0
r10 0x7fffffffe140 140737488347456
r11 0x246 582
r12 0x7fffffffe400 140737488348160
r13 0x7fffffffe468 140737488348264
r14 0x1 1
r15 0x7fffffffe460 140737488348256
rip 0x7ffff790be10 0x7ffff790be10 <nanosleep+16>
eflags 0x246 [ PF ZF IF ]
cs 0x33 51
ss 0x2b 43
ds 0x0 0
es 0x0 0
fs 0x0 0
gs 0x0 0
fctrl 0x37f 895
fstat 0x0 0
ftag 0xffff 65535
fiseg 0x0 0
fioff 0x40303a 4206650
foseg 0x0 0
fooff 0x0 0
fop 0x5d8 1496
mxcsr 0x1f80 [ IM DM ZM OM UM PM ]
'''
maps = '''00110000-0026c000 r-xp 00000000 08:06 375131 /lib/tls/i686/cmov/libc-2.9.so
0026c000-0026d000 ---p 0015c000 08:06 375131 /lib/tls/i686/cmov/libc-2.9.so
0026d000-0026f000 r--p 0015c000 08:06 375131 /lib/tls/i686/cmov/libc-2.9.so
0026f000-00270000 rw-p 0015e000 08:06 375131 /lib/tls/i686/cmov/libc-2.9.so
00270000-00273000 rw-p 00000000 00:00 0
002c1000-002e5000 r-xp 00000000 08:06 375135 /lib/tls/i686/cmov/libm-2.9.so
002e5000-002e6000 r--p 00023000 08:06 375135 /lib/tls/i686/cmov/libm-2.9.so
002e6000-002e7000 rw-p 00024000 08:06 375135 /lib/tls/i686/cmov/libm-2.9.so
00318000-00334000 r-xp 00000000 08:06 977846 /lib/ld-2.9.so
00334000-00335000 r--p 0001b000 08:06 977846 /lib/ld-2.9.so
00335000-00336000 rw-p 0001c000 08:06 977846 /lib/ld-2.9.so
0056e000-005a1000 r-xp 00000000 08:06 65575 /lib/libncurses.so.5.7
005a1000-005a3000 r--p 00033000 08:06 65575 /lib/libncurses.so.5.7
005a3000-005a4000 rw-p 00035000 08:06 65575 /lib/libncurses.so.5.7
00b67000-00b68000 r-xp 00000000 00:00 0 [vdso]
00bb6000-00bcb000 r-xp 00000000 08:06 375202 /lib/tls/i686/cmov/libpthread-2.9.so
00bcb000-00bcc000 r--p 00014000 08:06 375202 /lib/tls/i686/cmov/libpthread-2.9.so
00bcc000-00bcd000 rw-p 00015000 08:06 375202 /lib/tls/i686/cmov/libpthread-2.9.so
00bcd000-00bcf000 rw-p 00000000 00:00 0
00beb000-00bed000 r-xp 00000000 08:06 375134 /lib/tls/i686/cmov/libdl-2.9.so
00bed000-00bee000 r--p 00001000 08:06 375134 /lib/tls/i686/cmov/libdl-2.9.so
00bee000-00bef000 rw-p 00002000 08:06 375134 /lib/tls/i686/cmov/libdl-2.9.so
00c56000-00c7a000 r-xp 00000000 08:06 1140420 /usr/lib/libexpat.so.1.5.2
00c7a000-00c7c000 r--p 00023000 08:06 1140420 /usr/lib/libexpat.so.1.5.2
00c7c000-00c7d000 rw-p 00025000 08:06 1140420 /usr/lib/libexpat.so.1.5.2
00dce000-00dfa000 r-xp 00000000 08:06 65612 /lib/libreadline.so.5.2
00dfa000-00dfb000 ---p 0002c000 08:06 65612 /lib/libreadline.so.5.2
00dfb000-00dfc000 r--p 0002c000 08:06 65612 /lib/libreadline.so.5.2
00dfc000-00dff000 rw-p 0002d000 08:06 65612 /lib/libreadline.so.5.2
00dff000-00e00000 rw-p 00000000 00:00 0
08048000-0831c000 r-xp 00000000 08:06 1140349 /usr/bin/gdb
0831c000-0831d000 r--p 002d3000 08:06 1140349 /usr/bin/gdb
0831d000-08325000 rw-p 002d4000 08:06 1140349 /usr/bin/gdb
08325000-0833f000 rw-p 00000000 00:00 0
b8077000-b807a000 rw-p 00000000 00:00 0
b8096000-b8098000 rw-p 00000000 00:00 0
bfc57000-bfc6c000 rw-p 00000000 00:00 0 [stack]
'''
disasm = '''0x08083540 <main+0>: lea 0x4(%esp),%ecx
0x08083544 <main+4>: and $0xfffffff0,%esp
0x08083547 <main+7>: pushl -0x4(%ecx)
0x0808354a <main+10>: push %ebp
0x0808354b <main+11>: mov %esp,%ebp
0x0808354d <main+13>: push %ecx
0x0808354e <main+14>: sub $0x14,%esp
0x08083551 <main+17>: mov (%ecx),%eax
0x08083553 <main+19>: mov 0x4(%ecx),%edx
0x08083556 <main+22>: lea -0x14(%ebp),%ecx
0x08083559 <main+25>: movl $0x0,-0xc(%ebp)
0x08083560 <main+32>: movl $0x826bc68,-0x8(%ebp)
0x08083567 <main+39>: mov %eax,-0x14(%ebp)
0x0808356a <main+42>: mov %edx,-0x10(%ebp)
0x0808356d <main+45>: mov %ecx,(%esp)
0x08083570 <main+48>: call 0x8083580 <gdb_main>
0x08083575 <main+53>: add $0x14,%esp
0x08083578 <main+56>: pop %ecx
0x08083579 <main+57>: pop %ebp
0x0808357a <main+58>: lea -0x4(%ecx),%esp
0x0808357d <main+61>: ret
'''
class T(unittest.TestCase):
'''Test Segfault Parser'''
def test_invalid_00_registers(self):
'''Require valid registers'''
regs = 'a 0x10\nb !!!\n'
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, '', '')
try:
segv = parse_segv.ParseSegv(regs, '', '')
except ValueError as e:
self.assertTrue('invalid literal for int()' in str(e), str(e))
regs = 'a 0x10'
disasm = '0x08083540 <main+0>: lea 0x4(%esp),%ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.regs['a'], 0x10, segv)
segv.regs = None
self.assertRaises(ValueError, segv.parse_disassembly, '')
def test_invalid_01_disassembly(self):
'''Require valid disassembly'''
regs = 'a 0x10'
disasm = ''
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, disasm, '')
disasm = 'Dump ...'
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, disasm, '')
disasm = 'Dump ...\nmonkey'
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, disasm, '')
disasm = 'monkey'
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, disasm, '')
disasm = '0x1111111111: Cannot access memory at address 0x1111111111\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x1111111111, segv.pc)
self.assertEqual(segv.insn, None, segv.insn)
self.assertEqual(segv.src, None, segv.src)
self.assertEqual(segv.dest, None, segv.dest)
disasm = '0x2111111111: \n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x2111111111, segv.pc)
self.assertEqual(segv.insn, None, segv.insn)
self.assertEqual(segv.src, None, segv.src)
self.assertEqual(segv.dest, None, segv.dest)
disasm = '0x8069ff0 <fopen@plt+132220>: cmpb $0x0,(%eax,%ebx,1)\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x8069ff0, segv.pc)
self.assertEqual(segv.insn, 'cmpb', segv.insn)
self.assertEqual(segv.src, '$0x0', segv.src)
self.assertEqual(segv.dest, '(%eax,%ebx,1)', segv.dest)
disasm = '0xb765bb48 <_XSend+440>: call *0x40(%edi)\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0xb765bb48, segv.pc)
self.assertEqual(segv.insn, 'call', segv.insn)
self.assertEqual(segv.src, '*0x40(%edi)', segv.src)
self.assertEqual(segv.dest, None, segv.dest)
disasm = '0xb7aae5a0: call 0xb7a805af <_Unwind_Find_FDE@plt+111>\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0xb7aae5a0, segv.pc)
self.assertEqual(segv.insn, 'call', segv.insn)
self.assertEqual(segv.src, '0xb7a805af', segv.src)
self.assertEqual(segv.dest, None, segv.dest)
disasm = '0x09083540: mov 0x4(%esp),%es:%ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x09083540, segv.pc)
self.assertEqual(segv.insn, 'mov', segv.insn)
self.assertEqual(segv.src, '0x4(%esp)', segv.src)
self.assertEqual(segv.dest, '%es:%ecx', segv.dest)
disasm = '0x08083540 <main+0>: lea 0x4(%esp),%ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x08083540, segv.pc)
self.assertEqual(segv.insn, 'lea', segv.insn)
self.assertEqual(segv.src, '0x4(%esp)', segv.src)
self.assertEqual(segv.dest, '%ecx', segv.dest)
disasm = '''0x404127 <exo_mount_hal_device_mount+167>:
repz cmpsb %es:(%rdi),%ds:(%rsi)\n'''
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x0404127, segv.pc)
self.assertEqual(segv.insn, 'repz cmpsb', segv.insn)
self.assertEqual(segv.src, '%es:(%rdi)', segv.src)
self.assertEqual(segv.dest, '%ds:(%rsi)', segv.dest)
disasm = '0xb031765a <hufftab16+570>: add 0x3430433,%eax'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0xb031765a, segv.pc)
self.assertEqual(segv.insn, 'add', segv.insn)
self.assertEqual(segv.src, '0x3430433', segv.src)
self.assertEqual(segv.dest, '%eax', segv.dest)
disasm = 'Dump ...\n0x08083540 <main+0>: lea 0x4(%esp),%ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x08083540, segv.pc)
self.assertEqual(segv.insn, 'lea', segv.insn)
self.assertEqual(segv.src, '0x4(%esp)', segv.src)
self.assertEqual(segv.dest, '%ecx', segv.dest)
disasm = '0x08083550 <main+0>: nop\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x08083550, segv.pc)
self.assertEqual(segv.insn, 'nop', segv.insn)
self.assertEqual(segv.src, None, segv.src)
self.assertEqual(segv.dest, None, segv.dest)
regs = 'esp 0x444'
disasm = '0x08083560 <main+0>: push %ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x08083560, segv.pc)
self.assertEqual(segv.insn, 'push', segv.insn)
self.assertEqual(segv.src, '%ecx', segv.src)
self.assertEqual(segv.dest, '(%esp)', segv.dest)
# GDB 7.1
regs = 'esp 0x444'
disasm = '=> 0x08083560 <main+0>: push %ecx\n'
segv = parse_segv.ParseSegv(regs, disasm, '')
self.assertEqual(segv.pc, 0x08083560, segv.pc)
self.assertEqual(segv.insn, 'push', segv.insn)
self.assertEqual(segv.src, '%ecx', segv.src)
self.assertEqual(segv.dest, '(%esp)', segv.dest)
def test_ioport_operation(self):
'''I/O port violations'''
regs = 'rax 0x3 3'
disasm = '''0x4087f1 <snd_pcm_hw_params_set_channels_near@plt+19345>:
out %al,$0xb3
'''
maps = '''00400000-00412000 r-xp 00000000 08:04 10371157 /usr/sbin/pommed
00611000-00614000 rw-p 00011000 08:04 10371157 /usr/sbin/pommed
00614000-00635000 rw-p 00614000 00:00 0 [heap]
'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
self.assertEqual(segv.pc, 0x4087f1, segv.pc)
self.assertEqual(segv.insn, 'out', segv.insn)
self.assertEqual(segv.src, '%al', segv.src)
self.assertEqual(segv.dest, '$0xb3', segv.dest)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('disallowed I/O port operation on port 3' in reason, reason)
def test_invalid_02_maps(self):
'''Require valid maps'''
regs = 'a 0x10'
disasm = 'Dump ...\n0x08083540 <main+0>: lea 0x4(%esp),%ecx\n'
maps = 'asdlkfjaadf'
self.assertRaises(ValueError, parse_segv.ParseSegv, regs, disasm, maps)
maps = '''005a3000-005a4000 rw-p 00035000 08:06 65575 /lib/libncurses.so.5.7
00b67000-00b68000 r-xp 00000000 00:00 0 [vdso]
00c67000-00c68000 r--p 00000000 00:00 0 '''
segv = parse_segv.ParseSegv(regs, disasm, maps)
self.assertEqual(segv.maps[0]['start'], 0x005a3000, segv)
self.assertEqual(segv.maps[0]['end'], 0x005a4000, segv)
self.assertEqual(segv.maps[0]['perms'], 'rw-p', segv)
self.assertEqual(segv.maps[0]['name'], '/lib/libncurses.so.5.7', segv)
self.assertEqual(segv.maps[1]['start'], 0x00b67000, segv)
self.assertEqual(segv.maps[1]['end'], 0x00b68000, segv)
self.assertEqual(segv.maps[1]['perms'], 'r-xp', segv)
self.assertEqual(segv.maps[1]['name'], '[vdso]', segv)
self.assertEqual(segv.maps[2]['start'], 0x00c67000, segv)
self.assertEqual(segv.maps[2]['end'], 0x00c68000, segv)
self.assertEqual(segv.maps[2]['perms'], 'r--p', segv)
self.assertEqual(segv.maps[2]['name'], None, segv)
def test_debug(self):
'''Debug mode works'''
regs = 'a 0x10'
disasm = 'Dump ...\n0x08083540 <main+0>: lea 0x4(%esp),%ecx\n'
maps = '''005a3000-005a4000 rw-p 00035000 08:06 65575 /lib/libncurses.so.5.7
00b67000-00b68000 r-xp 00000000 00:00 0 [vdso]
00c67000-00c68000 r--p 00000000 00:00 0 '''
sys.stderr = tempfile.NamedTemporaryFile(prefix='parse_segv-stderr-')
segv = parse_segv.ParseSegv(regs, disasm, maps, debug=True)
self.assertTrue(segv is not None, segv)
def test_register_values(self):
'''Sub-register parsing'''
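        # Narrowing %rdx to %edx, %dx, and %dl should mask to 32, 16, and 8 bits.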
disasm = '''0x08083540 <main+0>: mov $1,%ecx'''
segv = parse_segv.ParseSegv(regs64, disasm, '')
val = segv.register_value('%rdx')
self.assertEqual(val, 0xffffffffff600180, hex(val))
val = segv.register_value('%edx')
self.assertEqual(val, 0xff600180, hex(val))
val = segv.register_value('%dx')
self.assertEqual(val, 0x0180, hex(val))
val = segv.register_value('%dl')
self.assertEqual(val, 0x80, hex(val))
def test_segv_unknown(self):
'''Handles unknown segfaults'''
disasm = '''0x08083540 <main+0>: mov $1,%ecx'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertFalse(understood, details)
# Verify calculations
self.assertEqual(segv.calculate_arg('(%ecx)'), 0xbfc6af40, segv.regs['ecx'])
self.assertEqual(segv.calculate_arg('0x10(%ecx)'), 0xbfc6af50, segv.regs['ecx'])
self.assertEqual(segv.calculate_arg('-0x20(%ecx)'), 0xbfc6af20, segv.regs['ecx'])
self.assertEqual(segv.calculate_arg('%fs:(%ecx)'), 0xbfc6af44, segv.regs['ecx'])
self.assertEqual(segv.calculate_arg('0x3404403'), 0x3404403, '0x3404403')
self.assertEqual(segv.calculate_arg('*0x40(%edi)'), 0x80834c0, segv.regs['edi'])
self.assertEqual(segv.calculate_arg('(%edx,%ebx,1)'), 0x26eff5, segv.regs['ebx'])
self.assertEqual(segv.calculate_arg('(%eax,%ebx,1)'), 0x26eff3, segv.regs['ebx'])
self.assertEqual(segv.calculate_arg('0x10(,%ebx,1)'), 0x26f004, segv.regs['ebx'])
# Again, but 64bit
disasm = '''0x08083540 <main+0>: mov $1,%rcx'''
segv = parse_segv.ParseSegv(regs64, disasm, maps)
understood, reason, details = segv.report()
self.assertFalse(understood, details)
self.assertEqual(segv.calculate_arg('(%rax,%rbx,1)'), 0x26eff3, segv.regs['rbx'])
def test_segv_pc_missing(self):
'''Handles PC in missing VMA'''
disasm = '''0x00083540 <main+0>: lea 0x4(%esp),%ecx'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('PC (0x00083540) not located in a known VMA region' in details, details)
self.assertTrue('executing unknown VMA' in reason, reason)
disasm = '''0x00083544:'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('PC (0x00083544) not located in a known VMA region' in details, details)
self.assertTrue('executing unknown VMA' in reason, reason)
def test_segv_pc_null(self):
'''Handles PC in NULL VMA'''
disasm = '''0x00000540 <main+0>: lea 0x4(%esp),%ecx'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('PC (0x00000540) not located in a known VMA region' in details, details)
self.assertTrue('executing NULL VMA' in reason, reason)
def test_segv_pc_nx_writable(self):
'''Handles PC in writable NX VMA'''
disasm = '''0x005a3000 <main+0>: lea 0x4(%esp),%ecx'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('PC (0x005a3000) in non-executable VMA region:' in details, details)
self.assertTrue('executing writable VMA /lib/libncurses.so.5.7' in reason, reason)
def test_segv_pc_nx_unwritable(self):
'''Handles PC in non-writable NX VMA'''
disasm = '''0x00dfb000 <main+0>: lea 0x4(%esp),%ecx'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('PC (0x00dfb000) in non-executable VMA region:' in details, details)
self.assertTrue('executing non-writable VMA /lib/libreadline.so.5.2' in reason, reason)
def test_segv_src_missing(self):
'''Handles source in missing VMA'''
reg = regs + 'ecx 0x0006af24 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
# Valid crash
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('source "-0x4(%ecx)" (0x0006af20) not located in a known VMA region' in details, details)
self.assertTrue('reading unknown VMA' in reason, reason)
# Valid crash
disasm = '0x08083547 <main+7>: callq *%ecx'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('source "*%ecx" (0x0006af24) not located in a known VMA region' in details, details)
self.assertTrue('reading unknown VMA' in reason, reason)
def test_segv_src_null(self):
'''Handles source in NULL VMA'''
reg = regs + 'ecx 0x00000024 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('source "-0x4(%ecx)" (0x00000020) not located in a known VMA region' in details, details)
self.assertTrue('reading NULL VMA' in reason, reason)
def test_segv_src_not_readable(self):
'''Handles source not in readable VMA'''
reg = regs + 'ecx 0x0026c080 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('source "-0x4(%ecx)" (0x0026c07c) in non-readable VMA region:' in details, details)
self.assertTrue('reading VMA /lib/tls/i686/cmov/libc-2.9.so' in reason, reason)
self.assertFalse('Stack memory exhausted' in details, details)
self.assertFalse('Stack pointer not within stack segment' in details, details)
def test_segv_dest_missing(self):
'''Handles destination in missing VMA'''
reg = regs + 'esp 0x0006af24 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('destination "(%esp)" (0x0006af24) not located in a known VMA region' in details, details)
self.assertTrue('writing unknown VMA' in reason, reason)
def test_segv_dest_null(self):
'''Handles destination in NULL VMA'''
reg = regs + 'esp 0x00000024 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('destination "(%esp)" (0x00000024) not located in a known VMA region' in details, details)
self.assertTrue('writing NULL VMA' in reason, reason)
def test_segv_dest_not_writable(self):
'''Handles destination not in writable VMA'''
reg = regs + 'esp 0x08048080 0xbfc6af24'
disasm = '0x08083547 <main+7>: pushl -0x4(%ecx)'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('destination "(%esp)" (0x08048080) in non-writable VMA region:' in details, details)
self.assertTrue('writing VMA /usr/bin/gdb' in reason, reason)
def test_segv_crackful_disasm(self):
'''Rejects insane disassemblies'''
disasm = '0x08083547 <main+7>: pushl -0x4(blah)'
segv = parse_segv.ParseSegv(regs, disasm, maps)
self.assertRaises(ValueError, segv.report)
disasm = '0x08083547 <main+7>: pushl -04(%ecx)'
segv = parse_segv.ParseSegv(regs, disasm, maps)
self.assertRaises(ValueError, segv.report)
def test_segv_stack_failure(self):
'''Handles walking off the stack'''
# Triggered via "push"
reg = regs + 'esp 0xbfc56ff0 0xbfc56ff0'
disasm = '0x08083547 <main+7>: push %eax'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('destination "(%esp)" (0xbfc56ff0) not located in a known VMA region (needed writable region)!' in details, details)
# Triggered via "call"
reg = regs + 'esp 0xbfc56fff 0xbfc56fff'
disasm = '0x08083547 <main+7>: callq 0x08083540'
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('destination "(%esp)" (0xbfc56fff) not located in a known VMA region (needed writable region)!' in details, details)
self.assertTrue('Stack memory exhausted' in details, details)
# Triggered via unknown reason
reg = regs + 'esp 0xdfc56000 0xdfc56000'
disasm = '''0x08083540 <main+0>: mov $1,%rcx'''
segv = parse_segv.ParseSegv(reg, disasm, maps)
understood, reason, details = segv.report()
self.assertTrue(understood, details)
self.assertTrue('SP (0xdfc56000) not located in a known VMA region (needed readable region)!' in details, details)
self.assertTrue('Stack pointer not within stack segment' in details, details)
def test_segv_stack_kernel_segfault(self):
'''Handles unknown segfaults in kernel'''
# Crash in valid code path
disasm = '''0x0056e010: ret'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertFalse(understood, details)
self.assertTrue('Reason could not be automatically determined.' in details, details)
self.assertFalse('(Unhandled exception in kernel code?)' in details, details)
# Crash from kernel code path
disasm = '''0x00b67422 <__kernel_vsyscall+2>: ret'''
segv = parse_segv.ParseSegv(regs, disasm, maps)
understood, reason, details = segv.report()
self.assertFalse(understood, details)
self.assertTrue('Reason could not be automatically determined. (Unhandled exception in kernel code?)' in details, details)
unittest.main()<|fim▁end|>
|
rbp 0x0 0x0
|
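The `calculate_arg` assertions in the record above all reduce to the same AT&T effective-address formula: displacement plus base register plus index register times scale. As a rough illustration, here is a minimal Python sketch of that computation; it is an assumption of how such an operand could be evaluated, not apport's actual implementation, and the `effective_address` helper and its simplified prefix handling are hypothetical:
import re

def effective_address(arg, regs):
    """Evaluate an AT&T-syntax operand: disp(base,index,scale).

    Simplified sketch of the arithmetic the tests above exercise; the
    real parse_segv.calculate_arg also validates register names,
    handles 64-bit masking, etc.  'regs' maps register names to values.
    """
    arg = arg.lstrip('*')               # indirect call/jump marker
    if ':' in arg:                      # drop segment prefix, e.g. '%fs:'
        arg = arg.split(':', 1)[1]
    if arg.startswith('%'):             # register-direct, e.g. '*%ecx'
        return regs[arg.lstrip('%')]
    m = re.match(r'(-?0x[0-9a-fA-F]+|-?\d+)?'        # displacement
                 r'(?:\((%\w+)?(?:,(%\w+),(\d+))?\))?$', arg)
    if m is None:
        raise ValueError('unparseable operand: %s' % arg)
    disp, base, index, scale = m.groups()
    value = int(disp, 0) if disp else 0
    if base:
        value += regs[base.lstrip('%')]
    if index:
        value += regs[index.lstrip('%')] * int(scale)
    return value

# e.g. effective_address('0x10(,%ebx,1)', {'ebx': 0x26eff4}) == 0x26f004,
# and an operand like '-04(%ecx)' fails the regex, mirroring the
# ValueError the "crackful disasm" tests expect.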
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>pub fn config_read_word(bus: u8, slot: u8, func: u8, offset: u8) -> u16 {
let device: i32 = match (bus, slot, func) {
// root PCI controller
(0, 0, 0) => 0,
// secondary bus controller (for bus 1)
(0, 1, 0) => 1,
(9, 0, 0) => 2,
(_, _, _) => -1,
};
<|fim▁hole|> // vendor ID
0x00 => match device {
0 => 0x1022,
1 => 0x1022,
2 => 0x1013,
_ => 0xFFFF,
},
// device ID
0x02 => match device {
0 => 0x15D0,
1 => 0x15D3,
2 => 0x00B8,
_ => 0xFFFF,
},
// classes
0x0A => match device {
0 => 0x0600,
1 => 0x0604,
2 => 0x0300,
_ => 0xFFFF,
},
// header type
0x0E => match device {
1 => 0x0001,
_ => 0x0000,
},
// secondary bus
0x18 => match device {
1 => 0x0900,
_ => 0x0000,
},
_ => 0xFFFF,
}
}<|fim▁end|>
|
match offset {
|
<|file_name|>adaptDefineComponentFunction.js<|end_file_name|><|fim▁begin|>import validateComponentConfig from '../validation/validateComponentConfig';
import normalizeComponentConfig from '../helper/normalizeComponentConfig';
import createFactory from '../helper/createFactory';
import printError from '../helper/printError';
export default function adaptDefineComponentFunction({
createComponentType,
createElement,
Adapter
}) {
function defineComponent(config) {
const error = validateComponentConfig(config);
if (error) {
const errorMsg = prettifyErrorMsg(error.message, config);
printError(errorMsg);
throw new TypeError(errorMsg);
}
const
normalizedConfig = normalizeComponentConfig(config),<|fim▁hole|> }
defineComponent._jsx = createElement;
defineComponent._jsxFrag = null;
return defineComponent;
}
function prettifyErrorMsg(errorMsg, config) {
return config && typeof config === 'object'
&& typeof config.displayName === 'string'
&& config.displayName.trim().length > 0
? '[defineComponent] Invalid configuration for component '
+ `"${config.displayName}": ${errorMsg} `
: `[defineComponent] Invalid component configuration: ${errorMsg}`;
}<|fim▁end|>
|
componentType = createComponentType(normalizedConfig),
factory = createFactory(componentType, normalizedConfig, Adapter);
return factory;
|
<|file_name|>descriptor.rs<|end_file_name|><|fim▁begin|>use uuid::*;<|fim▁hole|> ClientConfiguration,
ServerConfiguration,
PresentationFormat,
AggregateFormat,
ValidRange,
ExternalReportReference,
ReportReference,
NumberOfDigitals,
TriggerSetting,
TestComplexBitfield,
Unknown(UUID),
}
impl Descriptor {
fn to_uuid(&self) -> UUID {
UUID::Custom(match *self {
Descriptor::ExtendedProperties => 0x2900,
Descriptor::UserDescription => 0x2901,
Descriptor::ClientConfiguration => 0x2902,
Descriptor::ServerConfiguration => 0x2903,
Descriptor::PresentationFormat => 0x2904,
Descriptor::AggregateFormat => 0x2905,
Descriptor::ValidRange => 0x2906,
Descriptor::ExternalReportReference => 0x2907,
Descriptor::ReportReference => 0x2908,
Descriptor::NumberOfDigitals => 0x2909,
Descriptor::TriggerSetting => 0x290a,
Descriptor::TestComplexBitfield => 0x0000,
Descriptor::Unknown(ref uuid) => uuid.to_hex(),
})
}
fn from_uuid(uuid: UUID) -> Descriptor {
match uuid.to_hex() {
0x2900 => Descriptor::ExtendedProperties,
0x2901 => Descriptor::UserDescription,
0x2902 => Descriptor::ClientConfiguration,
0x2903 => Descriptor::ServerConfiguration,
0x2904 => Descriptor::PresentationFormat,
0x2905 => Descriptor::AggregateFormat,
0x2906 => Descriptor::ValidRange,
0x2907 => Descriptor::ExternalReportReference,
0x2908 => Descriptor::ReportReference,
0x2909 => Descriptor::NumberOfDigitals,
0x290a => Descriptor::TriggerSetting,
0x0000 => Descriptor::TestComplexBitfield,
_ => Descriptor::Unknown(uuid),
}
}
fn to_str(&self) -> &'static str {
match *self {
Descriptor::ExtendedProperties => "Characteristic Extended Properties",
Descriptor::UserDescription => "Characteristic User Description",
Descriptor::ClientConfiguration => "Client Characteristic Configuration",
Descriptor::ServerConfiguration => "Server Characteristic Configuration",
Descriptor::PresentationFormat => "Characteristic Presentation Format",
Descriptor::AggregateFormat => "Characteristic Aggregate Format",
Descriptor::ValidRange => "Valid Range",
Descriptor::ExternalReportReference => "External Report Reference",
Descriptor::ReportReference => "Report Reference",
Descriptor::NumberOfDigitals => "Number of Digitals",
Descriptor::TriggerSetting => "Trigger Setting",
Descriptor::TestComplexBitfield => "Test Complex BitField",
Descriptor::Unknown(_) => "Unknown",
}
}
}<|fim▁end|>
|
pub enum Descriptor {
ExtendedProperties,
UserDescription,
|
<|file_name|>defaults.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
generic_comments = getattr(settings, "COMMENTS_APP", "") == "mezzanine.generic"
if generic_comments:
register_setting(
name="COMMENTS_ACCOUNT_REQUIRED",
label=_("Accounts required for commenting"),
description=_("If ``True``, users must log in to comment."),
editable=True,
default=False,
)
register_setting(
name="COMMENTS_DISQUS_SHORTNAME",
label=_("Disqus shortname"),
description=_("Shortname for the http://disqus.com comments "
"service."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_PUBLIC_KEY",
label=_("Disqus public key"),
description=_("Public key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_SECRET_KEY",
label=_("Disqus secret key"),
description=_("Secret key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DEFAULT_APPROVED",
label=_("Auto-approve comments"),
description=_("If ``True``, built-in comments are approved by "
"default."),
editable=True,
default=True,
)
register_setting(
name="COMMENT_FILTER",
description=_("Dotted path to the function to call on a comment's "
"value before it is rendered to the template."),
editable=False,
default=None,
)
register_setting(
name="COMMENTS_NOTIFICATION_EMAILS",
label=_("Comment notification email addresses"),
description=_("A comma separated list of email addresses that "
"will receive an email notification each time a "
"new comment is posted on the site."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_NUM_LATEST",
label=_("Admin comments"),
description=_("Number of latest comments shown in the admin "
"dashboard."),
editable=True,
default=5,
)
register_setting(
name="COMMENTS_UNAPPROVED_VISIBLE",
label=_("Show unapproved comments"),
description=_("If ``True``, comments that have ``is_public`` "
"unchecked will still be displayed, but replaced with a "
"``waiting to be approved`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_REMOVED_VISIBLE",
label=_("Show removed comments"),
description=_("If ``True``, comments that have ``removed`` "
"checked will still be displayed, but replaced "
"with a ``removed`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_USE_RATINGS",
description=_("If ``True``, comments can be rated."),
editable=False,
default=True,
)
register_setting(
name="RATINGS_ACCOUNT_REQUIRED",
label=_("Accounts required for rating"),
description=_("If ``True``, users must log in to rate content "
"such as blog posts and comments."),
editable=True,
default=False,
)
register_setting(
name="RATINGS_RANGE",
description=_("A sequence of integers that are valid ratings."),
editable=False,
default=range(getattr(settings, "RATINGS_MIN", 1),
getattr(settings, "RATINGS_MAX", 5) + 1),
)<|fim▁end|>
|
Default settings for the ``mezzanine.generic`` app. Each of these can be
overridden in your project's settings module, just like regular
|
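A setting registered this way is consumed through Mezzanine's settings proxy rather than `django.conf.settings`, so editable values reflect changes made in the admin at access time. A minimal usage sketch, assuming the standard `mezzanine.conf` API and the `submit_date` field from `django_comments`; the `latest_comments` helper is hypothetical:
from mezzanine.conf import settings

def latest_comments(comment_queryset):
    """Newest comments for the admin dashboard.

    COMMENTS_NUM_LATEST is editable, so it is read at call time
    rather than bound once at import time.
    """
    return comment_queryset.order_by("-submit_date")[:settings.COMMENTS_NUM_LATEST]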
<|file_name|>test_addonconf.py<|end_file_name|><|fim▁begin|>import sys
import unittest
sys.path.insert(0, "../src/build")
import addonconf
class AddonConfModuleTestCase(unittest.TestCase):
def test_load(self):
# act
config = addonconf.load("configs/config.json")
# assert
self.assertEqual(config, None, "Wrong return value for nonexistent config")
def test_load2(self):
# act
config = addonconf.load("configs/config.json.1")
# assert
self.assertEqual(config, None, "Wrong return value for invalid config")
def test_load3(self):
# arrange
correct_config = {'version': '0.1', 'xpi': {'theme': 'firefox-theme-test.xpi', 'package': 'firefox-test-@[email protected]', 'extension': 'firefox-extension-test.xpi'}, 'max-version': '31.0a1', 'directory-structure': {'shared-dir': 'chrome'}, 'min-version': '29.0'}
# act
config = addonconf.load("configs/config.json.2")<|fim▁hole|>
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
# assert
self.assertEqual(config, correct_config, "Incorrectly loaded config")
|
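The record above pins down `addonconf.load`'s contract: `None` for a missing or unparseable file, the decoded dict otherwise. A conforming implementation could be as small as the following sketch; this is an assumption, and the real module may validate further fields:
import json

def load(path):
    """Load an add-on config; return None if missing or not valid JSON."""
    try:
        with open(path) as config_file:
            return json.load(config_file)
    except (IOError, ValueError):
        return None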
<|file_name|>google-cloud-function.ts<|end_file_name|><|fim▁begin|>// @ts-expect-error `npm install --save-dev @types/express`
import { Request, Response } from 'express'
import { Telegraf } from 'telegraf'
const { BOT_TOKEN, FUNCTION_NAME, PROJECT_ID, REGION } = process.env<|fim▁hole|>}
const bot = new Telegraf(BOT_TOKEN)
// eslint-disable-next-line @typescript-eslint/no-floating-promises
bot.telegram.setWebhook(
`https://${REGION!}-${PROJECT_ID!}.cloudfunctions.net/${FUNCTION_NAME!}`
)
bot.command('hello', (ctx) => ctx.reply('Hello, friend!'))
export const botFunction = async (req: Request, res: Response) => {
try {
await bot.handleUpdate(req.body)
} finally {
res.status(200).end()
}
}<|fim▁end|>
|
if (BOT_TOKEN === undefined) {
throw new TypeError('BOT_TOKEN must be provided!')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.