blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9a3d85f621d23e6aedf6d0d238f468b8abd4befc | 1f94b6ff9477e380084bf00c591d92b0b2985e69 | /PythonEshow/apis/AUsers.py | 1b2e67a5504df084d2dec10fb70253366de4ad1d | [] | no_license | guofengma/Eshow | dff32fa1da152f30d776b7e8fdc2d5ffc1ef4c40 | 4b13cb4c328d7832bea3393000635106dd683b28 | refs/heads/master | 2020-04-24T07:22:46.757263 | 2018-05-03T00:59:31 | 2018-05-03T00:59:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | # *- coding:utf8 *-
import sys
import os
sys.path.append(os.path.dirname(os.getcwd()))
from flask_restful import Resource
from config.requests import apis_wrong, param_miss
class AUsers(Resource):
def __init__(self):
from control.CUsers import CUsers
self.cusers = CUsers()
def post(self, users):
print "==================================================="
print "api name is {0}".format(users)
print "==================================================="
apis = {
"login": "self.cusers.login()"
}
if users not in apis:
return apis_wrong
return eval(apis[users]) | [
"[email protected]"
] | |
e5dbd30c0c8cca38c4563b3f180af1302597c50f | 6c48ad953031fd6be870e8bd8775538b9ac7033e | /python/demo08_module/demo12_file_copy_2.py | d6dbda50aec8cdfa170dae8235f52b3bb86b0ea9 | [] | no_license | yeswhos/Code-Practice | b080c9484f510d02c2d78e388fc03eedc397aa7b | 0fd8263a5c87dbd0e8b1dd5a38f32a188870308b | refs/heads/master | 2023-04-08T13:11:06.105039 | 2023-03-16T11:34:03 | 2023-03-16T11:34:03 | 247,809,031 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | file_read = open("D:\GitR\Code-Practice\README.md")
file_write = open("D:\python_a.md", "w")
while True:
text = file_read.readline()
if not text:
break
file_write.write(text)
file_read.close()
file_write.close() | [
"[email protected]"
] | |
50da484a10d8c1763aa27665fa7bb665fd69438e | 2aace9bb170363e181eb7520e93def25f38dbe5c | /build/idea-sandbox/system/python_stubs/cache/bdf774aa44376518b55fa0857c4aa1e281ab006abf9d2169b8b93383f1e63a63/cython_runtime.py | 2f39ea21b21c76ab3e8b7b67c14f4bc7f0faba86 | [] | no_license | qkpqkp/PlagCheck | 13cb66fd2b2caa2451690bb72a2634bdaa07f1e6 | d229904674a5a6e46738179c7494488ca930045e | refs/heads/master | 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | # encoding: utf-8
# module cython_runtime
# from C:\Users\Doly\Anaconda3\lib\site-packages\scipy\interpolate\interpnd.cp37-win_amd64.pyd
# by generator 1.147
# no doc
# no imports
# Variables with simple values
__loader__ = None
__spec__ = None
# no functions
# no classes
| [
"[email protected]"
] | |
354a96afb879b8c3ef70574d8789f264ecd2c01f | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/nos/v6_0_2f/interface/fortygigabitethernet/snmp/__init__.py | 548b4c7ea3ff0beb53b6146afc568278cdfc288f | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,251 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import trap
class snmp(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/fortygigabitethernet/snmp. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The SNMP configurations for an interface.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__trap',)
_yang_name = 'snmp'
_rest_name = 'snmp'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__trap = YANGDynClass(base=trap.trap, is_container='container', presence=False, yang_name="trap", rest_name="trap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Trap related configurations', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'fortygigabitethernet', u'snmp']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'FortyGigabitEthernet', u'snmp']
def _get_trap(self):
"""
Getter method for trap, mapped from YANG variable /interface/fortygigabitethernet/snmp/trap (container)
YANG Description: SNMP Trap configuration
"""
return self.__trap
def _set_trap(self, v, load=False):
"""
Setter method for trap, mapped from YANG variable /interface/fortygigabitethernet/snmp/trap (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_trap is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_trap() directly.
YANG Description: SNMP Trap configuration
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=trap.trap, is_container='container', presence=False, yang_name="trap", rest_name="trap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Trap related configurations', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """trap must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=trap.trap, is_container='container', presence=False, yang_name="trap", rest_name="trap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Trap related configurations', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__trap = t
if hasattr(self, '_set'):
self._set()
def _unset_trap(self):
self.__trap = YANGDynClass(base=trap.trap, is_container='container', presence=False, yang_name="trap", rest_name="trap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Trap related configurations', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
trap = __builtin__.property(_get_trap, _set_trap)
_pyangbind_elements = {'trap': trap, }
| [
"[email protected]"
] | |
c4b8c299ef806ae544a61f97faf8c7b11cc72052 | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/tf_util/show_record.py | 8ed8b6ce8bda4b761875613a73799b90c7385020 | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 708 | py | import sys
import tensorflow as tf
def file_show(fn):
cnt = 0
n_display = 5
for record in tf.compat.v1.python_io.tf_record_iterator(fn):
example = tf.train.Example()
example.ParseFromString(record)
feature = example.features.feature
keys = feature.keys()
print("---- record -----")
for key in keys:
if key in ["masked_lm_weights", "rel_score"]:
v = feature[key].float_list.value
else:
v = feature[key].int64_list.value
print(key)
print(v)
cnt += 1
if cnt >= n_display: ##
break
if __name__ == "__main__":
file_show(sys.argv[1])
| [
"[email protected]"
] | |
d81d54bc1bb25218cba01ca8702d7129ffa2acb9 | bd498cbbb28e33370298a84b693f93a3058d3138 | /Google/benchmarks/transformer/implementations/transformer-research-TF-tpu-v4-128/lingvo/tasks/mt/params/params.py | a2907a7f5d715856f2ba60d1e0f68757bd1f29b4 | [
"Apache-2.0"
] | permissive | piyushghai/training_results_v0.7 | afb303446e75e3e9789b0f6c40ce330b6b83a70c | e017c9359f66e2d814c6990d1ffa56654a73f5b0 | refs/heads/master | 2022-12-19T16:50:17.372320 | 2020-09-24T01:02:00 | 2020-09-24T18:01:01 | 298,127,245 | 0 | 1 | Apache-2.0 | 2020-09-24T00:27:21 | 2020-09-24T00:27:21 | null | UTF-8 | Python | false | false | 1,198 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Machine translation model hyper-parameters."""
# Import ModelParams to ensure that they are added to the global registry.
# pylint: disable=unused-import
import REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.tasks.mt.params.wmt14_en_de
import REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.tasks.mt.params.wmtm16_en_de
import REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.tasks.mt.params.mlperf
# pylint: enable=unused-import
| [
"[email protected]"
] | |
b063bd197215f270b9d6e66e2f4be27ffb58d140 | 3777658387aa9e78d7c04202d7fd47d59b9e1271 | /MachineLearning/FeatureEngineering/target_encoding.py | f69b809d8d766c93e3a1d6807f5f2eb76abfff60 | [] | no_license | jocoder22/PythonDataScience | 709363ada65b6db61ee73c27d8be60587a74f072 | c5a9af42e41a52a7484db0732ac93b5945ade8bb | refs/heads/master | 2022-11-08T17:21:08.548942 | 2022-10-27T03:21:53 | 2022-10-27T03:21:53 | 148,178,242 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,099 | py | #!/usr/bin/env python
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import KFold
from contextlib import contextmanager
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error
from sklearn.ensemble import RandomForestRegressor
sp = {'sep':'\n\n', 'end':'\n\n'}
path = r'C:\Users\Jose\Desktop\PythonDataScience\MachineLearning\FeatureEngineering'
os.chdir(path)
df = pd.read_csv('housing.csv')
kfold = KFold(n_splits=4, shuffle=True, random_state=1973)
def test_encoding(train, test, target, cat, alpha=7):
# global mean on the train data
mean_global = train[target].mean()
# Get categorical feature sum and size
cat_sum = train.groupby(cat)[target].sum()
cat_size = train.groupby(cat).size()
# smoothed statistics
train_smoothed = (cat_sum + mean_global * alpha) / (cat_size + alpha)
# get encodings for test data
test_encoded = test[cat].map(train_smoothed).fillna(mean_global)
return test_encoded.values
def train_encoding(train, target, cat, alpha=7):
# 4-fold cross-validation
k_fold = KFold(n_splits=4, random_state=1973, shuffle=True)
feature_t = pd.Series(index=train.index)
# train k-fold encoding
for train_index, test_index in k_fold.split(train):
cv_train, cv_test = train.iloc[train_index], train.iloc[test_index]
# out-of-fold statistics and apply to cv_test
cv_test_feature = test_encoding(cv_train, cv_test, target, cat, alpha)
# create new train feature for the fold
feature_t.iloc[test_index] = cv_test_feature
return feature_t.values
def target_encoding(train, test, target, cat, alpha=7):
# test data mean target coded feature
test_mean_coded = test_encoding(train, test, target, cat, alpha)
# train data mean target coded feature
train_mean_coded = train_encoding(train, target, cat, alpha)
# Return new features to add to the model
return train_mean_coded, test_mean_coded
| [
"[email protected]"
] | |
f9d1d9cf9b8c99744ce68c3ed06d3bf7eefe6d21 | 648fbac90569e520540a14ad7be6714dc3aa303d | /scripts/cell/card_10000220.py | d05d6fc2bb0421c57d41269da5248ce6095fe30e | [] | no_license | zhlhmjz/kbe_lscs | e573c1eac74c12005cb6c50288a952b2874e231b | 6dc4eae5ab7fee66e929204abc80125f4f6be8f6 | refs/heads/master | 2021-08-24T13:43:42.208183 | 2017-11-21T08:39:14 | 2017-11-21T08:39:14 | 111,522,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | # -*- coding: utf-8 -*-
import KBEngine
from KBEDebug import *
from interfaces.GameObj import GameObj
class card_10000220(GameObj):
#卡牌名称:尘魔
#卡牌描述:<b>风怒</b>,<b>过载:</b>2
def __init__(self):
GameObj.__init__(self)
#--------------------------------------------------------------------------------------------
# Callbacks
#--------------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------------
# Effect
#--------------------------------------------------------------------------------------------
| [
"[email protected]"
] | |
1a421e4b7a56288ef0f491e649e419218ddae590 | b3ac12dfbb8fa74500b406a0907337011d4aac72 | /tests/simulation/test_simulation.py | 4d360e7d047c84c5a25402b92a5ff5871f45f7c9 | [
"Apache-2.0"
] | permissive | chia-os/goldcoin-blockchain | ab62add5396b7734c11d3c37c41776994489d5e7 | 5c294688dbbe995ae1d4422803f6fcf3e1cc6077 | refs/heads/main | 2023-08-11T23:58:53.617051 | 2021-09-12T15:33:26 | 2021-09-12T15:33:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,165 | py | import pytest
from goldcoin.types.peer_info import PeerInfo
from tests.block_tools import BlockTools
from goldcoin.util.ints import uint16
from tests.core.node_height import node_height_at_least
from tests.setup_nodes import self_hostname, setup_full_node, setup_full_system, test_constants
from tests.time_out_assert import time_out_assert
test_constants_modified = test_constants.replace(
**{
"DIFFICULTY_STARTING": 2 ** 8,
"DISCRIMINANT_SIZE_BITS": 1024,
"SUB_EPOCH_BLOCKS": 140,
"WEIGHT_PROOF_THRESHOLD": 2,
"WEIGHT_PROOF_RECENT_BLOCKS": 350,
"MAX_SUB_SLOT_BLOCKS": 50,
"NUM_SPS_SUB_SLOT": 32, # Must be a power of 2
"EPOCH_BLOCKS": 280,
"SUB_SLOT_ITERS_STARTING": 2 ** 20,
"NUMBER_ZERO_BITS_PLOT_FILTER": 5,
}
)
class TestSimulation:
@pytest.fixture(scope="function")
async def extra_node(self):
b_tools = BlockTools(constants=test_constants_modified)
async for _ in setup_full_node(test_constants_modified, "blockchain_test_3.db", 21240, b_tools):
yield _
@pytest.fixture(scope="function")
async def simulation(self):
async for _ in setup_full_system(test_constants_modified):
yield _
@pytest.mark.asyncio
async def test_simulation_1(self, simulation, extra_node):
node1, node2, _, _, _, _, _, _, _, server1 = simulation
await server1.start_client(PeerInfo(self_hostname, uint16(21238)))
# Use node2 to test node communication, since only node1 extends the chain.
await time_out_assert(1500, node_height_at_least, True, node2, 7)
async def has_compact(node1, node2):
peak_height_1 = node1.full_node.blockchain.get_peak_height()
headers_1 = await node1.full_node.blockchain.get_header_blocks_in_range(0, peak_height_1)
peak_height_2 = node2.full_node.blockchain.get_peak_height()
headers_2 = await node2.full_node.blockchain.get_header_blocks_in_range(0, peak_height_2)
# Commented to speed up.
# cc_eos = [False, False]
# icc_eos = [False, False]
# cc_sp = [False, False]
# cc_ip = [False, False]
has_compact = [False, False]
for index, headers in enumerate([headers_1, headers_2]):
for header in headers.values():
for sub_slot in header.finished_sub_slots:
if sub_slot.proofs.challenge_chain_slot_proof.normalized_to_identity:
# cc_eos[index] = True
has_compact[index] = True
if (
sub_slot.proofs.infused_challenge_chain_slot_proof is not None
and sub_slot.proofs.infused_challenge_chain_slot_proof.normalized_to_identity
):
# icc_eos[index] = True
has_compact[index] = True
if (
header.challenge_chain_sp_proof is not None
and header.challenge_chain_sp_proof.normalized_to_identity
):
# cc_sp[index] = True
has_compact[index] = True
if header.challenge_chain_ip_proof.normalized_to_identity:
# cc_ip[index] = True
has_compact[index] = True
# return (
# cc_eos == [True, True] and icc_eos == [True, True] and cc_sp == [True, True] and cc_ip == [True, True]
# )
return has_compact == [True, True]
await time_out_assert(1500, has_compact, True, node1, node2)
node3 = extra_node
server3 = node3.full_node.server
peak_height = max(node1.full_node.blockchain.get_peak_height(), node2.full_node.blockchain.get_peak_height())
await server3.start_client(PeerInfo(self_hostname, uint16(21237)))
await server3.start_client(PeerInfo(self_hostname, uint16(21238)))
await time_out_assert(600, node_height_at_least, True, node3, peak_height)
| [
"[email protected]"
] | |
25964edc220c95004ffd24b272ac7962457a8fe4 | 0baac2c4aa84f65896054043486577b6e08ba9ef | /python/257-binaryTree.py | 055403a0ce48d1fc8161d52f26eb968b284befbe | [] | no_license | hy299792458/LeetCode | c302983b81151acddffe3a71b03b4aceb20b4fa4 | bb24717283a6b3ddd463b68cba34f70df75ddfed | refs/heads/master | 2021-01-21T17:01:58.082623 | 2017-09-12T16:49:44 | 2017-09-12T16:49:44 | 91,924,578 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def binaryTreePaths(self, root):
self.re = []
def DFS(root,path):
if root.left == root.right == None:
self.re.append(path + [root.val])
else:
if root.left:
DFS(root.left, path + [root.val])
if root.right:
DFS(root.right, path + [root.val])
if root:
DFS(root, [])
return map(lambda x: '->'.join(map(str, x)), self.re)
| [
"[email protected]"
] | |
e3b9d889f720be23cad9a69d505cfc0d1ee141aa | edfb435ee89eec4875d6405e2de7afac3b2bc648 | /tags/selenium-2.0-beta-3/py/test/selenium/webdriver/common/children_finding_tests.py | f0ff588cab08890a162d720fadf174538ba0f18c | [
"Apache-2.0"
] | permissive | Escobita/selenium | 6c1c78fcf0fb71604e7b07a3259517048e584037 | f4173df37a79ab6dd6ae3f1489ae0cd6cc7db6f1 | refs/heads/master | 2021-01-23T21:01:17.948880 | 2012-12-06T22:47:50 | 2012-12-06T22:47:50 | 8,271,631 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,205 | py | #!/usr/bin/python
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import tempfile
import time
import shutil
import unittest
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
class ChildrenFindingTests(unittest.TestCase):
def testShouldFindElementByXPath(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
child = element.find_element_by_xpath("select")
self.assertEqual(child.get_attribute("id"), "2")
def testShouldNotFindElementByXPath(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
try:
element.find_element_by_xpath("select/x")
self.fail("Expected NoSuchElementException to have been thrown")
except NoSuchElementException, e:
pass
except Exception, e:
self.fail("Expected NoSuchElementException to have been thrown but got " + str(e))
def testShouldFindElementsByXpath(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
children = element.find_elements_by_xpath("select/option")
self.assertEqual(len(children), 8);
self.assertEqual(children[0].text, "One")
self.assertEqual(children[1].text, "Two")
def testShouldNotFindElementsByXpath(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
children = element.find_elements_by_xpath("select/x")
self.assertEqual(len(children), 0)
def FindingElementsOnElementByXPathShouldFindTopLevelElements(self):
self._loadSimplePage()
parent = self.driver.find_element_by_id("multiline")
allParaElements = self.driver.find_elements_by_xpath("//p")
children = parent.find_elements_by_xpath("//p")
self.assertEqual(len(allParaElements), len(children))
def testShouldFindElementByName(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
child = element.find_element_by_name("selectomatic")
self.assertEqual(child.get_attribute("id"), "2")
def testShouldFindElementsByName(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
children = element.find_elements_by_name("selectomatic")
self.assertEqual(len(children), 2)
def testShouldFindElementById(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
child = element.find_element_by_id("2")
self.assertEqual(child.get_attribute("name"), "selectomatic")
def testShouldFindElementsById(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("form2")
child = element.find_elements_by_id("2")
self.assertEqual(len(child), 2)
def testShouldFindElementByIdWhenMultipleMatchesExist(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_id("test_id_div")
child = element.find_element_by_id("test_id")
self.assertEqual(child.text, "inside")
def testShouldFindElementByIdWhenNoMatchInContext(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_id("test_id_div")
try:
element.find_element_by_id("test_id_out")
self.Fail("Expected NoSuchElementException to have been thrown")
except NoSuchElementException, e:
pass
except Exception, e:
self.Fail("Expected NoSuchElementException to have been thrown but got " + str(e))
def testShouldFindElementByLinkText(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("div1")
child = element.find_element_by_link_text("hello world")
self.assertEqual(child.get_attribute("name"), "link1")
def testShouldFindElementByLinkText(self):
self._loadPage("nestedElements")
element = self.driver.find_element_by_name("div1")
children = element.find_elements_by_link_text("hello world")
self.assertEqual(len(children), 2)
def testShouldFindElementByClassName(self):
self._loadPage("nestedElements")
parent = self.driver.find_element_by_name("classes")
element = parent.find_element_by_class_name("one")
self.assertEqual("Find me", element.text)
def testShouldFindElementsByClassName(self):
self._loadPage("nestedElements")
parent = self.driver.find_element_by_name("classes")
elements = parent.find_elements_by_class_name("one")
self.assertEqual(2, len(elements))
def testShouldFindElementByTagName(self):
self._loadPage("nestedElements")
parent = self.driver.find_element_by_name("div1")
element = parent.find_element_by_tag_name("a")
self.assertEqual("link1", element.get_attribute("name"))
def testShouldFindElementsByTagName(self):
self._loadPage("nestedElements")
parent = self.driver.find_element_by_name("div1")
elements = parent.find_elements_by_tag_name("a")
self.assertEqual(2, len(elements))
def _pageURL(self, name):
return "http://localhost:%d/%s.html" % (self.webserver.port, name)
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| [
"simon.m.stewart@07704840-8298-11de-bf8c-fd130f914ac9"
] | simon.m.stewart@07704840-8298-11de-bf8c-fd130f914ac9 |
35b8fad1bb1071613fe56f28618735be9dffbce5 | 6b9084d234c87d7597f97ec95808e13f599bf9a1 | /models/TransT/variants/pvt_variant/network.py | 94f771b0903e048e8623af1830ee9828453853d2 | [] | no_license | LitingLin/ubiquitous-happiness | 4b46234ce0cb29c4d27b00ec5a60d3eeb52c26fc | aae2d764e136ca4a36c054212b361dd7e8b22cba | refs/heads/main | 2023-07-13T19:51:32.227633 | 2021-08-03T16:02:03 | 2021-08-03T16:02:03 | 316,664,903 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,472 | py | import torch
import torch.nn as nn
def _get_single_scale(feat):
if isinstance(feat, (list, tuple)):
assert len(feat) == 1
feat = feat[0]
return feat
class PVTFeatureFusionNetwork(nn.Module):
def __init__(self, backbone, transformer, head,
transformer_hidden_dim, enable_input_projection,
template_output_stage, template_output_dim, template_output_shape,
search_output_stage, search_output_dim, search_output_shape):
super(PVTFeatureFusionNetwork, self).__init__()
self.backbone = backbone
self.transformer = transformer
self.head = head
self.template_output_stage = template_output_stage
self.template_output_shape = template_output_shape[1], template_output_shape[0] # H, W
self.search_output_stage = search_output_stage
self.search_output_shape = search_output_shape[1], search_output_shape[0] # H, W
self.template_input_projection = None
self.search_input_projection = None
if enable_input_projection:
self.template_input_projection = nn.Linear(template_output_dim, transformer_hidden_dim)
self.search_input_projection = nn.Linear(search_output_dim, transformer_hidden_dim)
nn.init.xavier_uniform_(self.template_input_projection.weight)
nn.init.xavier_uniform_(self.search_input_projection.weight)
def _forward_feat(self, x, output_stage, input_projection):
x = _get_single_scale(self.backbone(x, (output_stage,), False))
if input_projection is not None:
x = input_projection(x)
return x
def forward(self, z, x):
z_feat = self._forward_feat(z, self.template_output_stage, self.template_input_projection)
x_feat = self._forward_feat(x, self.search_output_stage, self.search_input_projection)
feat = self.transformer(z_feat, x_feat, *self.template_output_shape, *self.search_output_shape)
return self.head(feat.unsqueeze(0))
@torch.no_grad()
def template(self, z):
return self._forward_feat(z, self.template_output_stage, self.template_input_projection)
@torch.no_grad()
def track(self, z_feat, x):
x_feat = self._forward_feat(x, self.search_output_stage, self.search_input_projection)
feat = self.transformer(z_feat, x_feat, *self.template_output_shape, *self.search_output_shape)
return self.head(feat.unsqueeze(0))
| [
"[email protected]"
] | |
6f0e28490cd886a03f99cc71aa0351c98825ad0a | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/management/v20171101preview/get_management_group.py | c098e353012344ab7443b776bd5a168f08338890 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,338 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetManagementGroupResult',
'AwaitableGetManagementGroupResult',
'get_management_group',
]
@pulumi.output_type
class GetManagementGroupResult:
"""
The management group details.
"""
def __init__(__self__, children=None, details=None, display_name=None, id=None, name=None, tenant_id=None, type=None):
if children and not isinstance(children, list):
raise TypeError("Expected argument 'children' to be a list")
pulumi.set(__self__, "children", children)
if details and not isinstance(details, dict):
raise TypeError("Expected argument 'details' to be a dict")
pulumi.set(__self__, "details", details)
if display_name and not isinstance(display_name, str):
raise TypeError("Expected argument 'display_name' to be a str")
pulumi.set(__self__, "display_name", display_name)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if tenant_id and not isinstance(tenant_id, str):
raise TypeError("Expected argument 'tenant_id' to be a str")
pulumi.set(__self__, "tenant_id", tenant_id)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def children(self) -> Optional[Sequence['outputs.ManagementGroupChildInfoResponse']]:
"""
The list of children.
"""
return pulumi.get(self, "children")
@property
@pulumi.getter
def details(self) -> Optional['outputs.ManagementGroupDetailsResponse']:
"""
The details of a management group.
"""
return pulumi.get(self, "details")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[str]:
"""
The friendly name of the management group.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def id(self) -> str:
"""
The fully qualified ID for the management group. For example, /providers/Microsoft.Management/managementGroups/0000000-0000-0000-0000-000000000000
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the management group. For example, 00000000-0000-0000-0000-000000000000
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[str]:
"""
The AAD Tenant ID associated with the management group. For example, 00000000-0000-0000-0000-000000000000
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. For example, /providers/Microsoft.Management/managementGroups
"""
return pulumi.get(self, "type")
class AwaitableGetManagementGroupResult(GetManagementGroupResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetManagementGroupResult(
children=self.children,
details=self.details,
display_name=self.display_name,
id=self.id,
name=self.name,
tenant_id=self.tenant_id,
type=self.type)
def get_management_group(expand: Optional[str] = None,
group_id: Optional[str] = None,
recurse: Optional[bool] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetManagementGroupResult:
"""
The management group details.
:param str expand: The $expand=children query string parameter allows clients to request inclusion of children in the response payload.
:param str group_id: Management Group ID.
:param bool recurse: The $recurse=true query string parameter allows clients to request inclusion of entire hierarchy in the response payload.
"""
__args__ = dict()
__args__['expand'] = expand
__args__['groupId'] = group_id
__args__['recurse'] = recurse
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:management/v20171101preview:getManagementGroup', __args__, opts=opts, typ=GetManagementGroupResult).value
return AwaitableGetManagementGroupResult(
children=__ret__.children,
details=__ret__.details,
display_name=__ret__.display_name,
id=__ret__.id,
name=__ret__.name,
tenant_id=__ret__.tenant_id,
type=__ret__.type)
| [
"[email protected]"
] | |
106f06f8c9ccb5d0b6bbaf7b92803d75acdbb60c | 422faa17d37d453fc5a9b5a05854f144c90c0477 | /tests/test_general.py | 9a6e97a00ee7a608bac542ac8f21b69817781f2f | [
"MIT"
] | permissive | ArtellaPipe/artellapipe-tools-playblastmanager | 71ae722425c33040770de00be295ecc9b1674765 | 05f1647b6b3b367c9ba9d5e8978cf32b5823f819 | refs/heads/master | 2020-08-03T01:14:20.291644 | 2020-05-04T02:00:53 | 2020-05-04T02:00:53 | 211,578,868 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains general tests for artellapipe-tools-playblastmanager
"""
import pytest
from artellapipe.tools.playblastmanager import __version__
def test_version():
    """Smoke test: the package exposes a retrievable, non-falsy version."""
    assert __version__.get_version()
| [
"[email protected]"
] | |
11074d328013b09d059a18ca498387b2322d0959 | acaa1e54cf7963560b1ffe2c84136767f266d928 | /luxPlugin/Lux/LuxNodes/ShaderNodes/arealightShader.py | 23577a4a29e604d7a5d9fc8dd4d6c0d85e069563 | [] | no_license | LuxRender/LuxMaya | c019deba3c284d691f75dfbf2caed3b2418828b9 | 3891e40c3c4c3a054e5ff1ff16d051d4e690cc4a | refs/heads/master | 2021-01-01T02:25:32.792668 | 2014-04-09T12:06:04 | 2014-04-09T12:06:04 | 239,139,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,526 | py | # ------------------------------------------------------------------------------
# Lux material shader node for Maya
#
# by Doug Hammond 05/2008
#
# This file is licensed under the GPL
# http://www.gnu.org/licenses/gpl-3.0.txt
#
# $Id$
#
# ------------------------------------------------------------------------------
#
# Lux material shader node for Maya ( arealight attributes )
#
# ------------------------------------------------------------------------------
from maya import OpenMaya
from maya import OpenMayaMPx
from Lux.LuxNodes.ShaderNode import ShaderNode
class arealightShader(OpenMayaMPx.MPxNode, ShaderNode):
    """
    AreaLight fragment of luxshader.

    Holds the Maya attribute handles for the Lux arealight parameters
    (colour, gain, sample count, light group).  The class-level MObject
    placeholders are replaced with real attributes by shaderInitializer(),
    using the make* helpers (presumably provided by ShaderNode -- confirm).
    """
    # arealight
    # Placeholder attribute handles; populated by shaderInitializer().
    L = OpenMaya.MObject() # color
    gain = OpenMaya.MObject()
    numsamples = OpenMaya.MObject()
    lightGroup = OpenMaya.MObject()
    def __init__(self):
        # Only the Maya base class needs explicit initialisation here.
        OpenMayaMPx.MPxNode.__init__(self)
    @staticmethod
    def shaderInitializer():
        """Create the arealight attributes (run once at plug-in load)."""
        try:
            # color
            arealightShader.L = arealightShader.makeColor("arealightL", "all")
            arealightShader.gain = arealightShader.makeFloat("arealightGain","aga", 1.0)
            arealightShader.numsamples = arealightShader.makeInteger("arealightNumsamples", "ans", 1)
            arealightShader.lightGroup = arealightShader.makeString('arealightGroup', "alg", "default")
        except:
            # Surface the failure in Maya's script editor, then re-raise.
            OpenMaya.MGlobal.displayError("Failed to create arealight attributes\n")
            raise
| [
"devnull@localhost"
] | devnull@localhost |
1a3467126afd0d06ecc949e651ef026e823f9635 | 55d560fe6678a3edc9232ef14de8fafd7b7ece12 | /libs/python/test/wrapper_held_type.py | 5beb657e5fe8805cc00c6e996467e1a764b6ae9b | [
"BSL-1.0"
] | permissive | stardog-union/boost | ec3abeeef1b45389228df031bf25b470d3d123c5 | caa4a540db892caa92e5346e0094c63dea51cbfb | refs/heads/stardog/develop | 2021-06-25T02:15:10.697006 | 2020-11-17T19:50:35 | 2020-11-17T19:50:35 | 148,681,713 | 0 | 0 | BSL-1.0 | 2020-11-17T19:50:36 | 2018-09-13T18:38:54 | C++ | UTF-8 | Python | false | false | 740 | py | # Copyright David Abrahams 2005. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
'''
>>> from wrapper_held_type_ext import *
>>> d = data()
>>> print(d.id())
42
>>> do_nothing( d )
>>> print(d.id())
42
>>> d = create_data()
>>> print(d.id())
42
>>> do_nothing( d )
>>> print(d.id())
42
'''
def run(args = None):
    """Run this module's doctests.

    When *args* is given it temporarily becomes ``sys.argv`` (mirroring a
    command-line invocation).  Returns the ``(failed, attempted)`` result
    produced by ``doctest.testmod``.
    """
    import doctest
    import sys

    if args is not None:
        sys.argv = args
    current_module = sys.modules.get(__name__)
    return doctest.testmod(current_module)
if __name__ == '__main__':
print("running...")
import sys
status = run()[0]
if (status == 0): print("Done.")
sys.exit(status)
| [
"[email protected]"
] | |
369057547a5b566698c0b19db73582f98621b00b | 010215c1421f5275a846e7154189b22cdd3c89bc | /Misc/Data Structures/Tree/InOrderSucessor.py | c94e331a5097ff35d1f5ae308ec5f29381d89ff6 | [] | no_license | bsextion/CodingPractice_Py | ab54d5715298645a8fd7ab6945bf3b22d4e6a874 | da2847a04705394c32a6fe1b5f6c6b64c24647a3 | refs/heads/master | 2023-08-16T17:14:47.643989 | 2021-09-28T19:23:40 | 2021-09-28T19:23:40 | 383,658,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 356 | py | class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        """Binary-tree node: a value plus optional left/right children."""
        self.val = val
        self.left = left
        self.right = right
def inorder_successor_bst(root, d):
    """Return the in-order successor of value ``d`` in the BST at ``root``.

    The successor is the node holding the smallest key strictly greater
    than ``d``; ``None`` is returned when ``d`` is the largest key or the
    tree is empty.

    Bug fixes versus the original: the old body recursed with the wrong
    number of arguments, discarded every recursive result, shadowed the
    ``list`` builtin, and the bare module-level call
    ``inorder_successor_bst()`` crashed at import time (it has been
    removed).
    """
    successor = None
    node = root
    while node is not None:
        if node.val > d:
            # This node is a candidate; a closer successor may still live
            # in its left subtree.
            successor = node
            node = node.left
        else:
            node = node.right
    return successor
| [
"[email protected]"
] | |
8c3da2993bf4a417d5a34fbbd066b1711d95ab47 | c4249ce9e7cb26ae006bc9951ea676ae2250777b | /gamslib/nemhaus/nemhaus-scalar.py | e2fafab032c25e83f003bf558a9659f2b3d2ede0 | [] | no_license | vaidasj/alg-mod-rev | 79de3ef1e110f4bd07cbdef6951de2e4216f47f1 | a3ec6b5c21700a2f28ac6bf7db6aa22540748c6e | refs/heads/master | 2021-06-27T14:06:39.997411 | 2020-10-19T15:47:54 | 2020-10-19T15:47:54 | 180,074,989 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,629 | py | # MIP written by GAMS Convert at 12/13/18 10:32:18
#
# Equation counts
# Total E G L N X C B
# 42 6 36 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 57 37 20 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 165 165 0 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b38 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b39 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b40 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b41 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b42 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b43 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b44 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b45 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b46 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b47 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b48 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b49 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b50 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b51 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr= 2*m.x2 + 4*m.x3 + 3*m.x4 + 2*m.x5 + 4*m.x6 + 3*m.x7 + 2*m.x8 + 4*m.x9 + 3*m.x10 + 2*m.x11
+ 4*m.x12 + 3*m.x13 + 6*m.x14 + 2*m.x15 + 3*m.x16 + 6*m.x17 + 2*m.x18 + 3*m.x19 + 6*m.x20
+ 2*m.x21 + 3*m.x22 + 6*m.x23 + 2*m.x24 + 3*m.x25 + 5*m.x26 + 3*m.x27 + 5*m.x28 + 3*m.x29
+ 5*m.x30 + 3*m.x31 + 5*m.x32 + 3*m.x33 + 3*m.x34 + 3*m.x35 + 3*m.x36 + 3*m.x37, sense=minimize)
m.c2 = Constraint(expr= m.b38 + m.b39 + m.b40 + m.b41 == 1)
m.c3 = Constraint(expr= m.b42 + m.b43 + m.b44 + m.b45 == 1)
m.c4 = Constraint(expr= m.b46 + m.b47 + m.b48 + m.b49 == 1)
m.c5 = Constraint(expr= m.b50 + m.b51 + m.b52 + m.b53 == 1)
m.c6 = Constraint(expr= m.b54 + m.b55 + m.b56 + m.b57 == 1)
m.c7 = Constraint(expr= m.x2 - m.b38 - m.b46 >= -1)
m.c8 = Constraint(expr= m.x3 - m.b38 - m.b50 >= -1)
m.c9 = Constraint(expr= m.x4 - m.b38 - m.b54 >= -1)
m.c10 = Constraint(expr= m.x5 - m.b39 - m.b47 >= -1)
m.c11 = Constraint(expr= m.x6 - m.b39 - m.b51 >= -1)
m.c12 = Constraint(expr= m.x7 - m.b39 - m.b55 >= -1)
m.c13 = Constraint(expr= m.x8 - m.b40 - m.b48 >= -1)
m.c14 = Constraint(expr= m.x9 - m.b40 - m.b52 >= -1)
m.c15 = Constraint(expr= m.x10 - m.b40 - m.b56 >= -1)
m.c16 = Constraint(expr= m.x11 - m.b41 - m.b49 >= -1)
m.c17 = Constraint(expr= m.x12 - m.b41 - m.b53 >= -1)
m.c18 = Constraint(expr= m.x13 - m.b41 - m.b57 >= -1)
m.c19 = Constraint(expr= m.x14 - m.b42 - m.b46 >= -1)
m.c20 = Constraint(expr= m.x15 - m.b42 - m.b50 >= -1)
m.c21 = Constraint(expr= m.x16 - m.b42 - m.b54 >= -1)
m.c22 = Constraint(expr= m.x17 - m.b43 - m.b47 >= -1)
m.c23 = Constraint(expr= m.x18 - m.b43 - m.b51 >= -1)
m.c24 = Constraint(expr= m.x19 - m.b43 - m.b55 >= -1)
m.c25 = Constraint(expr= m.x20 - m.b44 - m.b48 >= -1)
m.c26 = Constraint(expr= m.x21 - m.b44 - m.b52 >= -1)
m.c27 = Constraint(expr= m.x22 - m.b44 - m.b56 >= -1)
m.c28 = Constraint(expr= m.x23 - m.b45 - m.b49 >= -1)
m.c29 = Constraint(expr= m.x24 - m.b45 - m.b53 >= -1)
m.c30 = Constraint(expr= m.x25 - m.b45 - m.b57 >= -1)
m.c31 = Constraint(expr= m.x26 - m.b46 - m.b50 >= -1)
m.c32 = Constraint(expr= m.x27 - m.b46 - m.b54 >= -1)
m.c33 = Constraint(expr= m.x28 - m.b47 - m.b51 >= -1)
m.c34 = Constraint(expr= m.x29 - m.b47 - m.b55 >= -1)
m.c35 = Constraint(expr= m.x30 - m.b48 - m.b52 >= -1)
m.c36 = Constraint(expr= m.x31 - m.b48 - m.b56 >= -1)
m.c37 = Constraint(expr= m.x32 - m.b49 - m.b53 >= -1)
m.c38 = Constraint(expr= m.x33 - m.b49 - m.b57 >= -1)
m.c39 = Constraint(expr= m.x34 - m.b50 - m.b54 >= -1)
m.c40 = Constraint(expr= m.x35 - m.b51 - m.b55 >= -1)
m.c41 = Constraint(expr= m.x36 - m.b52 - m.b56 >= -1)
m.c42 = Constraint(expr= m.x37 - m.b53 - m.b57 >= -1)
| [
"[email protected]"
] | |
921fa8f2a830465c26dbdde57b2c42a5a2b92c27 | 50f3067244b89c12c975d2df06fb6d8bac5c3dd1 | /docs/conf.py | aabefca297dc99e7bf78e2d3d2eb7f7ccb7dcc0a | [
"BSD-3-Clause"
] | permissive | bmerry/python-prompt-toolkit | 9cbc51d3369e393333ec9198432c191b87a0db1f | 8aa99991c189722678127f9a5d16fd9429828b8b | refs/heads/master | 2020-05-23T03:40:21.169188 | 2017-03-12T18:42:57 | 2017-03-12T18:42:57 | 84,746,245 | 1 | 0 | null | 2017-03-12T18:06:25 | 2017-03-12T18:06:25 | null | UTF-8 | Python | false | false | 8,570 | py | # -*- coding: utf-8 -*-
#
# prompt_toolkit documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 31 14:17:08 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.graphviz' ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'prompt_toolkit'
copyright = u'2014, Jonathan Slenders'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.13'
# The full version, including alpha/beta/rc tags.
release = '1.0.13'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
html_theme = 'default'
else:
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
html_theme = 'pyramid'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'prompt_toolkitdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'prompt_toolkit.tex', u'prompt_toolkit Documentation',
u'Jonathan Slenders', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'prompt_toolkit', u'prompt_toolkit Documentation',
[u'Jonathan Slenders'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'prompt_toolkit', u'prompt_toolkit Documentation',
u'Jonathan Slenders', 'prompt_toolkit', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| [
"[email protected]"
] | |
abf75a6d88c345fc7b3aee14aaf48dc58804f6d2 | d1c427249d1161c1f4f848e1de23d95c03ae40a3 | /28_Paymentprofile_id_staging.py | 6cb31265aecbbf31def67f0641571b8a566b21d4 | [] | no_license | Sangee2610/pythonscripts_march1 | 94b80ab3b037793022d114d7cd3604d69ba82147 | 2fb224fc0753beb3d65d873f658cdae247425cf1 | refs/heads/master | 2020-04-26T05:03:00.998024 | 2019-03-01T15:07:46 | 2019-03-01T15:07:46 | 173,321,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | import psycopg2
import config as cfg
conn = cfg.DATABASE_CONNECT
cur = conn.cursor()
import csv
import pandas as pd
import numpy as np
cur.execute("""
DROP TABLE IF EXISTS prd_Staging_Paymentprofile_Id;
CREATE TABLE prd_Staging_Paymentprofile_Id as
SELECT
(CASE WHEN directdebitkey = '' then NULL else cast(CAST(directdebitkey as FLOAT) as INT) END) as directdebitkey,
(CASE WHEN PaymentProfileKey = '' then NULL else cast(CAST(PaymentProfileKey as FLOAT) as INT) END) as PaymentProfileKey,
Id,
(CASE WHEN ContactKey = '' then NULL else cast(CAST(ContactKey as FLOAT) as INT) END) as ContactKey,
Contact
FROM prd_Landing_Paymentprofile_Id
""")
conn.commit()
conn.close()
| [
"[email protected]"
] | |
7f9c4842c03b353925d9c9584a7d99ea198f1fd7 | 004ed43634f98ada91ce6f19ccfa26146bcac9f3 | /137.py | 3c803cd9ba6e6e62023064f539b951045f07b176 | [] | no_license | tusonggao/leetcode | 490120028ccd1c33759fae5f7c2bc4cf820fab99 | 86be81f3df0d93bd676265211ccd1b29251c2824 | refs/heads/master | 2020-03-27T05:23:04.634426 | 2018-11-22T14:36:50 | 2018-11-22T14:36:50 | 146,014,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | #使用O(n)空间的解法1
class Solution(object):
    """Single Number II via arithmetic:
    3 * sum(unique) - sum(all) == 2 * answer."""

    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        tripled_unique_total = 3 * sum(set(nums))
        return (tripled_unique_total - sum(nums)) // 2
# O(n)-space solution 2 (hash-table frequency count)
class Solution(object):
    """Single Number II via a frequency table: the answer is the only
    value whose count is not exactly three."""

    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        from collections import Counter
        occurrences = Counter(nums)
        return next(
            (value for value, count in occurrences.items() if count != 3),
            None,
        )
# O(1)-space solution 1 (per-bit counting)
class Solution(object):
    """Single Number II with O(1) extra space via per-bit counting."""

    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Count how often each of the low 32 bits is set across all numbers.
        Bits of the triplicated values contribute multiples of 3, so each
        count mod 3 is the corresponding bit of the answer.
        """
        result = 0
        for i in range(32):
            count = 0
            for j in nums:
                count += (j>>i) & 1
            count %= 3
            result |= count<<i
        # Bug fix: Python ints are unbounded, so a negative answer appears
        # here as a value >= 2**31; reinterpret as a signed 32-bit integer.
        if result >= 2 ** 31:
            result -= 2 ** 32
        return int(result)
| [
"[email protected]"
] | |
059c47aad0c952917bc35d44b45e7a956c3726f0 | eb93a40dd29f8f6b72d2e7bbc375c226e6dc78c7 | /02_Arrays_in_Numpy/5.3_Indizierung.py | 8b576d3b57eecb304312633160e4ad5b8e27b079 | [
"MIT"
] | permissive | felixdittrich92/numerisches_python | ac789a4b19da1b6566ef149c52ffbcb97e60db25 | 0f895ee19b4fa3cf7ad38cd3dfe3cd7020ee34a7 | refs/heads/master | 2020-12-01T18:41:30.269530 | 2020-01-13T15:45:44 | 2020-01-13T15:45:44 | 230,732,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,975 | py | import numpy as np
print("--------------------------------------")
print("eindimensionale Arrays indizieren")
print("--------------------------------------")
F = np.array([1, 1, 2, 3, 5, 8, 13, 21])
# Ausgabe erstes Element
print(F[0])
# Ausgabe letzes Element
print(F[-1])
S = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
print(S[2:5])
print(S[:4])
print(S[6:])
print(S[:])
print("--------------------------------------")
print("Mehrdimensionale Arrays indizieren")
print("--------------------------------------")
# Spalte
# 0 1 2
A = np.array([[3.4, 5.7, -3.2], # 0
[1.1, -3.8, 7.7], # 1 Zeile
[2.2, 5.9, -1.0]]) # 2
# [Zeile][Spalte]
print(A[1][2])
# komplette Zeile
print(A[1])
# Position : [0, 1][0, 1]
# Spalte
# 0 1
B = np.array([ [[111, 112], [121, 122]], # 0
[[211, 212], [221, 222]], # 1 Zeile
[[311, 312], [321, 322]] ]) # 2
# [Zeile][Spalte][Position]
print(B[1][1][1])
print("--------------------------------------")
print("Mehrdimensionale Arrays indizieren")
print(" Teilbereichoperator")
print("--------------------------------------")
A = np.array([
[11, 12, 13, 14, 15],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35],
[41, 42, 43, 44, 45],
[51, 52, 53, 54, 55] ])
# [start:stop:step]
# Spalte , Zeile von(inklusive) : bis(exklusive)
print(A[:3, 2:])
print("------------------")
print(A[3:,:])
print("------------------")
print(A[:, 4:])
print("------------------")
X = np.arange(28).reshape(4, 7)
print(X)
print("------------------")
print(X[::2, ::3])
print("------------------")
print(X[::, ::3])
print("------------------")
# dreidimensionales Array
A = np.array([ [ [45, 12, 4], [45, 13, 5], [46, 12, 6] ],
[ [46, 14, 4], [45, 14, 5], [46, 11, 5] ],
[ [47, 13, 2], [48, 15, 5], [46, 15, 1] ], ])
print(A[1:3, 0:2, :])
| [
"[email protected]"
] | |
cd2cf9f275c063541c25fbac765f7c8469b29af8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2922/61020/315606.py | 36b9d2abe54aaeb1adafe1950c49c73cccc9b584 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | n=int(input())
a=[int(x) for x in input().split()]
a=list(set(a))
a.sort()
if len(a)>3:
print("NO")
else:
if len(a)==3 and 2*a[1]!=a[0]+a[2]:
print("NO")
else:
print("YES")
| [
"[email protected]"
] | |
7b0705d6febfc1e52481c590401bb939cccadd4f | 0308ca5b152a082c1a206a1a136fd45e79b48143 | /usvao/VAO/software/registry/VORegistryInABox/branches/sso/lib/python/VORegInABox/nicedom/domimpl.py | 2c9ad1218b4eec98c96cef5be4b47dba90d09d19 | [] | no_license | Schwarzam/usvirtualobservatory | b609bf21a09c187b70e311a4c857516284049c31 | 53fe6c14cc9312d048326acfa25377e3eac59858 | refs/heads/master | 2022-03-28T23:38:58.847018 | 2019-11-27T16:05:47 | 2019-11-27T16:05:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,731 | py | #! /usr/bin/env python
#
"""
an (incomplete) implementation of XPath v1.0. This implementation allows but
ignores predicates.
"""
import re, sys, os
from cStringIO import StringIO
from xml.sax import saxutils
dmxlen = 79
dadon = ' '
spre = re.compile('\s+')
class NodeList:
def __init__(self, node=None):
self.head = node
(self.tail, self.length) = self._pace(self.head)
self._lu = None
def _pace(self, node, length=0):
if node is None:
return (node, length)
elif node.nextSibling is None:
return (node, length+1)
else:
return self._pace(node.nextSibling, length+1)
def _setlookup(self):
self._lu = []
node = self.head
while node is not None:
self._lu.append(node._getself())
node = node.nextSibling
def appendChain(self, node):
if node is None: return node
(tail, length) = self._pace(node)
if self.head is None:
self.head = node
node.previousSibling = None
else:
self.tail.nextSibling = node
node.previousSibling = self.tail
self.tail = tail
self.length += length
if self._lu is not None:
n = node
while n is not None:
self._lu.append(n)
n = n.nextSibling
return node
def append(self, node):
node.nextSibling = None
return NodeList.appendChain(self, node)
def _is(self, nodeA, nodeB):
return nodeA is nodeB
def insert(self, node, before=None):
if before is None:
return self.append(node)
nd = self.head
while nd is not None and not self._is(nd, before):
nd = nd.nextSibling
if nd is None:
raise ValueError, "insert node not found"
if self._lu is not None:
self._lu = None
node.previousSibling = nd.previousSibling
if nd.previousSibling is None:
self.head = node
else:
nd.previousSibling.nextSibling = node
node.nextSibling = nd
nd.previousSibling = node
self.length += 1
def replace(self, newnode, oldnode):
if oldnode is None:
raise ValueError, "old node not found"
nd = self.head
while nd is not None and not self._is(nd, oldnode):
nd = nd.nextSibling
if nd is None:
raise ValueError, "old node has not been found"
if self._lu is not None:
self._lu = None
if nd.previousSibling is None:
# nd is the head
self.head = newnode
else:
nd.previousSibling.nextSibling = newnode
newnode.previousSibling = nd.previousSibling
newnode.nextSibling = nd.nextSibling
newnode.parentNode = nd.parentNode
if self.tail == nd:
self.tail = newnode
nd.nextSibling = None
nd.previousSibling = None
nd.parentNode = None
return nd
def remove(self, oldnode):
if oldnode is None:
raise ValueError, "old node not found"
nd = self.head
while nd is not None and not self._is(nd, oldnode):
nd = nd.nextSibling
if nd is None:
raise ValueError, "old node has not been found"
if self._lu is not None:
self._lu = None
if nd.previousSibling is None:
self.head = nd.nextSibling
else:
nd.previousSibling.nextSibling = nd.nextSibling
if nd.nextSibling is None:
self.tail = nd.previousSibling
else:
nd.nextSibling.previousSibling = nd.previousSibling
nd.nextSibling = None
nd.previousSibling = None
self.length -= 1
return nd
def item(self, index):
if self._lu is None or len(self._lu) != self.length:
self._setlookup()
if index >= self.length or index < 0:
return None
else:
return self._lu[index]
def clone(self, parent=None):
    # Return a deep copy of this list; each cloned member gets *parent*
    # as its parentNode when one is provided.
    out = NodeList()
    self._copyInto(out, parent)
    return out
def _copyInto(self, out, parent):
node = self.head
while node is not None:
cnode = node.cloneNode(true)
if parent is not None:
cnode.parentNode = parent
out.append(cnode)
class _WrappedNode:
def __init__(self, node):
self.nextSibling = None
self.previousSibling = None
self.self = node
def _getself(self):
return self.self
class WrappedNodeList(NodeList):
    """This NodeList implementation allows its members to belong to multiple
    lists: every node handed in is stored behind a _WrappedNode that
    carries its own sibling links."""

    def __init__(self, node=None):
        NodeList.__init__(self)
        if node is not None:
            self.append(node)

    def _is(self, nodeA, nodeB):
        # unwrap either side so a wrapper matches its wrapped node
        if hasattr(nodeA, "self"):
            nodeA = nodeA.self
        if hasattr(nodeB, "self"):
            nodeB = nodeB.self
        return nodeA is nodeB

    def append(self, node):
        return NodeList.append(self, _WrappedNode(node))

    def appendChain(self, node):
        # BUG FIX: the original recursed into *this* method
        # (self.appendChain) with the undefined name ``WrappedNode``,
        # causing a NameError/infinite recursion.  Delegate to the base
        # class with a properly wrapped node instead.
        return NodeList.appendChain(self, _WrappedNode(node))

    def insert(self, node, before):
        return NodeList.insert(self, _WrappedNode(node), _WrappedNode(before))

    def replace(self, node, before):
        return NodeList.replace(self, _WrappedNode(node), _WrappedNode(before))

    def remove(self, node):
        return NodeList.remove(self, _WrappedNode(node))
class NamedNodeMap(NodeList):
    """A NodeList that additionally supports lookup of members by their
    nodeName, with a lazily built name -> node cache."""

    def __init__(self, node=None):
        NodeList.__init__(self, node)
        self._map = None  # lazy nodeName -> node cache

    def _setmap(self):
        # (re)build the name lookup cache from the underlying chain
        self._map = {}
        node = self.head
        while node is not None:
            if node.nodeName is not None:
                self._map[node.nodeName] = node._getself()
            node = node.nextSibling

    def getNamedItem(self, name):
        """Return the member named *name*, or None when absent."""
        if self._map is None:
            self._setmap()
        # membership test replaces the Python-2-only dict.has_key()
        if name in self._map:
            return self._map[name]
        else:
            return None

    def removeNamedItem(self, name):
        return self.remove(self.getNamedItem(name))

    def setNamedItem(self, node):
        """Add *node*, replacing any member with the same nodeName.

        Returns the replaced node, or None when nothing was replaced.
        """
        out = None
        old = self.getNamedItem(node.nodeName)
        if old is not None:
            out = self.replace(node, old)
        else:
            self.append(node)
        if self._map is not None:
            self._map[node.nodeName] = node
        return out

    def clone(self, parent=None):
        # BUG FIX: the original built the copy but never returned it,
        # so NamedNodeMap.clone() always yielded None.
        out = NamedNodeMap()
        self._copyInto(out, parent)
        return out
class WrappedNamedNodeMap(WrappedNodeList):
    """This NamedNodeMap implementation allows its members to belong to
    multiple maps; results are unwrapped before being returned."""

    def __init__(self, node):
        NodeList.__init__(self, node)
        self._map = None

    def _setmap(self):
        NamedNodeMap._setmap(self)

    def _deref(self, node):
        # unwrap a _WrappedNode result; None passes through unchanged
        if node is not None:
            node = node.self
        return node

    def getNamedItem(self, name):
        # BUG FIX: the original called the misspelled ``getNameItem``,
        # which does not exist on NamedNodeMap (AttributeError).
        return self._deref(NamedNodeMap.getNamedItem(self, name))

    def removeNamedItem(self, name):
        return self._deref(NamedNodeMap.removeNamedItem(self, name))

    def setNamedItem(self, node):
        return self._deref(NamedNodeMap.setNamedItem(self, node))
class Node:
    """An implementation of a DOM Node (W3C DOM Level 1 subset)."""

    # nodeType constants, per the DOM specification
    ELEMENT_NODE = 1
    ATTRIBUTE_NODE = 2
    TEXT_NODE = 3
    CDATA_SECTION_NODE = 4
    ENTITY_REFERENCE_NODE = 5
    ENTITY_NODE = 6
    PROCESSING_INSTRUCTION_NODE = 7
    COMMENT_NODE = 8
    DOCUMENT_NODE = 9
    DOCUMENT_TYPE_NODE = 10
    DOCUMENT_FRAGMENT_NODE = 11
    NOTATION_NODE = 12

    def __init__(self, type, owner=None):
        self.nodeType = type
        self.nodeName = None
        self.nodeValue = None
        self.parentNode = None
        self.childNodes = NodeList()
        self._updateHeadTail()
        self.nextSibling = None
        self.previousSibling = None
        self.attributes = None
        self.ownerDocument = owner

    def _getself(self):
        # lookup hook shared with _WrappedNode: a bare Node is its own self
        return self

    def insertBefore(self, newChild, refChild):
        """Insert *newChild* immediately before *refChild*.

        FIX: error messages had "doucment" typos; exceptions now use
        the py2/py3-compatible call form.
        """
        if newChild.ownerDocument is not self.ownerDocument:
            raise ValueError("new node is from wrong document")
        if newChild.parentNode is not None:
            raise ValueError("new node is already inside a document")
        # best effort: ensure the child is not already linked here
        try:
            self.removeChild(newChild)
        except:
            pass
        newChild.parentNode = self
        out = self.childNodes.insert(newChild, refChild)
        self._updateHeadTail()
        return out

    def _updateHeadTail(self):
        # mirror the child list's ends onto the DOM convenience attributes
        self.firstChild = self.childNodes.head
        self.lastChild = self.childNodes.tail

    def replaceChild(self, newChild, oldChild):
        """Replace *oldChild* with *newChild*; returns the removed node."""
        if newChild.ownerDocument is not self.ownerDocument:
            raise ValueError("new node is from wrong document")
        if newChild.parentNode is not None:
            raise ValueError("new node is already inside a document")
        out = self.childNodes.replace(newChild, oldChild)
        self._updateHeadTail()
        return out

    def removeChild(self, oldChild):
        """Detach *oldChild* from this node and return it."""
        out = self.childNodes.remove(oldChild)
        out.parentNode = None
        self._updateHeadTail()
        return out

    def appendChild(self, newChild):
        """Append *newChild* as the last child and return it."""
        if newChild.ownerDocument is not self.ownerDocument:
            raise ValueError("new node is from wrong document")
        if newChild.parentNode is not None:
            raise ValueError("new node is already inside a document")
        newChild.parentNode = self
        out = self.childNodes.append(newChild)
        self._updateHeadTail()
        return out

    def hasChildNodes(self):
        return self.childNodes.length > 0

    def cloneNode(self, deep):
        # BUG FIX: the original constructed Node(type) with the *builtin*
        # ``type`` function instead of this node's own nodeType.
        out = Node(self.nodeType)
        self._copyInto(out, deep)
        return out

    def _copyInto(self, out, deep):
        # copy the per-node state shared by all node kinds; subclasses
        # extend this for their extra attributes
        out.nodeName = self.nodeName
        out.nodeValue = self.nodeValue
        out.ownerDocument = self.ownerDocument
        if self.attributes is not None:
            out.attributes = self.attributes.clone()
        if deep:
            out.childNodes = self.childNodes.clone()

    def toxml(self):
        """Serialize this node to an XML string."""
        out = StringIO()
        self.writexml(out)
        return out.getvalue()

    def __repr__(self):
        return self.toxml()

    def encode(self, text):
        # XML-escape after encoding to UTF-8; characters the codec cannot
        # represent become numeric character references
        return saxutils.escape(text.encode('utf_8', 'xmlcharrefreplace'))
class Text(Node):
    """A DOM text node; the character data lives on nodeValue."""

    def __init__(self, owner=None, text=''):
        Node.__init__(self, Node.TEXT_NODE, owner)
        self.nodeValue = text
        self.nodeName = "#text"

    def cloneNode(self, deep):
        # BUG FIX: the original passed *self* as the owner document and
        # called _copyInto without the required ``deep`` argument.
        out = Text(self.ownerDocument, self.nodeValue)
        self._copyInto(out, deep)
        return out

    def getvalue(self):
        return self.nodeValue

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        # escaped character data written after the indent prefix
        strm.write(prefix)
        strm.write(self.encode(self.nodeValue))
class Comment(Node):
    """A DOM comment node, serialized as ``<!-- ... -->``."""

    def __init__(self, owner=None, text=''):
        Node.__init__(self, Node.COMMENT_NODE, owner)
        self.nodeValue = text
        self.nodeName = "#comment"

    def cloneNode(self, deep):
        # BUG FIX: the original passed *self* as the owner document and
        # omitted the required ``deep`` argument to _copyInto.
        out = Comment(self.ownerDocument, self.nodeValue)
        self._copyInto(out, deep)
        return out

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        strm.write(prefix)
        strm.write('<!-- ')
        strm.write(self.nodeValue)
        strm.write('-->\n')

    def getvalue(self):
        # comments contribute nothing to an element's text value
        return ''
class ProcessingInstruction(Node):
    """A DOM processing instruction (``<?target data?>``); the target is
    stored as nodeName and the data as nodeValue."""

    def __init__(self, target, owner=None, text=''):
        Node.__init__(self, Node.PROCESSING_INSTRUCTION_NODE, owner)
        self.nodeValue = text
        self.nodeName = target

    def cloneNode(self, deep):
        # BUG FIX: the original passed arguments in the wrong order and
        # read the nonexistent attribute ``self.target`` (the target is
        # kept in nodeName); it also omitted ``deep`` from _copyInto.
        out = ProcessingInstruction(self.nodeName, self.ownerDocument,
                                    self.nodeValue)
        self._copyInto(out, deep)
        return out

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        strm.write(prefix)
        strm.write('<?')
        strm.write(self.nodeName)
        strm.write(' ')
        strm.write(self.nodeValue)
        strm.write('?>\n')

    def getvalue(self):
        # PIs contribute nothing to an element's text value
        return ''
class Attr(Node):
    """A DOM attribute node; ``name``/``value`` mirror nodeName/nodeValue,
    and ``specified`` records whether an explicit value was given."""

    def __init__(self, name, value=None, owner=None):
        Node.__init__(self, Node.ATTRIBUTE_NODE, owner)
        self.nodeName = name
        self.nodeValue = value
        self.specified = value is not None
        self.name = name
        self.value = value

    def cloneNode(self, deep):
        # BUG FIX: the original called Attr(self, self.name, self.value),
        # which does not match Attr's (name, value, owner) signature; it
        # also omitted the required ``deep`` argument to _copyInto.
        out = Attr(self.name, self.value, self.ownerDocument)
        self._copyInto(out, deep)
        return out

    def _copyInto(self, out, deep):
        Node._copyInto(self, out, deep)
        out.specified = self.specified

    def getvalue(self):
        if self.value is not None:
            return self.value
        else:
            return ''

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        # attributes serialize as name="escaped value"
        strm.write(self.name)
        strm.write('="')
        if self.value is not None:
            strm.write(self.encode(self.value))
        strm.write('"')
class Element(Node):
    """A DOM element node with a tag name and a named attribute map."""

    def __init__(self, tag, owner=None):
        Node.__init__(self, Node.ELEMENT_NODE, owner)
        self.nodeName = tag
        self.tagName = tag
        self.nodeValue = None
        self.attributes = NamedNodeMap()
        # when True, writeOpenTag() emits one attribute per line
        self.oneAttPerLine = False

    def cloneNode(self, deep):
        # BUG FIX: the original returned a Text node instead of an
        # Element, and omitted the required ``deep`` argument.
        out = Element(self.tagName, self.ownerDocument)
        self._copyInto(out, deep)
        return out

    def getElementsByTagName(self, name):
        raise RuntimeError("not yet supported")

    def getChildrenByTagName(self, name, out=None):
        """
        return all child elements that match a given name
        @param name the tag name to match
        @param out the NodeList object to add the nodes to; if None,
                      one will be created
        @return NodeList the list of matching nodes
        """
        # BUG FIX: the original ignored the *out* argument and always
        # created a fresh list, contradicting its own documentation.
        if out is None:
            out = WrappedNodeList()
        child = self.firstChild
        while child is not None:
            if child.nodeType == Node.ELEMENT_NODE and \
               child.nodeName == name:
                out.append(child)
            child = child.nextSibling
        return out

    def getAttributeNode(self, name):
        return self.attributes.getNamedItem(name)

    def setAttributeNode(self, newAttr):
        if newAttr.ownerDocument is not self.ownerDocument:
            raise ValueError("new attribute is from wrong document")
        return self.attributes.setNamedItem(newAttr)

    def removeAttributeNode(self, name):
        return self.attributes.removeNamedItem(name)

    def getAttribute(self, name):
        out = self.getAttributeNode(name)
        if out is None:
            return ''
        return out.nodeValue

    def setAttribute(self, name, value):
        self.attributes.setNamedItem(
            self.ownerDocument.createAttribute(name, value))

    def removeAttribute(self, name):
        # best effort: missing attributes are silently ignored
        try:
            self.removeAttributeNode(name)
        except:
            pass

    def normalize(self):
        # BUG FIX: the original definition was missing ``self``.
        raise RuntimeError("normalize not yet supported")

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        """Serialize this element and, recursively, its children."""
        self.writeOpenTag(strm, prefix, maxlen)
        if self.childNodes.length > 0:
            strm.write('>')
            strm.write('\n')
            node = self.firstChild
            while node is not None:
                node.writexml(strm, prefix + addon, addon, maxlen)
                node = node.nextSibling
            self.writeCloseTag(strm, prefix)
        else:
            # no children: use the self-closing form
            strm.write('/>')
            strm.write('\n')

    def writeOpenTag(self, strm, prefix='', maxlen=dmxlen):
        """Write ``<tag att="v" ...`` (without the closing '>'), wrapping
        attributes onto continuation lines when a line would exceed
        *maxlen*.  Returns the indent column used for wrapped attributes."""
        buf = StringIO()
        buf.write(prefix)
        buf.write('<')
        buf.write(self.tagName)
        attindent = len(buf.getvalue())
        first = True
        if self.attributes.length > 0:
            for i in xrange(0, self.attributes.length):
                att = self.attributes.item(i)
                if not att.specified:
                    # skip attributes that never got an explicit value
                    continue
                att = att.toxml()
                if not first and \
                   (self.oneAttPerLine or
                    len(att) + len(buf.getvalue()) + 1 > maxlen):
                    # flush the current line and continue on a fresh one
                    strm.write(buf.getvalue())
                    strm.write('\n')
                    buf.close()
                    buf = StringIO()
                    buf.write(attindent * ' ')
                buf.write(' ')
                buf.write(att)
                if first:
                    first = False
        strm.write(buf.getvalue())
        return attindent

    def writeCloseTag(self, strm, prefix=''):
        strm.write(prefix)
        strm.write('</')
        strm.write(self.nodeName)
        strm.write('>')

    def getvalue(self):
        # concatenation of all child text content
        out = ''
        child = self.firstChild
        while child is not None:
            out += child.getvalue()
            child = child.nextSibling
        return out
class TextElement(Element):
    """An element whose content is a single run of character data: the
    text is kept on nodeValue and also mirrored as a child Text node."""
    def __init__(self, tagname, text='', owner=None):
        Element.__init__(self, tagname, owner)
        # when True, writexml() reflows the text across multiple lines
        self.wrapLines = False
        if owner is not None:
            textNode = owner.createTextNode(text)
            self.appendChild(textNode)
        self.nodeValue = text
    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        self.writeOpenTag(strm, prefix, maxlen)
        strm.write('>')
        if self.wrapLines:
            # multi-line form: wrapped text indented one level deeper
            strm.write('\n')
            self.wraplines(strm, self.encode(self.nodeValue), prefix+addon,
                           maxlen)
            strm.write(prefix)
        else:
            # single-line form: text directly between the tags
            strm.write(self.encode(self.nodeValue))
        self.writeCloseTag(strm)
        strm.write('\n')
    def wraplines(self, strm, text, prefix=dadon, maxlen=dmxlen):
        # Greedy word-wrap: emit roughly maxlen characters per line,
        # preferring to break at spaces.
        maxlen -= len(prefix)-1
        p = 0   # start of the current line within *text*
        e1 = 0  # length of the line to emit, relative to p
        e2 = 0
        while p < len(text):
            if len(text)-p <= maxlen:
                # the remainder fits on one line
                e1 = len(text)
            else:
                # find the first space at/after the wrap column...
                e2 = text[p+maxlen:].find(' ')
                if e2 < 0: e2 = len(text)
                # ...then break at the last space not beyond it
                e1 = text[p:p+maxlen+e2].rfind(' ')
                # NOTE(review): when no space exists, e1 = e2 mixes an
                # offset relative to p+maxlen with one relative to p —
                # looks off for very long unbroken words; confirm.
                if e1 < 0: e1 = e2
            strm.write(prefix)
            strm.write(text[p:p+e1])
            strm.write('\n')
            p += e1+1
            # skip leading spaces at the start of the next line
            while p < len(text) and text[p] == ' ': p += 1
class Document(Node):
    """The DOM document root.  At most one Element child is allowed; it is
    tracked through the ``documentElement`` attribute."""

    def __init__(self, tag=None):
        Node.__init__(self, Node.DOCUMENT_NODE, self)
        self.nodeName = "#document"
        self.nodeValue = None
        self.documentElement = None
        if tag is not None:
            self.appendChild(self.createElement(tag))

    def appendChild(self, newChild):
        if self.documentElement is not None and \
           newChild.nodeType == Node.ELEMENT_NODE:
            raise RuntimeError("Document already has root node")
        out = Node.appendChild(self, newChild)
        if newChild.nodeType == Node.ELEMENT_NODE:
            self.documentElement = newChild
        return out

    def insertBefore(self, newChild, refChild):
        if self.documentElement is not None and \
           newChild.nodeType == Node.ELEMENT_NODE:
            raise RuntimeError("Document already has root node")
        if newChild.nodeType == Node.ELEMENT_NODE:
            self.documentElement = newChild
        return Node.insertBefore(self, newChild, refChild)

    def replaceChild(self, newChild, oldChild):
        if self.documentElement is not None and \
           oldChild.nodeType != Node.ELEMENT_NODE and \
           newChild.nodeType == Node.ELEMENT_NODE:
            raise RuntimeError("Document already has root node")
        out = Node.replaceChild(self, newChild, oldChild)
        # BUG FIX: the original pointed documentElement at the *removed*
        # node; it must track the node now in the tree.
        if out.nodeType == Node.ELEMENT_NODE:
            self.documentElement = None
        if newChild.nodeType == Node.ELEMENT_NODE:
            self.documentElement = newChild
        return out

    def removeChild(self, oldChild):
        out = Node.removeChild(self, oldChild)
        if oldChild.nodeType == Node.ELEMENT_NODE:
            self.documentElement = None
        return out

    # -- node factories ---------------------------------------------------

    def createComment(self, data):
        return Comment(self, data)

    def createTextNode(self, data):
        return Text(self, data)

    def createElement(self, tagname):
        return Element(tagname, self)

    def createTextElement(self, tagname, text):
        return TextElement(tagname, text, self)

    def createAttribute(self, name, value=None):
        return Attr(name, value, self)

    def createProcessingInstruction(self, target, data):
        return ProcessingInstruction(target, self, data)

    def getElementsByTagName(self, name):
        raise RuntimeError("not yet supported")

    def importNode(self, node, remove=True):
        """Adopt *node*, its attributes, and its descendants into this
        document, optionally detaching it from its current parent."""
        # BUG FIX: the original compared ``node.parentNode is not Node``
        # (the class object), which is true even for parentless nodes and
        # crashed on None.removeChild.
        if remove and node.parentNode is not None:
            node.parentNode.removeChild(node)
        node.ownerDocument = self
        if node.attributes is not None:
            for i in xrange(0, node.attributes.length):
                self.importNode(node.attributes.item(i), False)
        node = node.firstChild
        while node is not None:
            self.importNode(node, False)
            node = node.nextSibling

    def writexml(self, strm, prefix='', addon=dadon, maxlen=dmxlen):
        strm.write(prefix)
        strm.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        node = self.firstChild
        while node is not None:
            node.writexml(strm, prefix, addon, maxlen)
            node = node.nextSibling

    def getvalue(self):
        # BUG FIX: the original read the unbound name ``firstChild``,
        # appended the bound method instead of calling getvalue(), and
        # never advanced ``child`` (an infinite loop).
        out = ''
        child = self.firstChild
        while child is not None:
            out += child.getvalue()
            child = child.nextSibling
        return out
| [
"usvirtualobservatory@5a1e9bf7-f4d4-f7d4-5b89-e7d39643c4b5"
] | usvirtualobservatory@5a1e9bf7-f4d4-f7d4-5b89-e7d39643c4b5 |
fc3d484eaf43c149b9bc4c394e98539e429435ff | 81b20a9c51779c21b779ac0b1c5bf669359521ef | /py_object_detection/tf_api/object_detection/builders/box_coder_builder.py | a68bc0bba4d9538894ee1ba887e14d4f72a804dc | [] | no_license | thekindler/py-object-detection | bae1401f025458605c9244f9a763e17a0138d2ec | a8d13c496bab392ef5c8ad91a20fbfa9af1899bb | refs/heads/master | 2023-06-23T02:42:08.180311 | 2021-07-17T18:40:46 | 2021-07-17T18:40:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,897 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A function to build an object detection box coder from configuration."""
from py_object_detection.tf_api.object_detection.box_coders import mean_stddev_box_coder, faster_rcnn_box_coder, \
keypoint_box_coder
from py_object_detection.tf_api.object_detection.box_coders import square_box_coder
from py_object_detection.tf_api.object_detection.protos import box_coder_pb2
def build(box_coder_config):
    """Builds a box coder object based on the box coder config.

    Args:
      box_coder_config: A box_coder.proto object containing the config for the
        desired box coder.

    Returns:
      BoxCoder based on the config.

    Raises:
      ValueError: On empty box coder proto.
    """
    if not isinstance(box_coder_config, box_coder_pb2.BoxCoder):
        raise ValueError('box_coder_config not of type box_coder_pb2.BoxCoder.')

    # dispatch on whichever oneof field is populated in the proto
    oneof = box_coder_config.WhichOneof('box_coder_oneof')

    if oneof == 'faster_rcnn_box_coder':
        cfg = box_coder_config.faster_rcnn_box_coder
        return faster_rcnn_box_coder.FasterRcnnBoxCoder(
            scale_factors=[cfg.y_scale, cfg.x_scale,
                           cfg.height_scale, cfg.width_scale])

    if oneof == 'keypoint_box_coder':
        cfg = box_coder_config.keypoint_box_coder
        return keypoint_box_coder.KeypointBoxCoder(
            cfg.num_keypoints,
            scale_factors=[cfg.y_scale, cfg.x_scale,
                           cfg.height_scale, cfg.width_scale])

    if oneof == 'mean_stddev_box_coder':
        return mean_stddev_box_coder.MeanStddevBoxCoder(
            stddev=box_coder_config.mean_stddev_box_coder.stddev)

    if oneof == 'square_box_coder':
        cfg = box_coder_config.square_box_coder
        return square_box_coder.SquareBoxCoder(
            scale_factors=[cfg.y_scale, cfg.x_scale, cfg.length_scale])

    raise ValueError('Empty box coder.')
| [
"[email protected]"
] | |
3c42c9c5c4d05db796df9b1dbcd44d792c7c20c4 | 0faf042dafd21547e00a872f636298217f03ae7a | /setup.py | fa19c6ca95866fbd2ef029f241fa04d712d2ce6f | [
"MIT"
] | permissive | thonra/acb.py | 46a82ef4fa5ac4dfddb4b0e7870d99c0901fd54f | 07b541b7febe2723d2479b53e9a138537ac039ce | refs/heads/master | 2023-03-05T14:41:02.043840 | 2021-02-11T02:11:12 | 2021-02-11T02:11:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | import sys
from setuptools import setup, Extension
def main():
args = dict(
ext_modules=[
Extension(
"_acb_speedup", sources=["fast_sub/module.c"], py_limited_api=True
)
],
)
setup(**args)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
9e0d86e491c9da06f5a81810e2c2731b2f262398 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R1/benchmark/startPyquil115.py | 81cdad116459c95b012d36c3ee9e375a88fe5403 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,148 | py | # qubit number=4
# total number=10
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit() -> Program:
    """Assemble and return the fixed 4-qubit benchmark circuit.

    Gate sequence is fixed; the trailing "number=" comments preserve the
    generator's original gate indices.
    """
    circuit = Program()  # circuit begin
    circuit += H(0)  # number=1
    circuit += H(1)  # number=2
    circuit += CNOT(3, 0)  # number=6
    circuit += RX(-1.561371548834127, 1)  # number=9
    circuit += Z(3)  # number=7
    circuit += CNOT(3, 0)  # number=8
    circuit += H(2)  # number=3
    circuit += H(3)  # number=4
    # circuit end
    return circuit
def summrise_results(bitstrings) -> dict:
    """Tally how many times each measured bitstring occurs."""
    counts = {}
    for bits in bitstrings:
        counts[bits] = counts.get(bits, 0) + 1
    return counts
if __name__ == '__main__':
    prog = make_circuit()
    # run the circuit 1024 times on a simulated 4-qubit QVM
    qvm = get_qc('4q-qvm')
    results = qvm.run_and_measure(prog,1024)
    # transpose the per-qubit result arrays into per-shot bitstrings
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    # persist the measurement histogram as CSV-ish output
    writefile = open("../data/startPyquil115.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| [
"[email protected]"
] | |
4447e96d1d09fad72873ff9e7b30509590ca119e | 25c531d2acc0218cc8fc3e275db4c2042dbc3a96 | /flow_of_control/looping/odd_or_even.py | f679274657adf6626e371e7f73cd392dfa051697 | [] | no_license | anaswara-97/python_project | 230242287886479ec134cb48cdfbacb70e9c9228 | efd0156d0c67b9686f52638b8b3264eb6bdef23d | refs/heads/master | 2023-08-16T16:16:11.063927 | 2021-09-20T14:24:50 | 2021-09-20T14:24:50 | 402,699,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | num=int(input("enter the number : "))
if(num%2==0):
print(num,"is an even number")
else:
print(num,"is an odd number")
| [
"[email protected]"
] | |
f96f04496e819e172c6148e5e68b01f4b46af114 | 51ecea4fc3409cc2c2c9b2547ebefb3cae42ef87 | /backend/chat/models.py | d83f538a9407b5e5d2ae0591fa29b4ced7d18cb4 | [] | no_license | crowdbotics-apps/binge-watch-28251 | e2b8e7f711258afa3f04530c4a376e14fb71ad9b | a747c11de008eac81032e1aafa65b73914588eb7 | refs/heads/master | 2023-05-31T11:38:46.201806 | 2021-06-27T07:30:19 | 2021-06-27T07:30:19 | 380,675,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,152 | py | from django.conf import settings
from django.db import models
class Thread(models.Model):
    """A chat conversation thread (generated model)."""
    # display name of the thread
    name = models.CharField(
        max_length=255,
    )
    # URL of the thread's avatar/cover image
    thread_photo = models.URLField()
    # stamped once when the row is first created
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadMember(models.Model):
    """Membership of a Profile in a Thread (generated model)."""
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadmember_profile",
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadmember_thread",
    )
    # whether this member can administer the thread
    is_admin = models.BooleanField()
    # stamped when the membership row is created
    timestamp_joined = models.DateTimeField(
        auto_now_add=True,
    )
    # NOTE(review): no null=True/default on the two fields below, so they
    # are required at save time even for members who never left — confirm.
    timestamp_left = models.DateTimeField()
    last_rejoined = models.DateTimeField()
class MessageAction(models.Model):
    """A per-profile action applied to a Message (generated model)."""
    # short action code (max 7 chars); presumably an enum-like string —
    # verify the valid values with callers
    action = models.CharField(
        max_length=7,
    )
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="messageaction_message",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="messageaction_profile",
    )
    # stamped when the action row is created
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadAction(models.Model):
    """A per-profile action applied to a Thread (generated model)."""
    # short action code (max 7 chars); presumably an enum-like string —
    # verify the valid values with callers
    action = models.CharField(
        max_length=7,
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadaction_thread",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadaction_profile",
    )
    # stamped when the action row is created
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class ForwardedMessage(models.Model):
    """Record of a Message being forwarded into another Thread
    (generated model)."""
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_message",
    )
    # the profile that performed the forward
    forwarded_by = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_by",
    )
    # the destination thread
    forwarded_to = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_to",
    )
    # stamped when the forward row is created
    timestamp_forwarded = models.DateTimeField(
        auto_now_add=True,
    )
class Message(models.Model):
    """A single chat message inside a Thread (generated model)."""
    # message body text
    message = models.TextField()
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="message_thread",
    )
    # sender is the thread *membership*, not the profile directly
    sent_by = models.ForeignKey(
        "chat.ThreadMember",
        on_delete=models.CASCADE,
        related_name="message_sent_by",
    )
    # URL of an attached file/image
    attachment = models.URLField()
    # delivery/read state flags; NOTE(review): no defaults declared, so
    # all three must be supplied on save — confirm that is intended
    is_draft = models.BooleanField()
    is_delivered = models.BooleanField()
    is_read = models.BooleanField()
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
    # NOTE(review): not nullable, yet logically unset until
    # delivered/read — confirm
    timestamp_delivered = models.DateTimeField()
    timestamp_read = models.DateTimeField()
# Create your models here.
| [
"[email protected]"
] | |
51223964a1071ee1dbf12ed363ebed058c2eb870 | 6a302928aad4cbb5d86c8bbcabded1f03a157adc | /Ensemble_Methods/bagging_classification.py | 17d3a54b90d23066b4e3bea1dbb0d1f8cfdf2402 | [] | no_license | IvanRado/AI | c905a2c1d566777869405f371611c1bd9690d7e4 | 9c9d5cee02b18778e9a417de3e9ad07c0a1fd6c4 | refs/heads/master | 2020-12-08T06:51:45.766263 | 2020-06-20T23:41:16 | 2020-06-20T23:41:16 | 232,917,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,143 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils import shuffle
def plot_decision_boundary(X, model):
    """Overlay *model*'s decision boundary on the current matplotlib axes."""
    step = .02  # mesh resolution
    # bounding box of the data, padded by one unit on every side
    x_lo, x_hi = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_lo, y_hi = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_lo, x_hi, step),
                         np.arange(y_lo, y_hi, step))
    # classify every mesh point, then reshape back onto the grid
    grid = np.c_[xx.ravel(), yy.ravel()]
    Z = model.predict(grid).reshape(xx.shape)
    plt.contour(xx, yy, Z, cmap=plt.cm.Paired)
np.random.seed(10)

N = 500  # number of samples
D = 2    # dimensionality
X = np.random.randn(N,D)

# shift each quarter of the data into a different corner (XOR-style
# layout), so the two classes are not linearly separable
sep = 2
X[:125] += np.array([sep, sep])
X[125:250] += np.array([sep, -sep])
X[250:375] += np.array([-sep, -sep])
X[375:] += np.array([-sep, sep])
# diagonally opposite corners share a label
Y = np.array([0]*125 + [1]*125 + [0]*125 + [1]*125 )

plt.scatter(X[:,0], X[:,1], s=100, c = Y, alpha = 0.5)
plt.show()

# baseline: a single unconstrained decision tree
model = DecisionTreeClassifier()
model.fit(X, Y)
print("Score for 1 tree:", model.score(X,Y))

plt.scatter(X[:,0], X[:,1], s=100, c = Y, alpha=0.5)
plot_decision_boundary(X, model)
plt.show()
class BaggedTreeClassifier():
    """Bagging ensemble of depth-2 decision trees with majority voting."""

    def __init__(self, B):
        self.B = B  # number of bootstrap rounds / trees

    def fit(self, X, Y):
        """Fit B trees, each on a bootstrap resample of (X, Y)."""
        n_samples = len(X)
        self.models = []
        for _ in range(self.B):
            sample = np.random.choice(n_samples, size=n_samples, replace=True)
            tree = DecisionTreeClassifier(max_depth=2)
            tree.fit(X[sample], Y[sample])
            self.models.append(tree)

    def predict(self, X):
        """Average the trees' 0/1 votes and round to the majority class."""
        votes = np.zeros(len(X))
        for tree in self.models:
            votes += tree.predict(X)
        return np.round(votes / self.B)

    def score(self, X, Y):
        """Mean accuracy of the majority vote on (X, Y)."""
        return np.mean(Y == self.predict(X))
# bag 200 shallow trees and compare against the single-tree baseline
model = BaggedTreeClassifier(200)
model.fit(X,Y)
print("Score for bagged model:", model.score(X,Y))

plt.scatter(X[:,0], X[:,1], s=100, c = Y, alpha=0.5)
plot_decision_boundary(X, model)
plt.show()
"[email protected]"
] | |
fc178c151c60fb7c502343b672b87caf93bd859b | 9c0cae2af1111529cde36f57021751d576537f9f | /edxmarketo/urls.py | 66f451fa9e36f2eda7fa65363c1eebc0d157fda0 | [] | no_license | ISCLC/edxmarketo | 425ccc3aeafac3d3d319bda96ddab6973cefdeda | 7d3223b9dad6b93863eed515c8b2e45b1133d6db | refs/heads/master | 2021-01-21T06:02:19.749558 | 2016-04-06T00:12:53 | 2016-04-06T00:12:53 | 43,975,976 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | from django.conf.urls import url
from django.conf import settings
# endpoint that stamps the Marketo "course access" date for a user
urlpatterns = [
    url(r'^marketo_course_access$', 'edxmarketo.views.set_marketo_course_access_date'),
]

# expose a Marketo-connection self-test route in development only
if settings.DEBUG:
    urlpatterns += [
        url(r'^marketo_test$', 'edxmarketo.views.test_marketo_connection'),
    ]
| [
"[email protected]"
] | |
f9c216d9e36499b2c663d881bad98abae74f7b89 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/playground/ebayer/zabbix/comar/package.py | 783303a21ba22b9377dc757238ebed5c1cbe27df | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
def postInstall(fromVersion, fromRelease, toVersion, toRelease):
    """COMAR post-install hook: hand the zabbix runtime directories over
    to the zabbix user and group."""
    for directory in ("/var/log/zabbix", "/var/run/zabbix"):
        os.system("/bin/chown -R zabbix.zabbix " + directory)
| [
"[email protected]"
] | |
d9b5608829804232254ebd2360dcae1406ed96ba | 9c4ceb78678a8755c6ac7e54a47e8054b6e79c2c | /mozdns/create_zone/tests.py | 256894622faca6aae8a54b697031acdf6bc21d4a | [] | no_license | caseybecking/inventory | 068ca06a9b2c28a4baaea1c71491fa029552ab1b | aa74ed891f665a0eed6899b631e08b2227e42887 | refs/heads/master | 2021-01-23T01:41:28.443608 | 2014-03-17T15:32:49 | 2014-03-17T15:32:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,506 | py | from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from mozdns.domain.models import Domain
from mozdns.nameserver.models import Nameserver
from mozdns.soa.models import SOA
from mozdns.tests.utils import random_label, random_byte
from mozdns.view.models import View
def localize(url):
    """Prefix *url* with the default locale segment used by the test client."""
    return '/en-US{0}'.format(url)
class CreateZoneTests(TestCase):
    """End-to-end tests for the create-zone AJAX view: successful zone
    creation, validation failures, and per-nameserver view assignment."""
    def setUp(self):
        self.c = Client()
        # base domains the random test zones hang off of
        Domain(name="com").save()
        Domain(name="mozilla.com").save()
        self.private_view = View.objects.create(name='private')
        self.public_view = View.objects.create(name='public')

    def get_post_data(self):
        """Return a valid set of data"""
        return {
            'root_domain': '{0}.{0}.mozilla.com'.format(
                random_label() + random_label()),
            'soa_primary': 'ns1.mozilla.com',
            'soa_contact': 'noc.mozilla.com',
            'nameserver_1': 'ns1.mozilla.com',
            'nameserver_2': 'ns2.mozilla.com',
            'nameserver_3': 'ns3.mozilla.com',
            'ttl_1': random_byte(),
            'ttl_2': random_byte(),
            'ttl_3': random_byte(),
            'private_view_1': 'on',
            'private_view_2': 'on',
            'private_view_3': '',
            'public_view_1': 'on',
            'public_view_2': '',
            'public_view_3': 'on',
        }
        # NS1 has all views
        # NS2 has private and no public
        # NS3 has no private and public

    def _ensure_no_change(self, post_data):
        # Helper: POST invalid data and assert no SOA/Domain/Nameserver
        # rows were created as a side effect.
        soa_count = SOA.objects.all().count()
        domain_count = Domain.objects.all().count()
        ns_count = Nameserver.objects.all().count()
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        new_soa_count = SOA.objects.all().count()
        new_domain_count = Domain.objects.all().count()
        new_ns_count = Nameserver.objects.all().count()
        self.assertEqual(new_soa_count, soa_count)
        self.assertEqual(new_domain_count, domain_count)
        self.assertEqual(new_ns_count, ns_count)

    def _check_domain_tree(self, root_domain_name):
        # Helper: root domain exists, is non-purgeable, and none of its
        # ancestors carry an SOA of their own.
        self.assertTrue(Domain.objects.filter(name=root_domain_name))
        root_domain = Domain.objects.get(name=root_domain_name)
        self.assertFalse(root_domain.purgeable)
        p_domain = root_domain.master_domain
        while p_domain:
            self.assertEqual(None, p_domain.soa)
            p_domain = p_domain.master_domain

    def test_create_zone(self):
        soa_count = SOA.objects.all().count()
        domain_count = Domain.objects.all().count()
        ns_count = Nameserver.objects.all().count()
        post_data = self.get_post_data()
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        # one SOA, two domains (label.label + label), three nameservers
        new_soa_count = SOA.objects.all().count()
        new_domain_count = Domain.objects.all().count()
        new_ns_count = Nameserver.objects.all().count()
        self.assertEqual(new_soa_count, soa_count + 1)
        self.assertEqual(new_domain_count, domain_count + 2)
        self.assertEqual(new_ns_count, ns_count + 3)
        self._check_domain_tree(post_data['root_domain'])

        # Do it again. The use of a random domain should give us a new set of
        # domain values.
        soa_count = SOA.objects.all().count()
        domain_count = Domain.objects.all().count()
        ns_count = Nameserver.objects.all().count()
        post_data = self.get_post_data()
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        new_soa_count = SOA.objects.all().count()
        new_domain_count = Domain.objects.all().count()
        new_ns_count = Nameserver.objects.all().count()
        self.assertEqual(new_soa_count, soa_count + 1)
        self.assertEqual(new_domain_count, domain_count + 2)
        self.assertEqual(new_ns_count, ns_count + 3)
        self._check_domain_tree(post_data['root_domain'])

    def test_more_realistic_creation(self):
        post_data = self.get_post_data()
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        first_root_domain = post_data['root_domain']
        self._check_domain_tree(first_root_domain)

        # Now create a new zone under the created zone. Make sure the tree
        # under the new zone is preserved.
        second_root_domain = "{0}.{1}".format(
            random_label(), first_root_domain)
        post_data['root_domain'] = second_root_domain
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        self._check_domain_tree(first_root_domain)
        self.assertTrue(Domain.objects.filter(name=second_root_domain))
        root_domain = Domain.objects.get(name=second_root_domain)
        self.assertFalse(root_domain.purgeable)
        self.assertFalse(root_domain.master_domain.purgeable)
        self.assertNotEqual(None, root_domain.soa)
        self.assertFalse(None, root_domain.master_domain.soa)

    def test_create_zone_bad_soa(self):
        # missing root domain
        post_data = self.get_post_data()
        post_data['root_domain'] = ''
        self._ensure_no_change(post_data)

        # Try a bad primary
        post_data = self.get_post_data()
        post_data['soa_primary'] = 'adsf..afds'
        self._ensure_no_change(post_data)

        # Try a bad contact
        post_data = self.get_post_data()
        post_data['soa_contact'] = 'adsf.#afds'
        self._ensure_no_change(post_data)

        # Try a missing contact
        post_data = self.get_post_data()
        del post_data['soa_contact']
        self._ensure_no_change(post_data)

    def test_create_zone_bad_ns(self):
        # Bad ns server
        post_data = self.get_post_data()
        post_data['nameserver_1'] = '..'
        self._ensure_no_change(post_data)

        # No glue
        post_data = self.get_post_data()
        post_data['nameserver_3'] = 'ns1.' + post_data['root_domain']
        self._ensure_no_change(post_data)

    def test_create_tld(self):
        # Try a bad primary
        post_data = self.get_post_data()
        post_data['root_domain'] = 'asdf'
        post_data['soa_primary'] = 'adsf..'
        self._ensure_no_change(post_data)

    def test_create_validate_views(self):
        # Try a bad primary
        post_data = self.get_post_data()
        post_data['root_domain'] = 'safasdf.mozilla.com'
        resp = self.c.post(localize(reverse('create-zone-ajax')), post_data)
        self.assertEqual(200, resp.status_code)
        d = Domain.objects.get(name=post_data['root_domain'])
        # NS1 has all views
        # NS2 has private and no public
        # NS3 has no private and public
        ns1 = d.nameserver_set.get(server=post_data['nameserver_1'])
        self.assertTrue(self.private_view in ns1.views.all())
        self.assertTrue(self.public_view in ns1.views.all())

        ns2 = d.nameserver_set.get(server=post_data['nameserver_2'])
        self.assertTrue(self.private_view in ns2.views.all())
        self.assertTrue(self.public_view not in ns2.views.all())

        ns3 = d.nameserver_set.get(server=post_data['nameserver_3'])
        self.assertTrue(self.private_view not in ns3.views.all())
        self.assertTrue(self.public_view in ns3.views.all())
| [
"[email protected]"
] | |
91923f2d70578815bd5c42388cd28f696541cc8a | 7ba4e38e0835cd009a078ce39a480b5bacaba21f | /sample_code/chap5/5.1.1.merton2d.py | 4d23bc23a0147325378c2537d556466d3e55833f | [] | no_license | moguranran/computer_vision_test | fe0641987905755c733e4ab16f48c3b76d01b3f4 | 4c5b5572d01e13a42eefb2423e66e34675c305cb | refs/heads/master | 2022-04-20T17:53:37.668609 | 2020-03-31T00:13:02 | 2020-03-31T00:13:02 | 249,196,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from PIL import Image
from pylab import *
# Python 2 only: ``execfile`` runs the loader script in the current
# namespace, defining ``points3D``, ``points2D``, ``P`` (list of cameras)
# and ``im1`` (the first Merton College image).
execfile('load_vggdata.py')
# Convert the 3D points to homogeneous coordinates and project them with
# the first camera.
X = vstack( (points3D,ones(points3D.shape[1])) )
x = P[0].project(X)
# Figure 1: the annotated 2D points drawn on top of image 1.
figure()
imshow(im1)
plot(points2D[0][0],points2D[0][1],'*')
axis('off')
# Figure 2: the projected 3D points on image 1, for visual comparison.
figure()
imshow(im1)
plot(x[0],x[1],'r.')
axis('off')
show()
| [
"[email protected]"
] | |
073f60314ce1438ff3d603dd52a52964ccd838c4 | 521580589177e7eb44ef809d7be2ae0f74d1d2ce | /tests/utils.py | 1d25782e40bb0dac7ea8339f7197e5e84187cbd0 | [
"BSD-3-Clause"
] | permissive | isabella232/typeseam | 3e2455ad27ae6234868a4b17ade25b9867500c69 | 3e9d090ec84f2110ae69051364bb0905feb2f02c | refs/heads/master | 2023-04-01T16:12:57.666686 | 2016-08-18T01:56:14 | 2016-08-18T01:56:14 | 358,551,773 | 0 | 0 | BSD-3-Clause | 2021-04-16T09:52:11 | 2021-04-16T09:49:54 | null | UTF-8 | Python | false | false | 213 | py | import re
from bs4 import BeautifulSoup
def get_value_for_name(name, unicode_text):
soup = BeautifulSoup(unicode_text, 'html.parser')
t = soup.find(attrs={'name': name})
return t.attrs.get('value')
| [
"[email protected]"
] | |
47b493a00ab878d569dab455ac4af3ca7a951d1c | f8ad6963bfc851657ea50c6a036cfad29cdd7f60 | /Books/DeepLearningLearningFromTheFounderOfKeras/chapter6/sub6_3_2.py | e226b633774a2b310e3e59774ec825790fb6fef2 | [] | no_license | foru120/PythonRepository | e1ab0265c0f50ef2e9acdf7447237c913560692b | db6b6be0f9fb91b0a81a3b6a2ec5631daab10f98 | refs/heads/master | 2021-01-01T06:53:11.728109 | 2019-04-25T13:52:50 | 2019-04-25T13:52:50 | 97,541,222 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,583 | py | #todo p.282 ~ p.299
#todo code 6-32 ~ code 6-44
#todo 6.3.2 데이터 준비
import os
import numpy as np
data_dir = 'G:/04.dataset/09.jena_climate'
fname = os.path.join(data_dir, 'jena_climate_2009_2016.csv')
f = open(fname)
data = f.read()
f.close()
lines = data.split('\n')
header = lines[0].split(',')
lines = lines[1:]
float_data = np.zeros((len(lines), len(header) - 1))
for i, line in enumerate(lines):
values = [float(x) for x in line.split(',')[1:]]
float_data[i, :] = values
mean = float_data[:200000].mean(axis=0)
std = float_data[:200000].std(axis=0)
float_data -= mean
float_data /= std
def generator(data, lookback, delay, min_index, max_index, shuffle=False, batch_size=128, step=6):
    """Yield batches of (samples, targets) windows from the climate array.

    :param data: the original (normalized) data array
    :param lookback: how many timesteps back the input window reaches
    :param delay: how many timesteps in the future the target lies
    :param min_index: lower index of the slice of ``data`` to draw from,
        used to separate the train/validation/test splits
    :param max_index: upper index of the slice; ``None`` means "up to the
        end, leaving room for the target delay"
    :param shuffle: draw windows randomly instead of chronologically
    :param batch_size: number of windows per batch
    :param step: sampling period within a window; 6 keeps one point per hour
    :return: an infinite generator of ``(samples, targets)`` where
        ``samples`` has shape (batch, lookback // step, features) and
        ``targets`` holds the temperature (column 1) ``delay`` steps ahead
    """
    if max_index is None:
        max_index = len(data) - delay - 1
    i = min_index + lookback
    while True:
        if shuffle:
            rows = np.random.randint(min_index + lookback, max_index, size=batch_size)
        else:
            # Chronological mode: wrap around once the end of the split is
            # reached so the generator never runs dry.
            if i + batch_size >= max_index:
                i = min_index + lookback
            rows = np.arange(i, min(i + batch_size, max_index))
            i += len(rows)
        samples = np.zeros((len(rows),
                            lookback // step,
                            data.shape[-1]))
        targets = np.zeros((len(rows),))
        for j, row in enumerate(rows):
            indices = range(rows[j] - lookback, rows[j], step)
            samples[j] = data[indices]
            # Target: the temperature column (index 1), ``delay`` steps ahead.
            targets[j] = data[rows[j] + delay][1]
        yield samples, targets
# Window configuration: use the previous 10 days (1440 ten-minute steps),
# sampled hourly (step=6), to predict the temperature 24 hours (144 steps)
# ahead, in batches of 128 windows.
lookback = 1440
step = 6
delay = 144
batch_size = 128
# Training split: first 200k timesteps, drawn in random order.
train_gen = generator(data=float_data,
                      lookback=lookback,
                      delay=delay,
                      min_index=0,
                      max_index=200000,
                      shuffle=True,
                      step=step,
                      batch_size=batch_size)
# Validation split: timesteps 200001-300000, chronological order.
val_gen = generator(data=float_data,
                    lookback=lookback,
                    delay=delay,
                    min_index=200001,
                    max_index=300000,
                    step=step,
                    batch_size=batch_size)
# Test split: everything after timestep 300000.
test_gen = generator(data=float_data,
                     lookback=lookback,
                     delay=delay,
                     min_index=300001,
                     max_index=None,
                     step=step,
                     batch_size=batch_size)
# Number of batches needed to cover each evaluation split once.
val_steps = (300000 - 200001 - lookback) // batch_size
test_steps = (len(float_data) - 300001 - lookback) // batch_size
#todo 상식 수준의 기준점 (loss: 0.29)
def evaluate_naive_method():
    """Common-sense baseline: predict that the temperature 24 hours from
    now equals the current temperature, and report the mean absolute error
    over the validation split (denormalized back to degrees Celsius)."""
    maes = []
    for _ in range(val_steps):
        batch_samples, batch_targets = next(val_gen)
        naive_preds = batch_samples[:, -1, 1]
        maes.append(np.mean(np.abs(naive_preds - batch_targets)))
    print('상식 수준의 기준점:', np.mean(maes) * std[1])
# evaluate_naive_method()
#todo 기본적인 머신 러닝 방법 (loss: 0.3378)
# from keras.models import Sequential
# from keras import layers
# from keras.optimizers import RMSprop
#
# model = Sequential()
# model.add(layers.Flatten(input_shape=(lookback // step, float_data.shape[-1])))
# model.add(layers.Dense(32, activation='relu'))
# model.add(layers.Dense(1))
#
# model.compile(optimizer=RMSprop(), loss='mae')
# history = model.fit_generator(generator=train_gen,
# steps_per_epoch=500,
# epochs=20,
# validation_data=val_gen,
# validation_steps=val_steps)
#todo 첫 번째 순환 신경망-GRU (loss: 0.2980)
# from keras.models import Sequential
# from keras import layers
# from keras.optimizers import RMSprop
#
# model = Sequential()
# model.add(layers.GRU(32, input_shape=(None, float_data.shape[-1])))
# model.add(layers.Dense(1))
#
# model.compile(optimizer=RMSprop(), loss='mae')
# history = model.fit_generator(generator=train_gen,
# steps_per_epoch=500,
# epochs=20,
# validation_data=val_gen,
# validation_steps=val_steps)
#todo 드롭아웃 규제된 GPU를 사용한 모델을 훈련하고 평가하기 (loss: 0.2702)
# from keras.models import Sequential
# from keras import layers
# from keras.optimizers import RMSprop
#
# model = Sequential()
# model.add(layers.GRU(32,
# dropout=0.2,
# recurrent_dropout=0.2,
# input_shape=(None, float_data.shape[-1])))
# model.add(layers.Dense(1))
#
# model.compile(optimizer=RMSprop(), loss='mae')
# history = model.fit_generator(generator=train_gen,
# steps_per_epoch=500,
# epochs=40,
# validation_data=val_gen,
# validation_steps=val_steps)
#todo 스태킹 순환 층 (loss: 0.2686)
# from keras.models import Sequential
# from keras import layers
# from keras.optimizers import RMSprop
#
# model = Sequential()
# model.add(layers.GRU(32,
# dropout=0.1,
# recurrent_dropout=0.5,
# return_sequences=True,
# input_shape=(None, float_data.shape[-1])))
# model.add(layers.GRU(64,
# activation='relu',
# dropout=0.1,
# recurrent_dropout=0.5))
# model.add(layers.Dense(1))
#
# model.compile(optimizer=RMSprop(), loss='mae')
# history = model.fit_generator(generator=train_gen,
# steps_per_epoch=500,
# epochs=40,
# validation_data=val_gen,
# validation_steps=val_steps)
#todo 양방향 LSTM 을 훈련하고 평가하기 (loss: )
from keras.models import Sequential
from keras import layers
from keras.optimizers import RMSprop
# Bidirectional GRU model: each input window is processed forwards and
# backwards, then a single temperature value is regressed.
model = Sequential()
model.add(layers.Bidirectional(layers.GRU(32), input_shape=(None, float_data.shape[-1])))
model.add(layers.Dense(1))
# MAE loss keeps the reported metric in (normalized) degrees Celsius.
model.compile(optimizer=RMSprop(), loss='mae')
history = model.fit_generator(generator=train_gen,
                              steps_per_epoch=500,
                              epochs=40,
                              validation_data=val_gen,
                              validation_steps=val_steps)
# Plot training vs. validation loss per epoch.
import matplotlib.pyplot as plt
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(loss) + 1)
plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
"[email protected]"
] | |
bb7df9eaa48f87436b6a91a78aade6d51d87c59b | bd275d991b6c87609c2d1c7a00e42d09d6f8284c | /zhrtvc/tools/run_local.py | aa1a5284c94eefbd1142c55e0c608ac20a4068c4 | [
"MIT"
] | permissive | wulol/zhrtvc | 01dcfe9e5a087dbdca8f2ba773a8a9e46cabc483 | 99e594621643d1f6a8197b2c1f616c1d4a89c79b | refs/heads/master | 2023-01-18T23:29:56.108717 | 2020-12-05T10:02:43 | 2020-12-05T10:02:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,034 | py | #!usr/bin/env python
# -*- coding: utf-8 -*-
# author: kuangdd
# date: 2020/2/20
"""
"""
from pathlib import Path
from functools import partial
from multiprocessing.pool import Pool
from matplotlib import pyplot as plt
from tqdm import tqdm
import collections as clt
import os
import re
import json
import numpy as np
import shutil
import aukit
from aukit.audio_griffinlim import default_hparams, mel_spectrogram
# from hparams import hparams
# Mel-spectrogram hyper-parameters; originally merged into aukit's
# ``default_hparams`` (see the commented-out update below).
my_hp = {
    "n_fft": 1024, "hop_size": 256, "win_size": 1024,
    "sample_rate": 22050, "max_abs_value": 4.0,
    "fmin": 0, "fmax": 8000,
    "preemphasize": True,
    'symmetric_mels': True,
}
# default_hparams.update(hparams.values())
# # default_hparams.update(my_hp)
#
# a = {(k, v) for k, v in hparams.values().items() if type(v) in {str, int, float, tuple, bool, type(None)}}
# b = {(k, v) for k, v in default_hparams.items() if type(v) in {str, int, float, tuple, bool, type(None)}}
# print(a - b)
# print(b - a)
#
# _pad_len = (default_hparams.n_fft - default_hparams.hop_size) // 2
def wavs2mels(indir: Path, outdir: Path):
    """Convert every ``*.wav`` under *indir* into a mel spectrogram saved
    as ``<stem>.npy`` in *outdir*.

    BUG FIX: the module-level ``_pad_len`` this function referenced is
    commented out above, so calling it raised ``NameError``. The same
    reflect-padding length is now derived locally from ``my_hp``.
    """
    pad_len = (my_hp["n_fft"] - my_hp["hop_size"]) // 2
    for fpath in tqdm(indir.glob("*.wav")):
        wav = aukit.load_wav(fpath, sr=16000)
        wav = np.pad(wav.flatten(), (pad_len, pad_len), mode="reflect")
        mel = mel_spectrogram(wav, default_hparams)
        np.save(outdir.joinpath(fpath.stem + ".npy"), mel, allow_pickle=False)
def get_train_files(indir: Path):
    """Write ``train_files.txt`` in *indir*, listing the slash-joined
    relative path of every wav of at least 32000 bytes; undersized files
    are printed and skipped."""
    skipped = []
    kept = []
    for wav_path in tqdm(sorted(indir.glob("**/*.wav"))):
        size = os.path.getsize(wav_path)
        if size < 32000:
            print(size, wav_path)
            skipped.append(wav_path)
            continue
        kept.append("/".join(wav_path.relative_to(indir).parts))
    with open(indir.joinpath("train_files.txt"), "w", encoding="utf8") as fout:
        for name in kept:
            fout.write(name + "\n")
# Matches a single CJK unified ideograph (a hanzi character).
_hanzi_re = re.compile(r'[\u4E00-\u9FA5]')
# Prosody break labels (#1..#4) mapped to pause symbols: '%' short, '$' long.
_pause_dict = {'#1': '%', '#2': '%', '#3': '$', '#4': '$'}
def convert_line(line):
    """Convert one BiaoBei metadata line into an AISHELL-3 style line.

    Input: ``index<TAB>hanzi_text_with_#N_breaks<TAB>pinyin_text``.
    Output: ``wav/biaobei/<index>.wav<TAB><pinyin tokens with pause
    symbols><TAB>biaobei`` followed by a newline. Each hanzi consumes one
    pinyin syllable; prosody markers become pause symbols; non-hanzi
    characters (punctuation) are dropped.
    """
    index, han_text, pny_text = line.strip().split('\t')
    pnys = pny_text.strip().split()
    # Split on prosody markers while keeping the markers themselves.
    parts = re.split(r'(#\d)', han_text)
    cnt = 0  # index of the next unconsumed pinyin syllable
    outs = []
    for part in parts:
        if part.startswith('#'):
            pny = _pause_dict[part]
            outs.append(pny)
        else:
            for zi in part:
                if _hanzi_re.search(zi):
                    if zi != '儿':
                        pny = pnys[cnt]
                        outs.append(pny)
                        cnt += 1
                    else:
                        # Erhua: '儿' only consumes a syllable when the
                        # corresponding pinyin actually starts with 'er';
                        # otherwise it merged with the previous syllable
                        # (e.g. 'daor4') and must be skipped.
                        if len(pnys) - 1 >= cnt and pnys[cnt].startswith('er'):
                            pny = pnys[cnt]
                            outs.append(pny)
                            cnt += 1
                # else:
                #     outs.append(zi)
    out_text = ' '.join(outs)
    # out_line = f'{index}|{out_text}|{han_text}\n'
    out_line = f'wav/biaobei/{index}.wav\t{out_text}\tbiaobei\n'
    return out_line
def biaobei2aishell3():
    """Convert a BiaoBei ``metadata.csv`` into an AISHELL-3 style
    ``train.txt`` by running :func:`convert_line` on every row.

    Example input row::

        000085  现在是#2道儿#2越走#1越宽#3,人气#2越搞#1越旺#4。  xian4 zai4 shi4 daor4 yue4 zou3 yue4 kuan1 ren2 qi4 yue4 gao3 yue4 wang4
    """
    inpath = r'F:\bigdata\public_audio\bznsyp\metadata.csv'
    outpath = r'F:\bigdata\public_audio\bznsyp\train.txt'
    # BUG FIX: the original iterated ``open(inpath)`` without ever closing
    # it; both files are now managed by context managers.
    with open(inpath, encoding='utf8') as fin, \
            open(outpath, 'wt', encoding='utf8') as fout:
        for line in tqdm(fin):
            fout.write(convert_line(line))
if __name__ == "__main__":
    print(__file__)
    # Mel extraction example (currently disabled).
    indir = Path(r"E:\lab\melgan\data\aliexamples")
    outdir = Path(r"E:\lab\melgan\data\aliexamples_mel")
    # outdir.mkdir(exist_ok=True)
    # wavs2mels(indir=indir, outdir=outdir)
    # Training-file listing example (currently disabled).
    indir = Path(r"E:\data\aliaudio\alijuzi")
    # get_train_files(indir=indir)
    # Smoke-test the converter on one sample line, then convert the whole
    # BiaoBei metadata file.
    line = '000085 现在是#2道儿#2越走#1越宽#3,人气#2越搞#1越旺#4。 xian4 zai4 shi4 daor4 yue4 zou3 yue4 kuan1 ren2 qi4 yue4 gao3 yue4 wang4'
    out = convert_line(line)
    print(out)
    biaobei2aishell3()
| [
"[email protected]"
] | |
abf6d188268e5a9aa36725a70f24302522e07c35 | 1a0d5e2e9da4be4babbb55fa0edfb4e708044567 | /P0-Sudoku/solution.py | 0191cc77c0671765cfe6a4b1185fc8cb0c0781ff | [] | no_license | andy1li/udacity-aind | 45adc280b8880aa1e5592f9b40ea76133eac10c8 | ecf1d9466578557e1ed991ffa925d620fd0f87ed | refs/heads/master | 2021-05-15T07:50:37.877110 | 2018-04-04T02:18:11 | 2018-04-04T02:18:11 | 109,645,801 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,562 | py | from utils import *
from collections import Counter
# The nine rows, nine columns and nine 3x3 squares of the board.
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI')
                for cs in ('123','456','789')]
# The two main diagonals -- this solver handles *diagonal* sudoku.
diag_units = [[r+c for r, c in zip(rows, cols)],
              [r+c for r, c in zip(rows, reversed(cols))]]
unitlist = row_units + column_units + square_units + diag_units
# Must be called after all units (including diagonals) are added to the unitlist
units = extract_units(unitlist, boxes)
peers = extract_peers(units, boxes)
def naked_twins(values):
    """Eliminate values using the naked twins strategy.

    Parameters
    ----------
    values(dict)
        a dictionary of the form {'box_name': '123456789', ...}

    Returns
    -------
    dict
        The values dictionary with the naked twins eliminated from peers
    """
    for unit in unitlist:
        # Count how often each 2-candidate string occurs in this unit.
        twos = Counter(values[box] for box in unit
                       if len(values[box]) == 2)
        for twins, count in twos.items():
            if count == 2: # if naked twins
                # The two twin digits must occupy the twin boxes, so they
                # can be removed from every other box in the unit.
                for box in unit:
                    if values[box] != twins: # for non-twins box
                        new_value = values[box]
                        for twin in twins:
                            new_value = new_value.replace(twin, '')
                        assign_value(values, box, new_value)
    return values
def eliminate(values):
    """Apply the eliminate strategy: a digit already assigned to a box
    cannot appear in any of that box's peers.

    Parameters
    ----------
    values(dict)
        a dictionary of the form {'box_name': '123456789', ...}

    Returns
    -------
    dict
        The values dictionary with the assigned values eliminated from peers
    """
    solved_boxes = [box for box in values
                    if len(values[box]) == 1]
    for solved_box in solved_boxes:
        digit = values[solved_box]
        for peer in peers[solved_box]:
            assign_value(values, peer, values[peer].replace(digit, ''))
    return values
def only_choice(values):
    """Apply the only choice strategy: if only one box in a unit can hold
    a digit, that box must take it.

    Parameters
    ----------
    values(dict)
        a dictionary of the form {'box_name': '123456789', ...}

    Returns
    -------
    dict
        The values dictionary with all single-valued boxes assigned
    """
    for unit in unitlist:
        for digit in '123456789':
            boxes_with_digit = [b for b in unit if digit in values[b]]
            if len(boxes_with_digit) == 1:
                only_box = boxes_with_digit[0]
                assign_value(values, only_box, digit)
    return values
def count_box(values, n):
    """Return how many boxes have a candidate string of length *n*."""
    return len([candidates for candidates in values.values()
                if len(candidates) == n])
def reduce_puzzle(values):
    """Repeatedly apply all constraint strategies until no progress.

    Parameters
    ----------
    values(dict)
        a dictionary of the form {'box_name': '123456789', ...}

    Returns
    -------
    dict or False
        The values dictionary once the strategies stop producing changes,
        or False if the puzzle became unsolvable (an empty box appeared)
    """
    while True:
        solved_before = count_box(values, 1)
        values = naked_twins(only_choice(eliminate(values)))
        # An empty candidate string means a contradiction was reached.
        if count_box(values, 0):
            return False
        # Stalled: no new box was solved during this pass.
        if count_box(values, 1) == solved_before:
            return values
def search(values):
    """Depth-first search combined with constraint propagation, for
    puzzles that repeated reduction alone cannot solve.

    Parameters
    ----------
    values(dict)
        a dictionary of the form {'box_name': '123456789', ...}

    Returns
    -------
    dict or False
        The values dictionary with all boxes assigned or False
    """
    values = reduce_puzzle(values)
    if values is False:
        return False  # contradiction reached earlier
    if count_box(values, 1) == 81:
        return values  # every box solved
    # Branch on the unsolved box with the fewest candidates.
    _, branch_box = min((len(values[box]), box) for box in boxes
                        if len(values[box]) > 1)
    for candidate in values[branch_box]:
        trial = values.copy()
        trial[branch_box] = candidate
        solved = search(trial)
        if solved:
            return solved
    return False
def solve(grid):
    """Find the solution to a Sudoku puzzle using search and constraint propagation

    Parameters
    ----------
    grid(string)
        a string representing a sudoku grid.
        
        Ex. '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'

    Returns
    -------
    dict or False
        The dictionary representation of the final sudoku grid or False if no solution exists.
    """
    # Constraint propagation alone may stall; search() falls back to DFS.
    return search(grid2values(grid))
if __name__ == "__main__":
    # Show the puzzle, solve it, and show the solution.
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(grid2values(diag_sudoku_grid))
    result = solve(diag_sudoku_grid)
    display(result)
    # Visualization is optional; swallow pygame problems gracefully.
    try:
        import PySudoku
        PySudoku.play(grid2values(diag_sudoku_grid), result, history)
    except SystemExit:
        pass
    except:
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
| [
"[email protected]"
] | |
b4a29825c5aa72eb08388b9b95ccd734c2c97f45 | 1a9696b7f30c7164e4b7c57933b7b2d8df83ab2c | /Camera_Functions.py | ac8225bbbdba87107de0a732272afe2616447dc5 | [
"MIT"
] | permissive | SBCV/PythonBlenderUtility | 5bc6d20bd4298097b6893b2739a6879e29b0e084 | 4f91c5a356fede103bcb8c2a9ba1d4d0b01aadc3 | refs/heads/master | 2020-04-02T02:06:52.888904 | 2020-02-17T13:04:02 | 2020-02-17T13:04:02 | 153,892,269 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,888 | py | import bpy
import numpy as np
from Utility.Types.Camera import Camera
from Utility.Math.Conversion.Conversion_Collection import convert_opengl_to_computer_vision_camera
from Utility.Logging_Extension import logger
def get_calibration_mat(blender_camera):
    """Build the 3x3 intrinsic (calibration) matrix for a Blender camera.

    Uses the current scene's render resolution together with the camera's
    focal length and sensor width; the camera's lens shift is folded into
    the principal point.

    :param blender_camera: Blender camera object.
    :return: calibration matrix as produced by
        ``Camera.compute_calibration_mat``.
    """
    #logger.info('get_calibration_mat: ...')
    scene = bpy.context.scene
    render_resolution_width = scene.render.resolution_x
    render_resolution_height = scene.render.resolution_y
    focal_length_in_mm = float(blender_camera.data.lens)
    sensor_width_in_mm = float(blender_camera.data.sensor_width)
    # Convert focal length from millimetres to pixels via the sensor width
    # and the larger render dimension. NOTE(review): this assumes the
    # sensor fits the larger dimension -- confirm for vertical-fit setups.
    focal_length_in_pixel = \
        float(max(scene.render.resolution_x, scene.render.resolution_y)) * \
        focal_length_in_mm / sensor_width_in_mm
    # Blender expresses lens shift as a fraction of the larger render
    # dimension; subtract it from the image-centre principal point.
    max_extent = max(render_resolution_width, render_resolution_height)
    p_x = render_resolution_width / 2.0 - blender_camera.data.shift_x * max_extent
    p_y = render_resolution_height / 2.0 - blender_camera.data.shift_y * max_extent
    calibration_mat = Camera.compute_calibration_mat(
        focal_length_in_pixel, cx=p_x, cy=p_y)
    #logger.info('get_calibration_mat: Done')
    return calibration_mat
def get_computer_vision_camera_matrix(blender_camera):
    """
    Blender and Computer Vision Camera Coordinate Frame Systems (like VisualSfM, Bundler)
    differ by their y and z axis

    :param blender_camera: Blender camera object whose ``matrix_world`` is
        converted.
    :return: 4x4 camera matrix in the computer-vision convention.
    :raises ValueError: if the camera carries a non-unit scale, in which
        case the 3x3 part of ``matrix_world`` would not be a pure rotation.
    """
    # Only if the objects have a scale of 1,
    # the 3x3 part of the corresponding matrix_world contains a pure rotation
    # Otherwise it also contains scale or shear information
    if tuple(blender_camera.scale) != (1, 1, 1):
        logger.vinfo('blender_camera.scale', blender_camera.scale)
        # BUG FIX: the original used ``assert False`` here, which is
        # silently stripped under ``python -O``; raise explicitly instead.
        raise ValueError(
            'camera must not be scaled: scale != (1, 1, 1)')
    opengl_cam_mat = np.array(blender_camera.matrix_world)
    computer_vision_cam_mat = convert_opengl_to_computer_vision_camera(
        opengl_cam_mat)
    return computer_vision_cam_mat
| [
"[email protected]"
] | |
eddd22a4f755d6844fc83b4621b94e1687021fd6 | e65d16ea1e8d412bac75a809be6d390126bdf528 | /homeassistant/components/google_assistant_sdk/__init__.py | e2791f6000f3ebf95250338fe0b609d7c1df0863 | [
"Apache-2.0"
] | permissive | syssi/home-assistant | 6347d57866cb16ab9d4499ad38e2be6f0399077f | fd43687833741b21221769d46b4d1ecef8a94711 | refs/heads/dev | 2023-08-17T09:31:52.680518 | 2023-06-11T14:22:12 | 2023-06-11T14:22:12 | 97,874,495 | 6 | 16 | Apache-2.0 | 2023-09-13T06:31:21 | 2017-07-20T20:12:37 | Python | UTF-8 | Python | false | false | 6,693 | py | """Support for Google Assistant SDK."""
from __future__ import annotations
import aiohttp
from gassist_text import TextAssistant
from google.oauth2.credentials import Credentials
import voluptuous as vol
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, discovery, intent
from homeassistant.helpers.config_entry_oauth2_flow import (
OAuth2Session,
async_get_config_entry_implementation,
)
from homeassistant.helpers.typing import ConfigType
from .const import (
CONF_ENABLE_CONVERSATION_AGENT,
CONF_LANGUAGE_CODE,
DATA_MEM_STORAGE,
DATA_SESSION,
DOMAIN,
)
from .helpers import (
GoogleAssistantSDKAudioView,
InMemoryStorage,
async_send_text_commands,
default_language_code,
)
SERVICE_SEND_TEXT_COMMAND = "send_text_command"
SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND = "command"
SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER = "media_player"
SERVICE_SEND_TEXT_COMMAND_SCHEMA = vol.All(
{
vol.Required(SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND): vol.All(
cv.ensure_list, [vol.All(str, vol.Length(min=1))]
),
vol.Optional(SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER): cv.comp_entity_ids,
},
)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Google Assistant SDK component."""
    # Register the legacy notify platform; the real work happens in
    # async_setup_entry once a config entry exists.
    hass.async_create_task(
        discovery.async_load_platform(
            hass, Platform.NOTIFY, DOMAIN, {CONF_NAME: DOMAIN}, config
        )
    )
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Google Assistant SDK from a config entry."""
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {}

    implementation = await async_get_config_entry_implementation(hass, entry)
    session = OAuth2Session(hass, entry, implementation)
    try:
        await session.async_ensure_token_valid()
    except aiohttp.ClientResponseError as err:
        # 4xx means our credentials were rejected -> trigger reauth flow;
        # anything else (5xx) is transient -> let HA retry setup later.
        if 400 <= err.status < 500:
            raise ConfigEntryAuthFailed(
                "OAuth session is not valid, reauth required"
            ) from err
        raise ConfigEntryNotReady from err
    except aiohttp.ClientError as err:
        raise ConfigEntryNotReady from err
    hass.data[DOMAIN][entry.entry_id][DATA_SESSION] = session

    # In-memory store + HTTP view for serving generated response audio.
    mem_storage = InMemoryStorage(hass)
    hass.data[DOMAIN][entry.entry_id][DATA_MEM_STORAGE] = mem_storage
    hass.http.register_view(GoogleAssistantSDKAudioView(mem_storage))

    await async_setup_service(hass)

    # Apply the current options immediately and re-apply on every change.
    entry.async_on_unload(entry.add_update_listener(update_listener))
    await update_listener(hass, entry)

    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    hass.data[DOMAIN].pop(entry.entry_id)
    # Remove the domain services only when the last loaded entry goes away;
    # other entries may still rely on them.
    loaded_entries = [
        entry
        for entry in hass.config_entries.async_entries(DOMAIN)
        if entry.state == ConfigEntryState.LOADED
    ]
    if len(loaded_entries) == 1:
        for service_name in hass.services.async_services()[DOMAIN]:
            hass.services.async_remove(DOMAIN, service_name)
    # Drop the conversation agent if this entry had registered one.
    if entry.options.get(CONF_ENABLE_CONVERSATION_AGENT, False):
        conversation.async_unset_agent(hass, entry)
    return True
async def async_setup_service(hass: HomeAssistant) -> None:
    """Add the services for Google Assistant SDK."""

    async def send_text_command(call: ServiceCall) -> None:
        """Send a text command to Google Assistant SDK."""
        commands: list[str] = call.data[SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND]
        # Optional media players that should play any returned audio.
        media_players: list[str] | None = call.data.get(
            SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER
        )
        await async_send_text_commands(hass, commands, media_players)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SEND_TEXT_COMMAND,
        send_text_command,
        schema=SERVICE_SEND_TEXT_COMMAND_SCHEMA,
    )
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Handle options update."""
    # Register or unregister the conversation agent to match the option.
    if entry.options.get(CONF_ENABLE_CONVERSATION_AGENT, False):
        agent = GoogleAssistantConversationAgent(hass, entry)
        conversation.async_set_agent(hass, entry, agent)
    else:
        conversation.async_unset_agent(hass, entry)
class GoogleAssistantConversationAgent(conversation.AbstractConversationAgent):
    """Google Assistant SDK conversation agent."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Initialize the agent."""
        self.hass = hass
        self.entry = entry
        # Both are created lazily on the first request; the assistant is
        # additionally recreated whenever the OAuth token is refreshed.
        self.assistant: TextAssistant | None = None
        self.session: OAuth2Session | None = None

    @property
    def attribution(self):
        """Return the attribution."""
        return {
            "name": "Powered by Google Assistant SDK",
            "url": "https://www.home-assistant.io/integrations/google_assistant_sdk/",
        }

    @property
    def supported_languages(self) -> list[str]:
        """Return a list of supported languages."""
        # Only the single configured language (falling back to the locale
        # derived from the Home Assistant configuration).
        language_code = self.entry.options.get(
            CONF_LANGUAGE_CODE, default_language_code(self.hass)
        )
        return [language_code]

    async def async_process(
        self, user_input: conversation.ConversationInput
    ) -> conversation.ConversationResult:
        """Process a sentence."""
        if self.session:
            session = self.session
        else:
            session = self.hass.data[DOMAIN][self.entry.entry_id][DATA_SESSION]
            self.session = session
        if not session.valid_token:
            # Refresh the token and force the assistant to be rebuilt with
            # the new credentials below.
            await session.async_ensure_token_valid()
            self.assistant = None
        if not self.assistant:
            credentials = Credentials(session.token[CONF_ACCESS_TOKEN])
            language_code = self.entry.options.get(
                CONF_LANGUAGE_CODE, default_language_code(self.hass)
            )
            self.assistant = TextAssistant(credentials, language_code)
        # NOTE(review): TextAssistant.assist looks like a synchronous
        # network call executed on the event loop -- confirm whether it
        # should be dispatched to an executor.
        resp = self.assistant.assist(user_input.text)
        text_response = resp[0] or "<empty response>"

        intent_response = intent.IntentResponse(language=user_input.language)
        intent_response.async_set_speech(text_response)
        return conversation.ConversationResult(
            response=intent_response, conversation_id=user_input.conversation_id
        )
| [
"[email protected]"
] | |
5d38c3be05c32a6e2c9c7214932c4650eef0e83d | 01776becc70eafe6dcbad140eb40a862bc623341 | /LeetCode/Easy/989.Add to Array-Form of Integer.py | d22f06f8e69782b38fd59c1bb2f89bad9b11807f | [] | no_license | AnthonyTsui/AlgoPractice | 8eae4d197080c0a94b0127ed5a95198f5d2f3269 | 59fcb2826fb95a304cf7b4b9a77c2ae710fb5c9a | refs/heads/master | 2022-12-02T18:20:58.104356 | 2020-08-29T23:58:17 | 2020-08-29T23:58:17 | 250,649,377 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | # For a non-negative integer X, the array-form of X is an array of its digits in left to right order. For example, if X = 1231, then the array form is [1,2,3,1].
# Given the array-form A of a non-negative integer X, return the array-form of the integer X+K.
# Example 1:
# Input: A = [1,2,0,0], K = 34
# Output: [1,2,3,4]
# Explanation: 1200 + 34 = 1234
# Example 2:
# Input: A = [2,7,4], K = 181
# Output: [4,5,5]
# Explanation: 274 + 181 = 455
# Example 3:
# Input: A = [2,1,5], K = 806
# Output: [1,0,2,1]
# Explanation: 215 + 806 = 1021
# Example 4:
# Input: A = [9,9,9,9,9,9,9,9,9,9], K = 1
# Output: [1,0,0,0,0,0,0,0,0,0,0]
# Explanation: 9999999999 + 1 = 10000000000
#Time Complexity: O(N)
#Space Complexity: O(1)
class Solution(object):
    def addToArrayForm(self, A, K):
        """Return the array-form of the integer represented by A, plus K.

        :type A: List[int] -- digits of a non-negative integer, left to right
        :type K: int -- non-negative integer to add
        :rtype: List[int]

        Improvements over the original: the input list ``A`` is no longer
        mutated in place, and the dead commented-out string-based variant
        was removed. Digits are summed right-to-left with a running carry
        (seeded with K), so time is O(len(A) + len(digits(K))) and no
        int/str round-trip is needed.
        """
        result = []
        carry = K
        # Consume A's digits from least significant to most significant.
        for digit in reversed(A):
            carry, out_digit = divmod(carry + digit, 10)
            result.append(out_digit)
        # Emit whatever is left of the carry (covers K longer than A).
        while carry:
            carry, out_digit = divmod(carry, 10)
            result.append(out_digit)
        return result[::-1]
| [
"[email protected]"
] | |
10f2137ddf75bb1c12336712495266baa6e927af | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /watchdog/events.py | f28a2fc256392fdd3c0a19df0f441453dadf24db | [] | no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,597 | py | #Embedded file name: watchdog\events.py
"""
:module: watchdog.events
:synopsis: File system events and event handlers.
:author: [email protected] (Yesudeep Mangalapilly)
Event Classes
-------------
.. autoclass:: FileSystemEvent
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: FileSystemMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileMovedEvent
:members:
:show-inheritance:
.. autoclass:: DirMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileModifiedEvent
:members:
:show-inheritance:
.. autoclass:: DirModifiedEvent
:members:
:show-inheritance:
.. autoclass:: FileCreatedEvent
:members:
:show-inheritance:
.. autoclass:: DirCreatedEvent
:members:
:show-inheritance:
.. autoclass:: FileDeletedEvent
:members:
:show-inheritance:
.. autoclass:: DirDeletedEvent
:members:
:show-inheritance:
Event Handler Classes
---------------------
.. autoclass:: FileSystemEventHandler
:members:
:show-inheritance:
.. autoclass:: PatternMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: RegexMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: LoggingEventHandler
:members:
:show-inheritance:
"""
import os.path
import logging
import re
from pathtools.path import absolute_path
from pathtools.patterns import match_any_paths
from watchdog.utils import has_attribute
from watchdog.utils import unicode_paths
# Canonical event-type strings shared by all FileSystemEvent subclasses.
EVENT_TYPE_MOVED = 'moved'
EVENT_TYPE_DELETED = 'deleted'
EVENT_TYPE_CREATED = 'created'
EVENT_TYPE_MODIFIED = 'modified'
class FileSystemEvent(object):
    """
    Immutable type that represents a file system event that is triggered
    when a change occurs on the monitored file system.

    Instances compare equal and hash by ``(event_type, src_path,
    is_directory)``, so they can be used as dictionary keys or set members.
    """

    def __init__(self, event_type, src_path, is_directory = False):
        self._event_type = event_type
        self._src_path = src_path
        self._is_directory = is_directory

    @property
    def event_type(self):
        """The type of the event as a string."""
        return self._event_type

    @property
    def src_path(self):
        """Source path of the file system object that triggered this event."""
        return self._src_path

    @property
    def is_directory(self):
        """True if event was emitted for a directory; False otherwise."""
        return self._is_directory

    @property
    def key(self):
        # Identity tuple used by equality and hashing.
        return (self.event_type, self.src_path, self.is_directory)

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return '<%(class_name)s: event_type=%(event_type)s, src_path=%(src_path)s, is_directory=%(is_directory)s>' % dict(class_name=self.__class__.__name__, event_type=self.event_type, src_path=self.src_path, is_directory=self.is_directory)

    def __eq__(self, event):
        return self.key == event.key

    def __ne__(self, event):
        return not self.__eq__(event)

    def __hash__(self):
        return hash(self.key)
class FileSystemMovedEvent(FileSystemEvent):
    """
    File system event representing any kind of file system movement.
    """

    def __init__(self, src_path, dest_path, is_directory):
        FileSystemEvent.__init__(self, EVENT_TYPE_MOVED, src_path, is_directory)
        self._dest_path = dest_path

    @property
    def dest_path(self):
        """The destination path of the move event."""
        return self._dest_path

    @property
    def key(self):
        # dest_path also participates in equality/hashing for move events.
        return (self.event_type, self.src_path,
                self.dest_path, self.is_directory)

    def __repr__(self):
        return '<%(class_name)s: src_path=%(src_path)s, dest_path=%(dest_path)s, is_directory=%(is_directory)s>' % dict(class_name=self.__class__.__name__, src_path=self.src_path, dest_path=self.dest_path, is_directory=self.is_directory)
class FileDeletedEvent(FileSystemEvent):
    """File system event representing file deletion on the file system."""

    def __init__(self, src_path):
        super(FileDeletedEvent, self).__init__(
            event_type=EVENT_TYPE_DELETED, src_path=src_path)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class FileModifiedEvent(FileSystemEvent):
    """File system event representing file modification on the file system."""

    def __init__(self, src_path):
        super(FileModifiedEvent, self).__init__(
            event_type=EVENT_TYPE_MODIFIED, src_path=src_path)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class FileCreatedEvent(FileSystemEvent):
    """File system event representing file creation on the file system."""

    def __init__(self, src_path):
        super(FileCreatedEvent, self).__init__(
            event_type=EVENT_TYPE_CREATED, src_path=src_path)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class FileMovedEvent(FileSystemMovedEvent):
    """File system event representing file movement on the file system."""

    def __init__(self, src_path, dest_path):
        super(FileMovedEvent, self).__init__(
            src_path=src_path, dest_path=dest_path, is_directory=False)

    def __repr__(self):
        return '<{0}: src_path={1}, dest_path={2}>'.format(
            self.__class__.__name__, self.src_path, self.dest_path)
class DirDeletedEvent(FileSystemEvent):
    """File system event representing directory deletion on the file system."""

    def __init__(self, src_path):
        super(DirDeletedEvent, self).__init__(
            event_type=EVENT_TYPE_DELETED, src_path=src_path,
            is_directory=True)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class DirModifiedEvent(FileSystemEvent):
    """
    File system event representing directory modification on the file system.
    """

    def __init__(self, src_path):
        super(DirModifiedEvent, self).__init__(
            event_type=EVENT_TYPE_MODIFIED, src_path=src_path,
            is_directory=True)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class DirCreatedEvent(FileSystemEvent):
    """File system event representing directory creation on the file system."""

    def __init__(self, src_path):
        super(DirCreatedEvent, self).__init__(
            event_type=EVENT_TYPE_CREATED, src_path=src_path,
            is_directory=True)

    def __repr__(self):
        return '<{0}: src_path={1}>'.format(self.__class__.__name__,
                                            self.src_path)
class DirMovedEvent(FileSystemMovedEvent):
    """File system event representing directory movement on the file system."""

    def __init__(self, src_path, dest_path):
        super(DirMovedEvent, self).__init__(
            src_path=src_path, dest_path=dest_path, is_directory=True)

    def __repr__(self):
        return '<{0}: src_path={1}, dest_path={2}>'.format(
            self.__class__.__name__, self.src_path, self.dest_path)

    def sub_moved_events(self, _walker = os.walk):
        """Generates moved events for file system objects within the
        moved directory.

        :param _walker:
            Walker used to walk directory trees :func:`os.walk` style. Sanity
            tests use this parameter to inject a mock walker that behaves like
            :func:`os.walk`.
        :returns:
            iterable of event objects of type :class:`FileMovedEvent` and
            :class:`DirMovedEvent`.
        """
        return list(_generate_sub_moved_events_for(
            self.src_path, self.dest_path, _walker=_walker))
class FileSystemEventHandler(object):
    """Base file system event handler that you can override methods from.
    """

    def dispatch(self, event):
        """Dispatches events to the appropriate methods.

        Always invokes :meth:`on_any_event` first, then the hook that
        matches the event's type.

        :param event:
            The event object representing the file system event.
        :type event:
            :class:`FileSystemEvent`
        """
        self.on_any_event(event)
        handlers = {EVENT_TYPE_CREATED: self.on_created,
                    EVENT_TYPE_DELETED: self.on_deleted,
                    EVENT_TYPE_MODIFIED: self.on_modified,
                    EVENT_TYPE_MOVED: self.on_moved}
        handlers[event.event_type](event)

    def on_any_event(self, event):
        """Catch-all event handler.

        :param event:
            The event object representing the file system event.
        :type event:
            :class:`FileSystemEvent`
        """
        pass

    def on_moved(self, event):
        """Called when a file or a directory is moved or renamed.

        :param event:
            Event representing file/directory movement.
        :type event:
            :class:`DirMovedEvent` or :class:`FileMovedEvent`
        """
        pass

    def on_created(self, event):
        """Called when a file or directory is created.

        :param event:
            Event representing file/directory creation.
        :type event:
            :class:`DirCreatedEvent` or :class:`FileCreatedEvent`
        """
        pass

    def on_deleted(self, event):
        """Called when a file or directory is deleted.

        :param event:
            Event representing file/directory deletion.
        :type event:
            :class:`DirDeletedEvent` or :class:`FileDeletedEvent`
        """
        pass

    def on_modified(self, event):
        """Called when a file or directory is modified.

        :param event:
            Event representing file/directory modification.
        :type event:
            :class:`DirModifiedEvent` or :class:`FileModifiedEvent`
        """
        pass
class PatternMatchingEventHandler(FileSystemEventHandler):
    """
    Matches given patterns with file paths associated with occurring events.
    """

    def __init__(self, patterns = None, ignore_patterns = None, ignore_directories = False, case_sensitive = False):
        super(PatternMatchingEventHandler, self).__init__()
        self._patterns = patterns
        self._ignore_patterns = ignore_patterns
        self._ignore_directories = ignore_directories
        self._case_sensitive = case_sensitive

    @property
    def patterns(self):
        """
        (Read-only)
        Patterns to allow matching event paths.
        """
        return self._patterns

    @property
    def ignore_patterns(self):
        """
        (Read-only)
        Patterns to ignore matching event paths.
        """
        return self._ignore_patterns

    @property
    def ignore_directories(self):
        """
        (Read-only)
        ``True`` if directories should be ignored; ``False`` otherwise.
        """
        return self._ignore_directories

    @property
    def case_sensitive(self):
        """
        (Read-only)
        ``True`` if path names should be matched sensitive to case; ``False``
        otherwise.
        """
        return self._case_sensitive

    def dispatch(self, event):
        """Dispatches events to the appropriate methods.

        The event is dropped unless at least one of its paths matches
        ``patterns`` without matching ``ignore_patterns``.

        :param event:
            The event object representing the file system event.
        :type event:
            :class:`FileSystemEvent`
        """
        if self.ignore_directories and event.is_directory:
            return
        paths = []
        if has_attribute(event, 'dest_path'):
            paths.append(unicode_paths.decode(event.dest_path))
        if event.src_path:
            paths.append(unicode_paths.decode(event.src_path))
        if not match_any_paths(paths,
                               included_patterns=self.patterns,
                               excluded_patterns=self.ignore_patterns,
                               case_sensitive=self.case_sensitive):
            return
        self.on_any_event(event)
        handlers = {EVENT_TYPE_CREATED: self.on_created,
                    EVENT_TYPE_DELETED: self.on_deleted,
                    EVENT_TYPE_MODIFIED: self.on_modified,
                    EVENT_TYPE_MOVED: self.on_moved}
        handlers[event.event_type](event)
class RegexMatchingEventHandler(FileSystemEventHandler):
    """
    Matches given regexes with file paths associated with occurring events.

    An event is dispatched only when at least one of its paths matches one
    of ``regexes`` and none of its paths matches ``ignore_regexes``.
    """

    def __init__(self, regexes=None, ignore_regexes=None,
                 ignore_directories=False, case_sensitive=False):
        super(RegexMatchingEventHandler, self).__init__()
        # Use None sentinels instead of mutable default arguments (the old
        # defaults ['.*'] and [] were shared list objects). Effective
        # defaults are unchanged: match everything, ignore nothing.
        if regexes is None:
            regexes = ['.*']
        if ignore_regexes is None:
            ignore_regexes = []
        flags = 0 if case_sensitive else re.I
        self._regexes = [re.compile(r, flags) for r in regexes]
        self._ignore_regexes = [re.compile(r, flags) for r in ignore_regexes]
        self._ignore_directories = ignore_directories
        self._case_sensitive = case_sensitive

    @property
    def regexes(self):
        """
        (Read-only)
        Regexes to allow matching event paths.
        """
        return self._regexes

    @property
    def ignore_regexes(self):
        """
        (Read-only)
        Regexes to ignore matching event paths.
        """
        return self._ignore_regexes

    @property
    def ignore_directories(self):
        """
        (Read-only)
        ``True`` if directories should be ignored; ``False`` otherwise.
        """
        return self._ignore_directories

    @property
    def case_sensitive(self):
        """
        (Read-only)
        ``True`` if path names should be matched sensitive to case; ``False``
        otherwise.
        """
        return self._case_sensitive

    def dispatch(self, event):
        """Dispatches events to the appropriate methods.

        :param event:
            The event object representing the file system event.
        :type event:
            :class:`FileSystemEvent`
        """
        if self.ignore_directories and event.is_directory:
            return
        paths = []
        if has_attribute(event, 'dest_path'):
            paths.append(unicode_paths.decode(event.dest_path))
        if event.src_path:
            paths.append(unicode_paths.decode(event.src_path))
        # Ignore rules take precedence over allow rules.
        if any(r.match(p) for r in self.ignore_regexes for p in paths):
            return
        if not any(r.match(p) for r in self.regexes for p in paths):
            return
        self.on_any_event(event)
        handlers = {EVENT_TYPE_CREATED: self.on_created,
                    EVENT_TYPE_DELETED: self.on_deleted,
                    EVENT_TYPE_MODIFIED: self.on_modified,
                    EVENT_TYPE_MOVED: self.on_moved}
        handlers[event.event_type](event)
class LoggingEventHandler(FileSystemEventHandler):
    """Logs all the events captured."""

    @staticmethod
    def _what(event):
        # Human-readable kind of the affected object, for log messages.
        return 'directory' if event.is_directory else 'file'

    def on_moved(self, event):
        super(LoggingEventHandler, self).on_moved(event)
        logging.info('Moved %s: from %s to %s', self._what(event),
                     event.src_path, event.dest_path)

    def on_created(self, event):
        super(LoggingEventHandler, self).on_created(event)
        logging.info('Created %s: %s', self._what(event), event.src_path)

    def on_deleted(self, event):
        super(LoggingEventHandler, self).on_deleted(event)
        logging.info('Deleted %s: %s', self._what(event), event.src_path)

    def on_modified(self, event):
        super(LoggingEventHandler, self).on_modified(event)
        logging.info('Modified %s: %s', self._what(event), event.src_path)
class LoggingFileSystemEventHandler(LoggingEventHandler):
    """For backwards-compatibility. Please use :class:`LoggingEventHandler` instead."""
    # Deprecated alias retained so existing imports keep working.
    pass
def _generate_sub_moved_events_for(src_dir_path, dest_dir_path, _walker = os.walk):
    """Generates an event list of :class:`DirMovedEvent` and :class:`FileMovedEvent`
    objects for all the files and directories within the given moved directory
    that were moved along with the directory.

    :param src_dir_path:
        The source path of the moved directory (may be falsy/None, in which
        case the generated events carry ``None`` as their source path).
    :param dest_dir_path:
        The destination path of the moved directory.
    :param _walker:
        Walker used to walk directory trees :func:`os.walk` style. Sanity tests
        use this parameter to inject a mock walker that behaves like
        :func:`os.walk`.
    :returns:
        An iterable of file system events of type :class:`DirMovedEvent` and
        :class:`FileMovedEvent`.
    """
    src_dir_path = absolute_path(src_dir_path) if src_dir_path else None
    dest_dir_path = absolute_path(dest_dir_path)

    def _rebased(full_path):
        # Rebase only the *leading* dest_dir_path prefix onto src_dir_path.
        # The previous str.replace() rewrote every occurrence of the
        # destination path, corrupting any path that contained it twice.
        # (Paths yielded by walking dest_dir_path always start with it,
        # assuming _walker behaves like os.walk.)
        if src_dir_path is None:
            return None
        return src_dir_path + full_path[len(dest_dir_path):]

    for root, directories, filenames in _walker(dest_dir_path):
        for directory in directories:
            full_path = os.path.join(root, directory)
            yield DirMovedEvent(_rebased(full_path), full_path)
        for filename in filenames:
            full_path = os.path.join(root, filename)
            yield FileMovedEvent(_rebased(full_path), full_path)
| [
"[email protected]"
] | |
0fea981dccef636eea89d4210cf00364ba2d1897 | 46caeb8f8a896036b5a1d054416c7c5530076381 | /tests/test_app.py | 76a63be24e856031dfeed6d51195aeef37a16987 | [
"MIT"
] | permissive | c137digital/unv_template | dce0a65eacf18faa3ca6cdf08202fe11d45c943a | 2c356478408a27eb68eaaf38a74b32a6633a11d0 | refs/heads/master | 2020-04-14T06:41:16.725736 | 2019-11-02T13:19:27 | 2019-11-02T13:19:27 | 163,693,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | import pytest
from package.app import SomeExampleApp
@pytest.fixture
def instance():
    # Fresh SomeExampleApp per test so the call counter starts at zero.
    return SomeExampleApp('test')
def test_calls_count(instance):
    # Order matters: ncalls must be 0 before power() runs and 1 after.
    assert instance.ncalls == 0
    assert instance.name == 'test'
    assert instance.power(2, 3) == 8
    assert instance.ncalls == 1
"[email protected]"
] | |
47ed7e9ba7f07c5b42cfeccb783d83179ac0f0f7 | 93e054990eab64c6567845d33c7cffa30f44139a | /apps/dc_algorithm/management/commands/band_math_app/models.py | 30b7419334cd45202656ca3e156f370f41918d7f | [
"Apache-2.0"
] | permissive | conicRelief/data_cube_ui | 04d163ac3b94bb88abf0530e8b81cf6bed24ec6c | 15b694c1944c7a692dd98f8c4181a4b7b03bfbeb | refs/heads/master | 2021-06-28T14:36:31.316200 | 2017-09-02T17:53:44 | 2017-09-02T17:53:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,226 | py | # Copyright 2016 United States Government as represented by the Administrator
# of the National Aeronautics and Space Administration. All Rights Reserved.
#
# Portion of this code is Copyright Geoscience Australia, Licensed under the
# Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License
# at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The CEOS 2 platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.db import models
from django.core.exceptions import ValidationError
from django.conf import settings
from apps.dc_algorithm.models import Area, Compositor, Satellite
from apps.dc_algorithm.models import (Query as BaseQuery, Metadata as BaseMetadata, Result as BaseResult, ResultType as
BaseResultType, UserHistory as BaseUserHistory, AnimationType as
BaseAnimationType, ToolInfo as BaseToolInfo)
from utils.data_cube_utilities.dc_mosaic import (create_mosaic, create_median_mosaic, create_max_ndvi_mosaic, create_min_ndvi_mosaic)
import datetime
import numpy as np
class UserHistory(BaseUserHistory):
    """
    Extends the base user history adding additional fields
    See the dc_algorithm.UserHistory docstring for more information
    """
    # No app-specific fields yet; subclassing gives this app its own table.
    pass
class ToolInfo(BaseToolInfo):
    """
    Extends the base ToolInfo adding additional fields
    See the dc_algorithm.ToolInfo docstring for more information
    """
    # No app-specific fields yet; subclassing gives this app its own table.
    pass
class Query(BaseQuery):
    """
    Extends base query, adds app specific elements. See the dc_algorithm.Query docstring for more information
    Defines the get_or_create_query_from_post as required, adds new fields, recreates the unique together
    field, and resets the abstract property. Functions are added to get human readable names for various properties,
    foreign keys should define __str__ for a human readable name.
    """
    compositor = models.ForeignKey(Compositor)
    #TODO: add color scale here
    color_scale_path = '/home/' + settings.LOCAL_USER + '/Datacube/data_cube_ui/utils/color_scales/default_color_scale'
    # Bands loaded from the Data Cube; cf_mask is presumably the cloud mask
    # band used to build the clear-pixel mask — confirm against the tasks.
    measurements = ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'cf_mask']
    base_result_dir = '/datacube/ui_results/band_math_app'
    class Meta(BaseQuery.Meta):
        unique_together = (('platform', 'area_id', 'time_start', 'time_end', 'latitude_max', 'latitude_min',
                            'longitude_max', 'longitude_min', 'title', 'description', 'compositor'))
        abstract = True
    def get_fields_with_labels(self, labels, field_names):
        # Pair each human readable label with the value of the matching field.
        for idx, label in enumerate(labels):
            yield [label, getattr(self, field_names[idx])]
    def get_chunk_size(self):
        """Implements get_chunk_size as required by the base class
        See the base query class docstring for more information.
        """
        # Non-iterative compositors need the whole time stack at once, so
        # chunk small geographically and not at all in time.
        if not self.compositor.is_iterative():
            return {'time': None, 'geographic': 0.005}
        return {'time': 25, 'geographic': 0.5}
    def get_iterative(self):
        """implements get_iterative as required by the base class
        See the base query class docstring for more information.
        """
        # Median requires all acquisitions simultaneously.
        return self.compositor.id != "median_pixel"
    def get_reverse_time(self):
        """implements get_reverse_time as required by the base class
        See the base query class docstring for more information.
        """
        # Most-recent compositing iterates newest acquisitions first.
        return self.compositor.id == "most_recent"
    def get_processing_method(self):
        """implements get_processing_method as required by the base class
        See the base query class docstring for more information.
        """
        processing_methods = {
            'most_recent': create_mosaic,
            'least_recent': create_mosaic,
            'max_ndvi': create_max_ndvi_mosaic,
            'min_ndvi': create_min_ndvi_mosaic,
            'median_pixel': create_median_mosaic
        }
        # Unknown compositor ids fall back to the default mosaicker.
        return processing_methods.get(self.compositor.id, create_mosaic)
    @classmethod
    def get_or_create_query_from_post(cls, form_data):
        """Implements the get_or_create_query_from_post func required by base class
        See the get_or_create_query_from_post docstring for more information.
        Parses out the time start/end, creates the product, and formats the title/description
        Args:
            form_data: python dict containing either a single obj or a list formatted with post_data_to_dict
        Returns:
            Tuple containing the query model and a boolean value signifying if it was created or loaded.
        """
        query_data = form_data
        query_data['title'] = "Band Math Query" if 'title' not in form_data or form_data['title'] == '' else form_data[
            'title']
        query_data['description'] = "None" if 'description' not in form_data or form_data[
            'description'] == '' else form_data['description']
        # Drop any POSTed keys that do not correspond to model fields.
        valid_query_fields = [field.name for field in cls._meta.get_fields()]
        query_data = {key: query_data[key] for key in valid_query_fields if key in query_data}
        try:
            query = cls.objects.get(**query_data)
            return query, False
        except cls.DoesNotExist:
            query = cls(**query_data)
            query.save()
            return query, True
class Metadata(BaseMetadata):
    """
    Extends base metadata, adding additional fields and adding abstract=True.
    zipped_metadata_fields is required.
    See the dc_algorithm.Metadata docstring for more information
    """
    zipped_metadata_fields = [
        'acquisition_list', 'clean_pixels_per_acquisition', 'clean_pixel_percentages_per_acquisition'
    ]
    class Meta(BaseMetadata.Meta):
        abstract = True
    def metadata_from_dataset(self, metadata, dataset, clear_mask, parameters):
        """implements metadata_from_dataset as required by the base class
        See the base metadata class docstring for more information.
        """
        # Accumulate per-acquisition clear-pixel counts keyed by timestamp.
        for metadata_index, time in enumerate(dataset.time.values.astype('M8[ms]').tolist()):
            # NOTE(review): assumes clear_mask is boolean; '== True' is then
            # equivalent to summing the slice directly — confirm dtype.
            clean_pixels = np.sum(clear_mask[metadata_index, :, :] == True)
            if time not in metadata:
                metadata[time] = {}
                metadata[time]['clean_pixels'] = 0
            metadata[time]['clean_pixels'] += clean_pixels
        return metadata
    def combine_metadata(self, old, new):
        """implements combine_metadata as required by the base class
        See the base metadata class docstring for more information.
        """
        # Merge per-date counts, summing where the same date appears in both.
        for key in new:
            if key in old:
                old[key]['clean_pixels'] += new[key]['clean_pixels']
                continue
            old[key] = new[key]
        return old
    def final_metadata_from_dataset(self, dataset):
        """implements final_metadata_from_dataset as required by the base class
        See the base metadata class docstring for more information.
        """
        self.pixel_count = len(dataset.latitude) * len(dataset.longitude)
        # -9999 is treated as the nodata sentinel in the output product.
        self.clean_pixel_count = np.sum(dataset[list(dataset.data_vars)[0]].values != -9999)
        self.percentage_clean_pixels = (self.clean_pixel_count / self.pixel_count) * 100
        self.save()
    def metadata_from_dict(self, metadata_dict):
        """implements metadata_from_dict as required by the base class
        See the base metadata class docstring for more information.
        """
        # Serialize per-acquisition stats as comma-joined strings, newest first.
        dates = list(metadata_dict.keys())
        dates.sort(reverse=True)
        self.total_scenes = len(dates)
        self.scenes_processed = len(dates)
        self.acquisition_list = ",".join([date.strftime("%m/%d/%Y") for date in dates])
        self.clean_pixels_per_acquisition = ",".join([str(metadata_dict[date]['clean_pixels']) for date in dates])
        self.clean_pixel_percentages_per_acquisition = ",".join(
            [str((metadata_dict[date]['clean_pixels'] * 100) / self.pixel_count) for date in dates])
        self.save()
class Result(BaseResult):
    """
    Extends base result, adding additional fields and adding abstract=True
    See the dc_algorithm.Result docstring for more information
    """
    # result path + other data. More to come.
    # File-system paths to the generated products for a completed task.
    mosaic_path = models.CharField(max_length=250, default="")
    plot_path = models.CharField(max_length=250, default="")
    data_path = models.CharField(max_length=250, default="")
    data_netcdf_path = models.CharField(max_length=250, default="")
    class Meta(BaseResult.Meta):
        abstract = True
class BandMathTask(Query, Metadata, Result):
    """
    Combines the Query, Metadata, and Result abstract models
    """
    # Concrete model: the three abstract bases contribute their fields here.
    pass
| [
"[email protected]"
] | |
175a78136f62e7e999d0cc195755ace57255346f | 93bda31263d66cc557cb084d08c26388cf3d6bd5 | /fluid/image_classification/caffe2fluid/kaffe/custom_layers/__init__.py | 703c6a0a8091df79c73465be8c52248af518f3ca | [
"Apache-2.0"
] | permissive | denglelaibh/models | 323982b172e6aced9b6e99ecdbfe00d98c23be8f | f93838a4258c2a197cfa9e14c244b4da7a042a88 | refs/heads/develop | 2020-03-19T13:53:17.619489 | 2018-06-08T04:43:04 | 2018-06-08T04:43:04 | 126,789,492 | 0 | 0 | Apache-2.0 | 2018-05-16T03:17:38 | 2018-03-26T07:21:18 | Python | UTF-8 | Python | false | false | 2,781 | py | """
"""
from .register import get_registered_layers
#custom layer import begins
import axpy
import flatten
import argmax
import reshape
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params, node=None):
    """ set args for function 'f' using the parameters in node.layer.parameters

    Args:
        f (function): a python function object
        params (dict-like): maps argument names to values
        node: unused; kept for backward compatibility with callers that
            also pass the graph node

    Returns:
        arg_names (tuple): names of f's positional arguments
        kwargs (dict): the subset of params matching those arguments
    """
    # The old function-local import of protobuf_to_dict was never used
    # and has been removed.
    argc = f.__code__.co_argcount
    arg_list = f.__code__.co_varnames[0:argc]
    # Keep only the params that f actually accepts.
    kwargs = {key: params[key] for key in arg_list if key in params}
    return arg_list, kwargs
def has_layer(kind):
    """ test whether this layer exists in custom layer
    """
    # custom_layers is keyed by the Caffe layer type name.
    return kind in custom_layers
def compute_output_shape(kind, node):
    """Infer the output shape of a registered custom layer for *node*.

    Collects the output shapes of the node's parents, binds the shape
    function's keyword arguments from the node parameters, and evaluates it.
    """
    assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
        kind)
    shape_func = custom_layers[kind]['shape']
    input_shapes = [list(parent.output_shape) for parent in node.parents]
    _, kwargs = set_args(shape_func, node.params)
    # A single parent is passed as a bare shape rather than a list of shapes.
    if len(input_shapes) == 1:
        input_shapes = input_shapes[0]
    return shape_func(input_shapes, **kwargs)
def make_node(template, kind, node):
    """ make a PaddleNode for custom layer which means construct
    a piece of code to define a layer implemented in 'custom_layers'

    Args:
        @template (PaddleNode): a factory to new a instance of PaddleNode
        @kind (str): type of custom layer
        @node (graph.Node): a layer in the net

    Returns:
        instance of PaddleNode
    """
    assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
        kind)
    layer_func = custom_layers[kind]['layer']
    # Bind only the node parameters that the layer function accepts.
    _, kwargs = set_args(layer_func, node.params, node)
    return template('custom_layer', kind, **kwargs)
def make_custom_layer(kind, inputs, name, *args, **kwargs):
    """ execute a custom layer which is implemented by users

    Args:
        @kind (str): type name of this layer
        @inputs (vars): variable list created by fluid
        @name (str): name for this layer
        @args (tuple): other positional arguments
        @kwargs (dict): other kv arguments

    Returns:
        output (var): output variable for this layer
    """
    assert kind in custom_layers, "layer[%s] not exist in custom layers" % (
        kind)
    return custom_layers[kind]['layer'](inputs, name, *args, **kwargs)
| [
"[email protected]"
] | |
594b9ff615c4528fa5b53c1416302d703b587f1a | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /rMr8yRxS8TeF9pDyn_1.py | a88174ca6c4bb567f7acad1237817c0edd346dcd | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76 | py |
def war_of_numbers(lst):
    """Return the absolute difference between the sums of the odd and
    even integers in *lst*."""
    odd_total = sum(n for n in lst if n % 2)
    even_total = sum(n for n in lst if not n % 2)
    return abs(odd_total - even_total)
| [
"[email protected]"
] | |
4fdb674302427ea07cec8e247bb16ca0fe677c37 | 8015f1c62a2cb4efd21aa8938336913bf8117868 | /bamap/ba3209.pngMap.py | 747aa05ece22d9fb787c1b47286fdeb6150f43c4 | [] | no_license | GamerNoTitle/Beepers-and-OLED | 675b5e3c179df0f0e27b42bf594c43860d03b9af | afe1340e5394ae96bda5f9022a8a66824368091e | refs/heads/master | 2020-04-20T00:09:47.122471 | 2019-04-29T04:59:35 | 2019-04-29T04:59:35 | 168,515,579 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 8,468 | py | ba3209.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011110',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111000000000011111000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111110000000000111100000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111110000000011110000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111110000000011000000000000',
'00000000000000000000000000000000100100000000000000000000000000000000000000000000000000000000001111111111111000001111000000000000',
'00000000000000000000000000000000111100000000000000000000000000000000000000000000000000000000001111111111111001001110000000000000',
'00000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000011111111111000100100000000000000',
'00000000000000000000000000000000000011000000000000000000000000000000000000000000000000000000000011111111111001100000000000000000',
'00000000000000000000000000000000000011111000000000000000000000000000000000000000000000000000000000001111111111100000000000000000',
'00000000000000000000000000000000000010111001000000000000000000000000000000000000000000000000000000011111111111100000000000000000',
'00000000000000000000000000000000000000101111010000000000000000000000000000000000000000000000000000001111110000000000000000000000',
'00000000000000000000000000000000000000001111110000000000000000000000000000000000000000000000000000111111110000000000000000000000',
'00000000000000000000000000000000000000000011111000100000000000000000000000000000000000000000011111111111110000000000000000000000',
'00000000000000000000000000000000000000000011111111000000000000000000000000000000000000000000111111111111100000000000000000000000',
'00000000000000000000000000000000000000000000001111111111111111111001000101010010011111111111111111111100000000000000000000000000',
'00000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111111000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111110000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111110000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111100000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000101111111111111111111111111111111111100000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000101111111111111111111111111111111111100000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111100000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001011111111111111111111111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111010000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111110010000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111111100000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111111110000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000010111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111110000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111000000000000000000000000000000000',
]
| [
"[email protected]"
] | |
13e7e0e3254aa8534f9f9b8c72d17e9b62aca991 | 1dac4a650f5061bed9574a84cef4bdb87fdc3ebf | /tests/contrib/test_tone_convert.py | f25f8dc1434179aa9d3c1b2f9af4eb1bc7134c1d | [
"MIT"
] | permissive | rontian/python-pinyin | 29cf191ac1812de30a147ffdbd90a74b52ef2c2d | a421a83127ee55cba09ecabbf63d0c1bfb3a3aea | refs/heads/master | 2023-09-04T18:01:13.417107 | 2021-11-14T03:42:51 | 2021-11-14T03:42:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,342 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from pytest import mark
from pypinyin.contrib.tone_convert import (
tone_to_normal,
tone_to_tone2,
tone2_to_tone,
tone_to_tone3,
tone3_to_tone,
tone2_to_normal,
tone2_to_tone3,
tone3_to_tone2,
tone3_to_normal,
to_normal,
to_tone,
to_tone2,
to_tone3,
)
@mark.parametrize('pinyin,result', [
    ['zhōng', 'zhong'],
    ['ān', 'an'],
    ['yuè', 'yue'],
    ['er', 'er'],
    ['nǚ', 'nv'],
    ['nv', 'nv'],
    ['ā', 'a'],
    ['a', 'a'],
])
def test_tone_to_normal(pinyin, result):
    """Tone marks are stripped; ü is rendered as 'v' by default."""
    assert tone_to_normal(pinyin) == result
    assert to_normal(pinyin) == result
    assert to_normal(result) == result
@mark.parametrize('pinyin,v_to_u,result', [
    ['nǚ', False, 'nv'],
    ['nv', False, 'nv'],
    ['nǚ', True, 'nü'],
    ['nv', True, 'nü'],
])
def test_tone_to_normal_with_v_to_u(pinyin, v_to_u, result):
    """With v_to_u=True the ü vowel is kept as 'ü' instead of 'v'."""
    assert tone_to_normal(pinyin, v_to_u=v_to_u) == result
    assert to_normal(pinyin, v_to_u=v_to_u) == result
@mark.parametrize('pinyin,result', [
    ['zhōng', 'zho1ng'],
    ['ān', 'a1n'],
    ['yuè', 'yue4'],
    ['er', 'er'],
    ['nǚ', 'nv3'],
    ['nv', 'nv'],
    ['ā', 'a1'],
    ['a', 'a'],
    ['shang', 'shang'],
])
def test_tone_tone2(pinyin, result):
    """tone <-> tone2 round-trips: digit goes right after the toned vowel."""
    assert tone_to_tone2(pinyin) == result
    assert to_tone2(pinyin) == result
    assert tone2_to_tone(result) == pinyin
    assert to_tone(result) == pinyin
    assert to_tone(pinyin) == pinyin
    assert to_tone2(result) == result
@mark.parametrize('pinyin,neutral_tone_with_5,result', [
    ['shang', False, 'shang'],
    ['shang', True, 'sha5ng'],
])
def test_tone_tone2_with_neutral_tone_with_5(
        pinyin, neutral_tone_with_5, result):
    """neutral_tone_with_5=True marks the neutral tone with an explicit 5."""
    assert tone_to_tone2(
        pinyin, neutral_tone_with_5=neutral_tone_with_5) == result
    assert to_tone2(pinyin, neutral_tone_with_5=neutral_tone_with_5) == result
    assert tone2_to_tone(result) == pinyin
    assert to_tone(result) == pinyin
@mark.parametrize('pinyin,v_to_u,result', [
    ['nǚ', False, 'nv3'],
    ['nv', False, 'nv'],
    ['nǚ', True, 'nü3'],
    ['nv', True, 'nü'],
])
def test_tone_tone2_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u only affects the ü rendering; round-trip restores tone marks."""
    assert tone_to_tone2(pinyin, v_to_u=v_to_u) == result
    assert to_tone2(pinyin, v_to_u=v_to_u) == result
    assert tone2_to_tone(result) == pinyin
    if 'v' not in pinyin:
        assert to_tone(result) == pinyin
@mark.parametrize('pinyin,result', [
    ['zhōng', 'zhong1'],
    ['ān', 'an1'],
    ['yuè', 'yue4'],
    ['er', 'er'],
    ['nǚ', 'nv3'],
    ['nv', 'nv'],
    ['ā', 'a1'],
    ['a', 'a'],
    ['shang', 'shang'],
])
def test_tone_tone3(pinyin, result):
    """tone <-> tone3 round-trips: digit goes at the end of the syllable."""
    assert tone_to_tone3(pinyin) == result
    assert to_tone3(pinyin) == result
    assert tone3_to_tone(result) == pinyin
    assert to_tone(result) == pinyin
    assert to_tone(pinyin) == pinyin
    assert to_tone3(result) == result
@mark.parametrize('pinyin,neutral_tone_with_5,result', [
    ['shang', False, 'shang'],
    ['shang', True, 'shang5'],
])
def test_tone_tone3_with_neutral_tone_with_5(
        pinyin, neutral_tone_with_5, result):
    """neutral_tone_with_5=True appends an explicit 5 for the neutral tone."""
    assert tone_to_tone3(
        pinyin, neutral_tone_with_5=neutral_tone_with_5) == result
    assert to_tone3(
        pinyin, neutral_tone_with_5=neutral_tone_with_5) == result
    assert tone3_to_tone(result) == pinyin
    assert to_tone(result) == pinyin
@mark.parametrize('pinyin,v_to_u,result', [
    ['nǚ', False, 'nv3'],
    ['nǚ', True, 'nü3'],
    ['nv', True, 'nü'],
])
def test_tone_tone3_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u only affects the ü rendering; round-trip restores tone marks."""
    assert tone_to_tone3(pinyin, v_to_u=v_to_u) == result
    assert to_tone3(pinyin, v_to_u=v_to_u) == result
    assert tone3_to_tone(result) == pinyin
    if 'v' not in pinyin:
        assert to_tone(result) == pinyin
@mark.parametrize('pinyin,result', [
    ['zho1ng', 'zhong1'],
    ['a1n', 'an1'],
    ['yue4', 'yue4'],
    ['er', 'er'],
    ['nv3', 'nv3'],
    ['nü3', 'nv3'],
    ['a1', 'a1'],
    ['a', 'a'],
    ['shang', 'shang'],
    ['sha5ng', 'shang5'],
])
def test_tone2_tone3(pinyin, result):
    """tone2 -> tone3 moves the tone digit to the syllable end."""
    assert tone2_to_tone3(pinyin) == result
    assert to_tone3(pinyin) == result
@mark.parametrize('pinyin,v_to_u,result', [
    ['lüe3', False, 'lve3'],
    ['lüe3', True, 'lüe3'],
])
def test_tone2_tone3_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u controls whether ü survives the tone2 -> tone3 conversion."""
    assert tone2_to_tone3(pinyin, v_to_u=v_to_u) == result
@mark.parametrize('pinyin,result', [
    ['zho1ng', 'zhong'],
    ['a1n', 'an'],
    ['yue4', 'yue'],
    ['er', 'er'],
    ['nv3', 'nv'],
    ['nü3', 'nv'],
    ['a1', 'a'],
    ['a', 'a'],
    ['shang', 'shang'],
    ['sha5ng', 'shang'],
])
def test_tone2_to_normal(pinyin, result):
    """tone2 -> normal drops the tone digit entirely."""
    assert tone2_to_normal(pinyin) == result
    assert to_normal(pinyin) == result
    assert to_normal(result) == result
@mark.parametrize('pinyin,v_to_u,result', [
    ['nv3', False, 'nv'],
    ['nv3', True, 'nü'],
    ['nü3', False, 'nv'],
    ['nü3', True, 'nü'],
])
def test_tone2_to_normal_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u picks the ü spelling in the toneless output."""
    assert tone2_to_normal(pinyin, v_to_u=v_to_u) == result
    assert to_normal(pinyin, v_to_u=v_to_u) == result
    assert to_normal(result, v_to_u=v_to_u) == result
@mark.parametrize('pinyin,result', [
    ['zhong1', 'zhong'],
    ['an1', 'an'],
    ['yue4', 'yue'],
    ['er', 'er'],
    ['nv3', 'nv'],
    ['nü3', 'nv'],
    ['a1', 'a'],
    ['a', 'a'],
    ['shang', 'shang'],
    ['shang5', 'shang'],
])
def test_tone3_to_normal(pinyin, result):
    """tone3 -> normal drops the trailing tone digit."""
    assert tone3_to_normal(pinyin) == result
    assert to_normal(pinyin) == result
@mark.parametrize('pinyin,v_to_u,result', [
    ['nv3', False, 'nv'],
    ['nv3', True, 'nü'],
    ['nü3', False, 'nv'],
    ['nü3', True, 'nü'],
])
def test_tone3_to_normal_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u picks the ü spelling in the toneless output."""
    assert tone3_to_normal(pinyin, v_to_u=v_to_u) == result
    assert to_normal(pinyin, v_to_u=v_to_u) == result
@mark.parametrize('pinyin,result', [
    ['zhong1', 'zho1ng'],
    ['lüe4', 'lve4'],
])
def test_tone3_to_tone2(pinyin, result):
    """tone3 -> tone2 moves the digit next to the toned vowel."""
    assert tone3_to_tone2(pinyin) == result
@mark.parametrize('pinyin,v_to_u,result', [
    ['lüe4', False, 'lve4'],
    ['lüe4', True, 'lüe4'],
])
def test_tone3_to_tone2_with_v_to_u(pinyin, v_to_u, result):
    """v_to_u controls whether ü survives the tone3 -> tone2 conversion."""
    assert tone3_to_tone2(pinyin, v_to_u=v_to_u) == result
| [
"[email protected]"
] | |
207259048ae293c70ae0bac1216f01c829b5909a | 27e07c3175e06a09dd51ed3135d47c7c37295307 | /src/python/clawutil/conversion/setrun_template_amrclaw_2d.py | 69d3bbe7de2a26ae5f792c5f29de25bd4184def0 | [] | no_license | ahmadia/clawutil | 21d5037cbeafaaaa946a02de5b91214a44baac00 | 502a63b28807641a2eca57a05aa00b0b93b428b3 | refs/heads/master | 2021-01-16T22:36:19.096584 | 2013-02-21T20:57:06 | 2013-02-21T20:57:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,718 | py | """
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import os
import numpy as np
#------------------------------
def setrun(claw_pkg='amrclaw'):
#------------------------------
    """
    Define the parameters used for running Clawpack.

    INPUT:
        claw_pkg expected to be "amrclaw" for this setrun.

    OUTPUT:
        rundata - object of class ClawRunData

    Note: lines of the form ``{name:fmt}`` are template placeholders that are
    filled in by the conversion tooling before this file is executed.
    """

    from clawpack.clawutil import clawdata

    assert claw_pkg.lower() == 'amrclaw', "Expected claw_pkg = 'amrclaw'"

    num_dim = 2
    rundata = clawdata.ClawRunData(claw_pkg, num_dim)

    #------------------------------------------------------------------
    # Problem-specific parameters to be written to setprob.data:
    #------------------------------------------------------------------
    # Sample setup to write one line to setprob.data ...
    #probdata = rundata.new_UserData(name='probdata',fname='setprob.data')
    #probdata.add_param('u', 0.5, 'ubar advection velocity')

    #------------------------------------------------------------------
    # Standard Clawpack parameters to be written to claw.data:
    # (or to amrclaw.data for AMR)
    #------------------------------------------------------------------
    clawdata = rundata.clawdata  # initialized when rundata instantiated

    # Set single grid parameters first.
    # See below for AMR parameters.

    # ---------------
    # Spatial domain:
    # ---------------

    # Number of space dimensions:
    clawdata.num_dim = num_dim

    # Lower and upper edge of computational domain:
    clawdata.lower[0] = {xlower:e}          # xlower
    clawdata.upper[0] = {xupper:e}          # xupper
    clawdata.lower[1] = {ylower:e}          # ylower
    clawdata.upper[1] = {yupper:e}          # yupper

    # Number of grid cells:
    clawdata.num_cells[0] = {mx:d}      # mx
    clawdata.num_cells[1] = {my:d}      # my

    # ---------------
    # Size of system:
    # ---------------

    # Number of equations in the system:
    clawdata.num_eqn = {num_eqn:d}

    # Number of auxiliary variables in the aux array (initialized in setaux)
    clawdata.num_aux = {num_aux:d}

    # Index of aux array corresponding to capacity function, if there is one:
    clawdata.capa_index = {capa_index:d}

    # -------------
    # Initial time:
    # -------------

    clawdata.t0 = {t0:f}

    # Restart from checkpoint file of a previous run?
    # Note: If restarting, you must also change the Makefile to set:
    #    RESTART = True
    # If restarting, t0 above should be from original run, and the
    # restart_file 'fort.chkNNNNN' specified below should be in
    # the OUTDIR indicated in Makefile.

    clawdata.restart = False               # True to restart from prior results
    clawdata.restart_file = 'fort.chk00006'  # File to use for restart data

    # -------------
    # Output times:
    #--------------

    # Specify at what times the results should be written to fort.q files.
    # Note that the time integration stops after the final output time.

    clawdata.output_style = 1

    if clawdata.output_style==1:
        # Output ntimes frames at equally spaced times up to tfinal:
        # Can specify num_output_times = 0 for no output
        clawdata.num_output_times = {num_output_times:d}
        clawdata.tfinal = {tfinal:f}
        clawdata.output_t0 = True  # output at initial (or restart) time?

    elif clawdata.output_style == 2:
        # Specify a list or numpy array of output times:
        # Include t0 if you want output at the initial time.
        clawdata.output_times = [0., 0.1]

    elif clawdata.output_style == 3:
        # Output every step_interval timesteps over total_steps timesteps:
        clawdata.output_step_interval = 2
        clawdata.total_steps = 4
        clawdata.output_t0 = True  # output at initial (or restart) time?

    # BUG FIX: this line previously used '==' (a no-op comparison), so the
    # output format was silently never set; it must be an assignment.
    clawdata.output_format = 'ascii'       # 'ascii' or 'netcdf'

    clawdata.output_q_components = 'all'   # could be list such as [True,True]
    clawdata.output_aux_components = 'none'  # could be list
    clawdata.output_aux_onlyonce = True    # output aux arrays only at t0

    # ---------------------------------------------------
    # Verbosity of messages to screen during integration:
    # ---------------------------------------------------

    # The current t, dt, and cfl will be printed every time step
    # at AMR levels <= verbosity.  Set verbosity = 0 for no printing.
    #   (E.g. verbosity == 2 means print only on levels 1 and 2.)
    clawdata.verbosity = 0

    # --------------
    # Time stepping:
    # --------------

    # if dt_variable==True:  variable time steps used based on cfl_desired,
    # if dt_variable==False: fixed time steps dt = dt_initial always used.
    clawdata.dt_variable = {dt_variable:s}

    # Initial time step for variable dt.
    # (If dt_variable==0 then dt=dt_initial for all steps)
    clawdata.dt_initial = {dt_initial:e}

    # Max time step to be allowed if variable dt used:
    clawdata.dt_max = {dt_max:e}

    # Desired Courant number if variable dt used
    clawdata.cfl_desired = {cfl_desired:f}

    # max Courant number to allow without retaking step with a smaller dt:
    clawdata.cfl_max = {cfl_max:f}

    # Maximum number of time steps to allow between output times:
    clawdata.steps_max = {steps_max:d}

    # ------------------
    # Method to be used:
    # ------------------

    # Order of accuracy:  1 => Godunov,  2 => Lax-Wendroff plus limiters
    clawdata.order = {order:d}

    # Use dimensional splitting? (not yet available for AMR)
    clawdata.dimensional_split = 'unsplit'

    # For unsplit method, transverse_waves can be
    #  0 or 'none'      ==> donor cell (only normal solver used)
    #  1 or 'increment' ==> corner transport of waves
    #  2 or 'all'       ==> corner transport of 2nd order corrections too
    clawdata.transverse_waves = {transverse_waves:d}

    # Number of waves in the Riemann solution:
    clawdata.num_waves = {num_waves:d}

    # List of limiters to use for each wave family:
    # Required:  len(limiter) == num_waves
    # Some options:
    #   0 or 'none'     ==> no limiter (Lax-Wendroff)
    #   1 or 'minmod'   ==> minmod
    #   2 or 'superbee' ==> superbee
    #   3 or 'mc'       ==> MC limiter
    #   4 or 'vanleer'  ==> van Leer
    clawdata.limiter = {limiter:s}

    clawdata.use_fwaves = False    # True ==> use f-wave version of algorithms

    # Source terms splitting:
    #   src_split == 0 or 'none'    ==> no source term (src routine never called)
    #   src_split == 1 or 'godunov' ==> Godunov (1st order) splitting used,
    #   src_split == 2 or 'strang'  ==> Strang (2nd order) splitting used, not recommended.
    clawdata.source_split = {source_split:d}

    # --------------------
    # Boundary conditions:
    # --------------------

    # Number of ghost cells (usually 2)
    clawdata.num_ghost = {num_ghost:d}

    # Choice of BCs at xlower and xupper:
    #   0 or 'user'     => user specified (must modify bcNamr.f to use this option)
    #   1 or 'extrap'   => extrapolation (non-reflecting outflow)
    #   2 or 'periodic' => periodic (must specify this at both boundaries)
    #   3 or 'wall'     => solid wall for systems where q(2) is normal velocity
    clawdata.bc_lower[0] = {mthbc_xlower:s}   # at xlower
    clawdata.bc_upper[0] = {mthbc_xupper:s}   # at xupper
    clawdata.bc_lower[1] = {mthbc_ylower:s}   # at ylower
    clawdata.bc_upper[1] = {mthbc_yupper:s}   # at yupper

    # ---------------
    # Gauges:
    # ---------------
    clawdata.gauges = {gauges:s}
    # for gauges append lines of the form  [gaugeno, x, y, t1, t2]

    # ---------------
    # AMR parameters:
    # ---------------

    # max number of refinement levels:
    clawdata.amr_levels_max = {amr_levels_max:d}

    # List of refinement ratios at each level (length at least amr_level_max-1)
    clawdata.refinement_ratios_x = {refinement_ratios_x:s}
    clawdata.refinement_ratios_y = {refinement_ratios_y:s}
    clawdata.refinement_ratios_t = {refinement_ratios_t:s}

    # Specify type of each aux variable in clawdata.auxtype.
    # This must be a list of length num_aux, each element of which is one of:
    #   'center',  'capacity', 'xleft', or 'yleft'  (see documentation).
    clawdata.aux_type = {aux_type:s}

    # Flag for refinement based on Richardson error estimater:
    clawdata.flag_richardson = {flag_richardson:s}    # use Richardson?
    clawdata.flag_richardson_tol = {flag_richardson_tol:e}  # Richardson tolerance

    # Flag for refinement using routine flag2refine:
    clawdata.flag2refine = {flag2refine:s}      # use this?
    clawdata.flag2refine_tol = {flag2refine_tol:e}  # tolerance used in this routine
    # User can modify flag2refine to change the criterion for flagging.
    # Default: check maximum absolute difference of first component of q
    # between a cell and each of its neighbors.

    # steps to take on each level L between regriddings of level L+1:
    clawdata.regrid_interval = {regrid_interval:d}

    # width of buffer zone around flagged points:
    # (typically the same as regrid_interval so waves don't escape):
    clawdata.regrid_buffer_width = {regrid_buffer_width:d}

    # clustering alg. cutoff for (# flagged pts) / (total # of cells refined)
    # (closer to 1.0 => more small grids may be needed to cover flagged cells)
    clawdata.clustering_cutoff = {clustering_cutoff:f}

    # print info about each regridding up to this level:
    clawdata.verbosity_regrid = 0

    # ---------------
    # Regions:
    # ---------------
    clawdata.regions = {regions:s}
    # to specify regions of refinement append lines of the form
    #  [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]

    # --------------
    # Checkpointing:
    # --------------

    # Specify when checkpoint files should be created that can be
    # used to restart a computation.

    clawdata.checkpt_style = 1

    if clawdata.checkpt_style == 0:
        # Do not checkpoint at all
        pass

    elif clawdata.checkpt_style == 1:
        # Checkpoint only at tfinal.
        pass

    elif clawdata.checkpt_style == 2:
        # Specify a list of checkpoint times.
        clawdata.checkpt_times = [0.1, 0.15]

    elif clawdata.checkpt_style == 3:
        # Checkpoint every checkpt_interval timesteps (on Level 1)
        # and at the final time.
        clawdata.checkpt_interval = 5

    # ----- For developers -----
    # Toggle debugging print statements:
    clawdata.dprint = False      # print domain flags
    clawdata.eprint = False      # print err est flags
    clawdata.edebug = False      # even more err est flags
    clawdata.gprint = False      # grid bisection/clustering
    clawdata.nprint = False      # proper nesting output
    clawdata.pprint = False      # proj. of tagged points
    clawdata.rprint = False      # print regridding summary
    clawdata.sprint = False      # space/memory output
    clawdata.tprint = False      # time step reporting each level
    clawdata.uprint = False      # update/upbnd reporting

    return rundata
    # end of function setrun
    # ----------------------
if __name__ == '__main__':
    # Set up run-time parameters and write all data files.
    # Optional CLI argument: claw_pkg (defaults to 'amrclaw').
    import sys
    rundata = setrun(*sys.argv[1:])
    rundata.write()
| [
"rjl@ned"
] | rjl@ned |
2713cbac7261c36dc7f85e151ef0d25eb08925bf | 60d737103373825b858e67292865bda8c6f2094f | /active/theses-riogrande.py | 0781b43ee13bfe2d8da1cbb43eb58928f2b60efa | [] | no_license | fschwenn/ejlmod | fbf4692b857f9f056f9105a7f616a256725f03b6 | ef17512c2e44baa0164fdc6abc997c70ed3d2a74 | refs/heads/master | 2023-01-24T18:56:35.581517 | 2023-01-20T11:18:16 | 2023-01-20T11:18:16 | 91,459,496 | 1 | 1 | null | 2021-10-04T11:58:15 | 2017-05-16T13:06:57 | Python | UTF-8 | Python | false | false | 6,015 | py | # -*- coding: utf-8 -*-
#harvest theses from Rio Grande do Sul U.
#FS: 2020-10-08
import getopt
import sys
import os
import urllib2
import urlparse
from bs4 import BeautifulSoup
import re
import ejlmod2
import codecs
import datetime
import time
import json
# Output/configuration for the harvesting run.
xmldir = '/afs/desy.de/user/l/library/inspire/ejl'#+'/special'
retfiles_path = "/afs/desy.de/user/l/library/proc/retinspire/retfiles"#+'_special'
now = datetime.datetime.now()
stampoftoday = '%4d-%02d-%02d' % (now.year, now.month, now.day)
jnlfilename = 'THESES-RioGrandeDoSul-%s' % (stampoftoday)
publisher = 'Rio Grande do Sul U.'
hdr = {'User-Agent' : 'Magic Browser'}
rpp = 50       # records per page in the repository listing
pages = 4      # number of listing pages to crawl per department
boringdegrees = ['mestrado']  # degree levels to skip (master's theses)
prerecs = []
# Phase 1: crawl the per-department listing pages and collect candidate
# records (recent theses only) with their detail-page links.
for (depnr, department) in [('46', 'Physics'), ('48', 'Mathematics'), ('49', 'Applied Mathematics'), ('43', 'Computation')]:
    for page in range(pages):
        tocurl = 'https://lume.ufrgs.br/handle/10183/' + depnr +'/discover?rpp=' + str(rpp) + '&etal=0&group_by=none&page=' + str(page+1) + '&sort_by=dc.date.issued_dt&order=desc'
        print '==={ %s }==={ %i/%i }==={ %s }===' % (department, page+1, pages, tocurl)
        req = urllib2.Request(tocurl, headers=hdr)
        tocpage = BeautifulSoup(urllib2.urlopen(req), features="lxml")
        for div in tocpage.body.find_all('div', attrs = {'class' : 'artifact-description'}):
            new = True
            rec = {'tc' : 'T', 'jnl' : 'BOOK', 'note' : [department], 'keyw' : [], 'supervisor' : []}
            for span in div.find_all('span', attrs = {'class' : 'date'}):
                if re.search('[12]\d\d\d', span.text):
                    rec['year'] = re.sub('.*([12]\d\d\d).*', r'\1', span.text.strip())
                    # skip theses older than two years
                    if int(rec['year']) < now.year - 2:
                        new = False
                        print '    skip', rec['year']
            # field code: 'm' = mathematics(-like), 'c' = computing
            if depnr in ['46', '48']:
                rec['fc'] = 'm'
            elif depnr == '43':
                rec['fc'] = 'c'
            if new:
                for a in div.find_all('a'):
                    if re.search('handle', a['href']):
                        rec['artlink'] = 'https://lume.ufrgs.br' + a['href'] + '?show=full'
                        rec['hdl'] = re.sub('.*handle\/', '', a['href'])
                prerecs.append(rec)
        time.sleep(2)
i = 0
recs = []
# Phase 2: visit each candidate's detail page and extract the metadata.
for rec in prerecs:
    keepit = True
    i += 1
    print '---{ %i/%i (%i) }---{ %s }------' % (i, len(prerecs), len(recs), rec['artlink'])
    try:
        artpage = BeautifulSoup(urllib2.build_opener(urllib2.HTTPCookieProcessor).open(rec['artlink']), features="lxml")
        time.sleep(3)
    except:
        try:
            # one retry after a long back-off before giving up on the record
            print "retry %s in 180 seconds" % (rec['artlink'])
            time.sleep(180)
            artpage = BeautifulSoup(urllib2.build_opener(urllib2.HTTPCookieProcessor).open(rec['artlink']), features="lxml")
        except:
            print "no access to %s" % (rec['artlink'])
            continue
    for meta in artpage.head.find_all('meta'):
        if meta.has_attr('name'):
            #author
            if meta['name'] == 'DC.creator':
                author = meta['content']
                rec['autaff'] = [[ author ]]
                rec['autaff'][-1].append(publisher)
            #title
            elif meta['name'] == 'citation_title':
                rec['tit'] = meta['content']
            #date
            elif meta['name'] == 'DCTERMS.issued':
                rec['date'] = meta['content']
            #abstract
            elif meta['name'] == 'DCTERMS.abstract':
                if meta['content']:
                    if meta.has_attr('xml:lang'):
                        if meta['xml:lang'] == 'en':
                            rec['abs'] = meta['content']
                        elif meta['xml:lang'] == 'pt':
                            rec['abspt'] = meta['content']
                    else:
                        rec['abs'] = meta['content']
            #FFT
            elif meta['name'] == 'citation_pdf_url':
                rec['FFT'] = meta['content']
            #keywords
            elif meta['name'] == 'citation_keywords':
                for keyw in re.split('[,;] ', meta['content']):
                    if not re.search('^info.eu.repo', keyw):
                        rec['keyw'].append(keyw)
    # fall back to the Portuguese abstract if no English one was found
    if 'abspt' in rec.keys() and not 'abs' in rec.keys():
        rec['abs'] = rec['abspt']
    for tr in artpage.body.find_all('tr', attrs = {'class' : 'ds-table-row'}):
        for td in tr.find_all('td', attrs = {'class' : 'label-cell'}):
            tdt = td.text.strip()
            td.decompose()
        for td in tr.find_all('td'):
            if td.text.strip() == 'pt_BR':
                continue
            #supervisor
            if tdt == 'dc.contributor.advisor':
                rec['supervisor'] = [[ re.sub(' \(.*', '', td.text.strip()) ]]
            #degree
            elif tdt == 'dc.degree.level':
                degree = td.text.strip()
                if degree in boringdegrees:
                    print '    skip "%s"' % (degree)
                    keepit = False
                else:
                    rec['note'].append(degree)
            #language
            elif tdt == 'dc.language.iso':
                if td.text.strip() == 'por':
                    rec['language'] = 'portuguese'
    for a in artpage.body.find_all('a'):
        if a.has_attr('href') and re.search('creativecommons.org', a['href']):
            rec['license'] = {'url' : a['href']}
    if keepit:
        recs.append(rec)
        print '    ', rec.keys()
# closing of files and printing
xmlf = os.path.join(xmldir, jnlfilename+'.xml')
xmlfile = codecs.EncodedFile(codecs.open(xmlf, mode='wb'),'utf8')
ejlmod2.writenewXML(recs, xmlfile, publisher, jnlfilename)
xmlfile.close()
# retrieval: remember this output file for the downstream pipeline
retfiles_text = open(retfiles_path, "r").read()
line = jnlfilename+'.xml'+ "\n"
if not line in retfiles_text:
    retfiles = open(retfiles_path, "a")
    retfiles.write(line)
    retfiles.close()
| [
"[email protected]"
] | |
990eee06dfeaf47b24859f2490bc130d5f365b43 | a3c68eafdb433c981f2b90e86f895e4f121c69fb | /笔试/腾讯/安排任务.py | 7774742e90b1ce6c0ce1f601472364aa5efae663 | [] | no_license | Cassiexyq/Program-Exercise | ccc236ea76ca99ddc6fe0c4c47edebc3d557cfad | e962cc3add047d61df275dd3e22a091018fd964a | refs/heads/master | 2020-04-25T20:27:33.561226 | 2019-09-22T15:29:35 | 2019-09-22T15:29:35 | 173,050,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py | # -*- coding: utf-8 -*-
# @Author: xyq
# Read n machines and m tasks; each has (available/required hours, level).
n,m = [int(i) for i in input().strip().split()]
machine,task = [],[]
for i in range(n):
    hours, level = [int(i) for i in input().strip().split()]
    machine.append([hours, level])
for i in range(m):
    hours,level = [int(i) for i in input().strip().split()]
    task.append([hours,level])
# Greedy: process machines and tasks from largest (hours, level) down.
machine.sort(key=lambda x:(x[0],x[1]),reverse=True)
task.sort(key=lambda x:(x[0],x[1]),reverse=True)
# dp[l] counts available machines of difficulty level l (levels 0..100).
dp = [0 for i in range(101)]
j,cnt, res = 0,0,0
# For each task, first add every machine whose hours satisfy the task
# into dp, bucketed by the machine's difficulty level.
# Then scan levels >= the task's level and take the first available
# machine whose level qualifies.
for h, l in task:
    while j < len(machine) and machine[j][0] >= h:
        dp[machine[j][1]] += 1
        j += 1
    for i in range(l,101):
        if dp[i] > 0 :
            dp[i] -= 1
            # profit formula: 200 per hour plus 3 per level
            res += 200 * h + 3 * l
            cnt += 1
            break
# Output: number of completed tasks and total profit.
print("%d %d" % (cnt, res))
| [
"[email protected]"
] | |
ad88264e1c80fd051137abcd95d7cc5ca5c19016 | 4ca7480c27ed98fd9c49ac11911f6c1229e53631 | /main.py | e3a55d8bf6eb0bdc2a30bedbaf60580310a6ed74 | [] | no_license | shawwn/tfimg | 33ba7b2fc7fa60fb41ba1bd4c76065170c40c3f8 | 0d464f56f1092996d90870f765d6a1d875f09b4f | refs/heads/master | 2023-03-05T09:47:53.394086 | 2023-02-18T13:58:48 | 2023-02-18T13:58:48 | 274,115,458 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,821 | py | import tensorflow as tf
import functools
def op_scope(fn, name=None):
    """Decorator that wraps ``fn`` in a TensorFlow name scope.

    ``name`` defaults to the function's own name.  BUG FIX: the computed
    ``name`` was previously ignored (``tf.name_scope(fn.__name__)``), so an
    explicitly supplied name had no effect.
    """
    if name is None:
        name = fn.__name__
    @functools.wraps(fn)
    def _fn(*args, **kwargs):
        with tf.name_scope(name):
            return fn(*args, **kwargs)
    return _fn
@op_scope
def clamp(v, min=0., max=1.):
    """Clamp ``v`` into [min, max]; numpy path for numbers, tf for tensors."""
    clip = np.clip if anum(v) else tf.clip_by_value
    return clip(v, min, max)
@op_scope
def wrap(uv, wrap_mode="reflect"):
    """Fold uv coordinates into [0, 1] by wrapping, reflecting, or clamping."""
    assert wrap_mode in ["clamp", "wrap", "reflect"]
    if wrap_mode == "clamp":
        return clamp(uv)
    if wrap_mode == "wrap":
        # periodic tiling
        return tf.math.floormod(uv, 1.0)
    # mirror-repeat: triangle wave with period 2
    return 1.0 - tf.abs(tf.math.floormod(uv, 2.0) - 1.0)
def aten(u):
    """True if ``u`` is a TensorFlow tensor."""
    return tf.is_tensor(u)
def anum(u):
    """True if ``u`` is a plain Python number (int or float)."""
    return isinstance(u, (float, int))
@op_scope
def iround(u):
    # Floor toward -inf.  NOTE: the numeric path returns a float
    # (u // 1.0), while the tensor path returns an int32 tensor.
    if anum(u):
        return u // 1.0
    else:
        return i32(tf.math.floordiv(f32(u), 1.0))
@op_scope
def lsh(u, by):
    # Left shift: python ints for numbers, elementwise op for tensors.
    if anum(u) and anum(by):
        return int(u) << by
    else:
        return tf.bitwise.left_shift(u, by)
@op_scope
def rsh(u, by):
    # Right shift: python ints for numbers, elementwise op for tensors.
    if anum(u) and anum(by):
        return int(u) >> by
    else:
        return tf.bitwise.right_shift(u, by)
import numpy as np
@op_scope
def sign(u):
    # Sign (-1/0/+1): numpy for plain numbers, tf.sign for tensors.
    if anum(u):
        return np.sign(u)
    else:
        return tf.sign(u)
# Elementwise min/max over 2-4 arguments: pure-Python fast path when every
# argument is a plain number, otherwise nested tf.minimum/tf.maximum.
@op_scope
def min2(a, b):
    if anum(a) and anum(b):
        return min(a, b)
    else:
        return tf.minimum(a, b)
@op_scope
def max2(a, b):
    if anum(a) and anum(b):
        return max(a, b)
    else:
        return tf.maximum(a, b)
@op_scope
def min3(a, b, c):
    if anum(a) and anum(b) and anum(c):
        return min(a, b, c)
    else:
        return tf.minimum(a, tf.minimum(b, c))
@op_scope
def max3(a, b, c):
    if anum(a) and anum(b) and anum(c):
        return max(a, b, c)
    else:
        return tf.maximum(a, tf.maximum(b, c))
@op_scope
def min4(a, b, c, d):
    if anum(a) and anum(b) and anum(c) and anum(d):
        return min(a, b, c, d)
    else:
        return tf.minimum(a, tf.minimum(b, tf.minimum(c, d)))
@op_scope
def max4(a, b, c, d):
    if anum(a) and anum(b) and anum(c) and anum(d):
        return max(a, b, c, d)
    else:
        return tf.maximum(a, tf.maximum(b, tf.maximum(c, d)))
# Dtype casts that recurse through tuples/lists and keep plain numbers plain
# (tensors are tf.cast; numbers become float/int/np.uint8 respectively).
@op_scope
def f32(u):
    if isinstance(u, (tuple, list)):
        return tuple(f32(v) for v in u)
    if anum(u):
        return float(u)
    else:
        return tf.cast(u, tf.float32)
@op_scope
def i32(u):
    if isinstance(u, (tuple, list)):
        return tuple(i32(v) for v in u)
    if anum(u):
        return int(u)
    else:
        return tf.cast(u, tf.int32)
@op_scope
def u8(u):
    if isinstance(u, (tuple, list)):
        return tuple(u8(v) for v in u)
    if anum(u):
        # numbers come back as 0-d numpy uint8 arrays, not python ints
        return np.asarray(u).astype(np.uint8)
    else:
        return tf.cast(u, tf.uint8)
def arglist(*args):
    """Normalize varargs: a leading list/tuple is splatted into the pack."""
    if args and isinstance(args[0], (list, tuple)):
        return tuple(args[0]) + args[1:]
    return args
def unlist(args):
    """Collapse a one-element argument pack to its sole element."""
    packed = arglist(*args)
    return packed[0] if len(packed) == 1 else packed
@op_scope
def vzip(*xs):
    # Stack components along a new trailing axis: N tensors -> (..., N).
    return tf.stack(arglist(*xs), axis=-1)
@op_scope
def vunzip(uv, keepdims=False):
    # Inverse of vzip: split the trailing axis back into component tensors.
    xs = tf.split(uv, np.shape(uv)[-1], -1)
    if not keepdims:
        xs = [tf.squeeze(x, -1) for x in xs]
    return tuple(xs)
def lerp(a, b, t):
    """Linear interpolation: returns ``a`` at t=0 and ``b`` at t=1."""
    return a + t * (b - a)
def vspan(*dims):
    # Normalized coordinates [0, 1) with n samples for each dimension.
    dims = arglist(*dims)
    return unlist(tf.range(0.0, n) / n for n in f32(iround(dims)))
def vmesh(*spans):
    # Meshgrid ('xy' indexing) stacked into one (..., ndim) tensor.
    grids = tf.meshgrid(*spans, indexing='xy')
    return vzip(grids)
def vgrid(*dims):
    # Full normalized coordinate grid for the given output dimensions.
    spans = vspan(*dims)
    return vmesh(*spans)
def vshape(x):
    """Return x's shape if it has one; otherwise assume x already is a shape."""
    return np.shape(x) if hasattr(x, 'shape') else x
def bounds(img):
    """Return the leading (spatial) dims as a list, dropping a channel axis."""
    dims = vshape(img)
    return list(dims[:-1] if len(dims) > 2 else dims)
def channels(img):
    """Return the size of the trailing channel axis (requires rank > 2)."""
    dims = vshape(img)
    assert len(dims) > 2
    return dims[-1]
def area(shape):
    """Number of pixels in the spatial bounds (product of bounds())."""
    return np.prod(bounds(shape))
def grab(src, u, v):
    # Gather src[v, u] with round-to-nearest (+0.5 then floor) and clamping
    # to the image rectangle; u/v are pixel-space x/y coordinates.
    IH, IW = bounds(src)
    u = clamp(iround(f32(u) + 0.5), 0, IW - 1)
    v = clamp(iround(f32(v) + 0.5), 0, IH - 1)
    inds = vzip(v, u)
    out = tf.raw_ops.GatherNd(params=src, indices=tf.reshape(inds, (-1, inds.shape[-1])))
    return tf.reshape(out, bounds(inds) + [channels(src)])
@op_scope
def sample(tex, uv, method="bilinear", wrap_mode="reflect"):
    """Sample texture ``tex`` at normalized uv coordinates.

    method: "nearest", "bilinear", or "area" (box filter via a summed-area
    table, intended for minification).  wrap_mode: how out-of-range uv are
    folded back into [0, 1] (see ``wrap``).
    """
    assert method in ["nearest", "bilinear", "area"]
    if isinstance(uv, (list, tuple)):
        uv = vzip(uv)
    IH, IW = bounds(tex)
    # uv-space footprint of one output pixel (used by the area filter)
    d_uv = 1.0 / vzip(f32(bounds(uv)))
    uv = wrap(uv, wrap_mode)
    ix, iy = vunzip(uv)
    # normalize ix, iy from [0, 1] to [0, W-1] & [0, H-1]
    ix = ix * (IW - 1)
    iy = iy * (IH - 1)
    if method == "nearest":
        return grab(tex, ix, iy)
    elif method == "bilinear":
        # https://github.com/pytorch/pytorch/blob/f064c5aa33483061a48994608d890b968ae53fb5/aten/src/THNN/generic/SpatialGridSamplerBilinear.c#L105
        # get NE, NW, SE, SW pixel values from (x, y)
        ix_nw = iround(ix)
        iy_nw = iround(iy)
        ix_ne = ix_nw + 1
        iy_ne = iy_nw
        ix_sw = ix_nw
        iy_sw = iy_nw + 1
        ix_se = ix_nw + 1
        iy_se = iy_nw + 1
        # get surfaces to each neighbor (bilinear weights):
        sub = lambda a, b: f32(a) - f32(b)
        nw = sub(ix_se, ix) * sub(iy_se, iy)
        ne = sub(ix, ix_sw) * sub(iy_sw, iy)
        sw = sub(ix_ne, ix) * sub(iy, iy_ne)
        se = sub(ix, ix_nw) * sub(iy, iy_nw)
        nw_val = grab(tex, ix_nw, iy_nw)
        ne_val = grab(tex, ix_ne, iy_ne)
        sw_val = grab(tex, ix_sw, iy_sw)
        se_val = grab(tex, ix_se, iy_se)
        def mul(a, da):
            # broadcast the scalar weight across the channel axis
            return f32(a) * tf.expand_dims(da, -1)
        out = mul(nw_val, nw)
        out += mul(ne_val, ne)
        out += mul(sw_val, sw)
        out += mul(se_val, se)
        return out
    else:
        u_0, v_0 = vunzip(uv)
        u_1, v_1 = vunzip(uv + d_uv)
        # Summed-area table over the *texture*.  BUG FIX: this previously
        # read the module-global `img` (only defined under __main__), so
        # "area" sampling raised NameError when used as a library.
        # If uvs are flipped the rectangle sum is negative, so take abs.
        img_sum = tf.cumsum(tf.cumsum(f32(tex), 0), 1) / area(tex)
        out_00 = sample(img_sum, (u_0, v_0), "bilinear", wrap_mode=wrap_mode)
        out_01 = sample(img_sum, (u_0, v_1), "bilinear", wrap_mode=wrap_mode)
        out_10 = sample(img_sum, (u_1, v_0), "bilinear", wrap_mode=wrap_mode)
        out_11 = sample(img_sum, (u_1, v_1), "bilinear", wrap_mode=wrap_mode)
        out = abs(out_00 + out_11 - out_10 - out_01) * area(uv)
        return out
def readwrite(filename, mode, data=None):
    """Read from or write ``data`` to ``filename``; '-' means stdin/stdout.

    Reads return the full contents; writes return the write() result.
    """
    # BUG FIX: the module never imports sys at top level (only under
    # __main__), so the '-' path raised NameError when used as a library.
    import sys
    if filename == '-':
        if mode.startswith('r'):
            f = sys.stdin.buffer if 'b' in mode else sys.stdin
            return f.read()
        else:
            f = sys.stdout.buffer if 'b' in mode else sys.stdout
            return f.write(data)
    else:
        try:
            # optional: transparent support for s3://, gs://, http:// paths
            from smart_open import open
        except ImportError:
            from builtins import open
        with open(filename, mode) as f:
            if mode.startswith('r'):
                return f.read()
            else:
                return f.write(data)
if __name__ == "__main__":
    # CLI: tfimg INFILE OUTFILE [W [H [METHOD [WRAP [SX,SY [TX,TY]]]]]]
    # '-' for INFILE/OUTFILE means stdin/stdout.
    import sys
    args = sys.argv[1:]
    indata = readwrite(args[0], 'rb')
    img = tf.io.decode_image(indata, channels=3)
    # NOTE(review): np.shape(img) is (H, W, C), so IW here actually holds the
    # height and IH the width; the aspect-ratio math below still works out
    # because the ratio is used consistently — confirm before renaming.
    IW, IH, *IC = np.shape(img)
    outfile = args[1]
    w = '64' if len(args) <= 2 else args[2]
    h = '0' if len(args) <= 3 else args[3]
    # percentage sizes are relative to the input dimensions
    if w.endswith('%'): w = (float(w[:-1])/100 * IW)
    if h.endswith('%'): h = (float(h[:-1])/100 * IH)
    w = int(w)
    h = int(h)
    # default filter: bilinear when upscaling, area (box) when downscaling
    method = ("bilinear" if w >= IW and (h <= 0 or h >= IH) else "area") if len(args) <= 4 else args[4]
    wrap_mode = "reflect" if len(args) <= 5 else args[5]
    u_sx, u_sy = (1.0, 1.0) if len(args) <= 6 else [float(x) for x in args[6].split(',')]
    u_tx, u_ty = (0.0, 0.0) if len(args) <= 7 else [float(x) for x in args[7].split(',')]
    # derive the missing dimension from the input aspect ratio
    if w <= 0: w = h / (IW/IH)
    if h <= 0: h = w * (IW/IH)
    w *= u_sx
    h *= u_sy
    uv = vgrid(w, h)
    # apply uv-space scale and translation (y translation is flipped)
    uv = uv * vzip( u_sx,  u_sy)
    uv = uv + vzip( u_tx, -u_ty)
    img2 = sample(img, uv, method=method, wrap_mode=wrap_mode)
    img2 = u8(clamp(img2, 0, 255))
    if args[1] == '-' and sys.stdout.isatty():
        # display inline instead of dumping PNG bytes to a terminal
        import imgcat
        imgcat.imgcat(img2.numpy())
    else:
        data = tf.image.encode_png(img2).numpy()
        readwrite(args[1], 'wb', data)
| [
"[email protected]"
] | |
e21782c742d83cd1fd0c8e9c99a1ccce363fc50a | d14be9a07437f395c36c73aebfd1c5434ff4300e | /vmware_static_dhcp/hosts_file.py | ea682e177a5b8fc9e3ef5432174408c596cf3e8a | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | zcutlip/vmware-static-dhcp | f6122cdb7ca0fcd895c536d3a23e2469bfceaedc | 4f7747703bca8f440c56407c5e1437cfe9ff8cba | refs/heads/master | 2020-09-08T11:13:41.192702 | 2019-11-12T03:11:03 | 2019-11-12T03:11:03 | 221,117,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,573 | py | class MalformedHostsEntryException(Exception):
pass
class MalformedHostsFileException(Exception):
pass
class HostsEntry:
ADDR_COLUMN_WIDTH = 24
FOUR_SPACES = " "
def __init__(self, addr, hostname, comment):
self._sanity_check(addr, hostname)
self.address = addr
self.hostname = hostname
self.comment = comment
self.blank_line = self._is_blank_line(addr, hostname, comment)
@classmethod
def parse_hostline(cls, hostline):
addr = None
hostname = None
comment = None
if hostline.startswith("#"):
comment = hostline
else:
parts = hostline.split(maxsplit=2)
if len(parts):
addr = parts.pop(0)
if len(parts):
hostname = parts.pop(0)
if len(parts):
comment = parts.pop(0)
return (addr, hostname, comment)
def _sanity_check(self, addr, hostname):
addr_hostname = [addr, hostname]
if None in addr_hostname:
if [None, None] != addr_hostname:
raise MalformedHostsEntryException(
"Malformed address/hostname pair: {}".format(str(addr_hostname)))
def _is_blank_line(self, addr, hostname, comment):
return [None, None, None] == [addr, hostname, comment]
def __str__(self):
_str = None
if [self.address, self.hostname] == [None, None] and self.comment is not None:
_str = self.comment
elif self.blank_line:
_str = ""
else:
fmt = "{:%ds}" + self.FOUR_SPACES + "{:s}"
fmt = fmt % self.ADDR_COLUMN_WIDTH
_str = fmt.format(self.address, self.hostname)
if self.comment is not None:
_str += self.FOUR_SPACES + self.comment
return _str
class HostsFile:
    """In-memory model of a hosts file.

    Maintains both the ordered entry list (``self.hosts``) and an address ->
    entry index (``self.address_map``) used for duplicate detection.
    """

    def __init__(self, path="/etc/hosts"):
        """Parse *path* and populate the entry list and address index."""
        hosts, address_map = self._parse_hosts_file(path)
        self.hosts = hosts
        self.address_map = address_map

    def _parse_hosts_file(self, path):
        """Return ``(entries, address_map)`` parsed from *path*.

        Raises:
            MalformedHostsFileException: if the same address appears twice.
        """
        entries = []
        address_map = {}
        # Fix: use a context manager so the file handle is always closed
        # (previously the handle from open() was leaked).
        with open(path, "r") as infile:
            lines = infile.read().splitlines()
        for line in lines:
            addr, hostname, comment = HostsEntry.parse_hostline(line)
            entry = HostsEntry(addr, hostname, comment)
            if entry.address is not None:
                if entry.address in address_map:
                    raise MalformedHostsFileException("Duplicate address: {}".format(line))
                else:
                    address_map[entry.address] = entry
            entries.append(entry)
        return (entries, address_map)

    def add_host_entry(self, addr, hostname, comment):
        """Append a new entry, rejecting addresses already present."""
        if addr in self.address_map:
            raise MalformedHostsEntryException("Address already in hosts file: {}".format(addr))
        entry = HostsEntry(addr, hostname, comment)
        self.address_map[addr] = entry
        self.hosts.append(entry)

    def remove_host_entry(self, addr):
        """Remove the entry for *addr* from both the list and the index."""
        if addr not in self.address_map:
            raise Exception("Address not found: {}".format(addr))
        entry = self.address_map[addr]
        self.hosts.remove(entry)
        # Bug fix: also drop the index entry. Previously the stale mapping
        # stayed behind, so a later add_host_entry() for the same address
        # incorrectly raised "Address already in hosts file".
        del self.address_map[addr]

    def write_hosts(self, outpath):
        """Serialize all entries to *outpath*, one per line."""
        print("Writing updated hosts file to {}".format(outpath))
        with open(outpath, "w") as outhosts:
            for entry in self.hosts:
                outhosts.write("{}\n".format(str(entry)))
if __name__ == "__main__":
    # Smoke test: parse the system hosts file and echo every entry to stdout.
    hosts = HostsFile(path="/etc/hosts")
    for host in hosts.hosts:
        print(str(host))
| [
"[email protected]"
] | |
975c2da1189e6e45391dd92d380c8e1342d46cca | abf31091690614f7949c1ddc50ef36d4a46658f8 | /Old Sspider/Gomine_DOC_utf/zhuan_weiyi_run.py | ee3258e8cd083db50a9d680df0af86d3f3e605bd | [] | no_license | SuperShen9/work_by_sh | 7132196b0b40ab88c4baaac6bb4813babc2f3dcb | 60a3693eb040b2f86165bfa439f3a718f38bae44 | refs/heads/master | 2021-01-20T01:50:34.795903 | 2018-06-19T06:14:11 | 2018-06-19T06:14:11 | 89,328,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,154 | py | # -*- coding: utf-8 -*-
# author:Super
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import pandas as pd
import time,os,shutil
import codecs
pd.set_option('expand_frame_repr',False)
os.chdir('C:\\Users\Administrator\Desktop')
# #openpyxl模块
# wb = openpyxl.load_workbook('sheet1.xlsx')
# sheet = wb.get_sheet_by_name('Sheet1')
# print sheet['A2'].value
# print type(sheet['A2'].value.encode("gbk"))
# exit()
# # 查看text文件
# file=open('text.txt')
# lines=file.readlines()
# print lines
# for i in lines:
# print i
# exit()
df=pd.read_excel('sheet1.xlsx')
if os.path.exists('RUN'):
shutil.rmtree('RUN')
os.makedirs('C:\\Users\Administrator\Desktop\\RUN')
os.chdir('C:\\Users\Administrator\Desktop\\RUN')
# df.shape[0]
for i in range(df.shape[0]):
count=0
for x in df.columns:
count+=1
val = df[x].loc[i]
if isinstance(val, float):
val = ''
else:
val = val.encode('gbk')
fl = open('%s-%s-%s.txt' % (df['name'].loc[i],df['organization'].loc[i],df['webName'].loc[i]), 'a')
if count<=len(df.columns)-4:
if x == 'webName':
fl.write('{')
fl.write('\r"webUrl": "http://www.chictr.org.cn/searchproj.aspx",')
fl.write('\r"{}": "{}",'.format(x, str(val)))
fl.write("")
elif x == 'remark':
fl.write('\r"remark":"",')
else:
fl.write('\r"{}": "{}",'.format(x, str(val)))
elif count==len(df.columns)-3:
fl.write('\r"info": {')
fl.write('\r"name": "{}",'.format(str(val)))
elif count==len(df.columns)-1:
if val[-1]=='0':
fl.write('\r"{}": "{}",'.format(x, str(val)[:4]))
else:
fl.write('\r"{}": "{}",'.format(x, str(val)[4:]))
elif count == len(df.columns):
fl.write('\r"{}": "{}"'.format(x, str(val)))
fl.write('}')
fl.write('}')
else:
fl.write('\r"{}": "{}",'.format(x, str(val)))
| [
"[email protected]"
] | |
ddcdbe45016ee01b23f44ed1f9d03021eae00fb4 | dc30b6ecea0a1ad2342244871817a5882f506fda | /Tentamen/Opdracht1.py | 5247492fd3260ea011617607a84e6974c049e3cd | [] | no_license | CasperHagenaars/WISB256 | 26e4b272a80d687b22ce7fd48a6040e45e152b85 | d0871a2221f71fe64d7aba4efcd3a1e276c22f34 | refs/heads/master | 2020-05-20T17:03:27.182900 | 2015-06-18T12:43:33 | 2015-06-18T12:43:33 | 34,313,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | import math
import time
tekst = input()
lijn = "Ug"
try:
if(int(tekst) > 1):
for i in range(1,int(tekst)):
lijn += " ug"
lijn += "!"
lijn = str(lijn)
else:
lijn = "Ug!"
except:
lengte = (len(tekst) / 3)
lijn = int(lengte)
print(lijn) | [
"[email protected]"
] | |
732064d7c77be799ca1907b7c19d461d4303dbf1 | 543fc91aa311e39b3119a509b8151c31c1bfdb27 | /code/BalancedBinaryTree.py | ea7ddfe29074ce64631957588924d753eee6795d | [] | no_license | shinrain/leetcode-python | 399879ea8ebffdc73575c897228e33d7a62825de | 5497f496fb6b87b387f0d2eb316d152446dfc8cc | refs/heads/master | 2021-01-01T06:00:23.517122 | 2015-01-03T07:15:26 | 2015-01-03T07:15:26 | 26,843,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # @param root, a tree node
    # @return a boolean
    def isBalanced(self, root):
        # A tree is balanced iff helper() never hits a height gap > 1;
        # helper() uses -1 as its "unbalanced" sentinel.
        return helper(root)!=-1
def helper(root):
    """Return the height of *root*, or -1 if any subtree is height-unbalanced."""
    if root is None:
        return 0
    left_h = helper(root.left)
    if left_h == -1:
        return -1
    right_h = helper(root.right)
    if right_h == -1:
        return -1
    # Balanced here means the two subtree heights differ by at most one.
    if abs(left_h - right_h) > 1:
        return -1
    return 1 + max(left_h, right_h)
| [
"[email protected]"
] | |
22c37ae2c3ab66a66b668cd84fcf3b548b17736a | bf94a22b20a81d0557488357bc1ceaebb9742d50 | /bc/utils/dask_grenoble.py | a1cc516402f4df15c1e25d5cfebb650b3645782f | [
"MIT"
] | permissive | ikalevatykh/rlbc | ccbcb73034d0f9b638b26abbe6d97479b3494903 | feb31bf5e8442335ce4b4a06a7171b1f64afe9b5 | refs/heads/master | 2022-03-05T04:03:36.053834 | 2019-11-05T10:04:05 | 2019-11-05T10:04:05 | 261,451,513 | 2 | 0 | MIT | 2020-05-05T11:58:14 | 2020-05-05T11:58:13 | null | UTF-8 | Python | false | false | 2,318 | py | import os
import datetime
from dask_jobqueue import OARCluster
class AlpesCluster(OARCluster):
    """Site-specific OAR cluster preset.

    Thin wrapper around ``dask_jobqueue.OARCluster`` that fills in the
    resource spec, log file locations and OAR job options for this site.
    """

    def __init__(
            self,
            cores,
            name,
            processes=1,
            mem_req=4000,
            walltime='72:00:00',
            venv=None,
            to_source='~/.bashrc',
            log_dir='/home/apashevi/Logs/dask/',
            spill_dir='/home/apashevi/Logs/dask/',
            env_extra=None,
            besteffort=False,
            job_extra=None,
            interface_node=None,
            extra='',
            **kwargs):
        """
        Args:
            cores: cores per job; used in the CPU resource spec.
            name: 'dask-cpu' or 'dask-gpu'; a timestamp suffix is appended.
            mem_req: per-worker memory request, in megabytes.
            besteffort: submit as an OAR besteffort/idempotent job.
            venv, to_source: accepted for interface compatibility but
                currently unused by this implementation.
        """
        if name == 'dask-cpu':
            resource_spec = 'nodes=1/core={}'.format(cores)
        elif name == 'dask-gpu':
            # GPU jobs rely on job_extra host filters rather than a spec.
            resource_spec = None
        else:
            raise NotImplementedError
        # Make the job name unique so repeated launches do not collide.
        name += '_' + datetime.datetime.now().strftime('%Y%m%dT%H%M%S')
        # Bug fix: job_extra defaulted to None and was then extended with
        # `+=`, raising TypeError whenever no job_extra was passed and
        # besteffort was False (the default path used by CPUCluster).
        # Normalizing to a fresh list also avoids mutating the caller's
        # list in place. (A no-op `os.path.join(log_dir, 'logs')` call whose
        # result was discarded has been removed.)
        job_extra = [] if job_extra is None else list(job_extra)
        if besteffort:
            job_extra += [' -t besteffort -t idempotent']
        job_extra += [
            '--stdout={}'.format(os.path.join(log_dir, '%jobid%_stdout.txt'))
        ]
        job_extra += [
            '--stderr={}'.format(os.path.join(log_dir, '%jobid%_stderr.txt'))
        ]
        OARCluster.__init__(
            self,
            resource_spec=resource_spec,
            walltime=walltime,
            name=name,
            cores=cores,
            processes=processes,
            memory='{}m'.format(mem_req),
            local_directory=spill_dir,
            extra=extra,
            env_extra=env_extra,
            job_extra=job_extra,
            interface_node=interface_node,
            **kwargs)
class CPUCluster(AlpesCluster):
    """CPU-only OAR cluster: each job reserves *ncpus* cores on one node."""

    def __init__(self, ncpus=1, **kwargs):
        AlpesCluster.__init__(self, cores=ncpus, name='dask-cpu', **kwargs)
class GPUCluster(AlpesCluster):
    def __init__(self, **kwargs):
        # OAR property filter excluding gpuhost23..27; the nested quoting is
        # required to survive the shell layers OAR passes it through —
        # do not "simplify" this string.
        job_extra = [
            '-p \'not host=\'\"\'\"\'gpuhost23\'\"\'\"\' and not host=\'\"\'\"\'gpuhost24\'\"\'\"\' and not host=\'\"\'\"\'gpuhost25\'\"\'\"\' and not host=\'\"\'\"\'gpuhost26\'\"\'\"\' and not host=\'\"\'\"\'gpuhost27\'\"\'\"\'\''
        ]
        # GPU jobs always use a single core; see AlpesCluster for the rest.
        AlpesCluster.__init__(
            self, cores=1, name='dask-gpu', job_extra=job_extra, **kwargs)
| [
"[email protected]"
] | |
b6e513d9b67fab154e3291d6d8ba74ca9d7dc038 | e3ec5f1898ae491fa0afcdcc154fb306fd694f83 | /src/components/typeSpec/buildSupportedTypeTable.py | 87539e6cdfab95db01ac91e1a94ab0de06baf74b | [
"CC-BY-4.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | phoebezhung/raytk | 42397559a76a9ba39308ac03344b4446f64ea04d | b91483ce88b2956d7b23717b11e223d332ca8395 | refs/heads/master | 2023-08-27T05:20:38.062360 | 2021-10-21T04:33:18 | 2021-10-21T04:33:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60 | py | def onCook(dat):
mod.typeSpec.buildSupportedTypeTable(dat)
| [
"[email protected]"
] | |
241ef0bf4ffd075de96f0a553cfb8c7cb1005833 | 78c3b9ca4d10b84ea9ec853da39324ba38ad224e | /commentonx/__init__.py | ebff6b27414a0c3e8e9c9028f145047909b976d3 | [
"MIT"
] | permissive | trickeydan/commentonx | 2a892c47b1a535b23bc8ef044e14b4ef58839135 | b7dbfd6af0f58503acba576033a43be3aabbb0e9 | refs/heads/master | 2022-12-10T06:34:16.528489 | 2018-09-29T23:20:57 | 2018-09-29T23:20:57 | 150,874,538 | 0 | 1 | MIT | 2021-03-20T00:10:25 | 2018-09-29T14:56:32 | HTML | UTF-8 | Python | false | false | 441 | py | from flask import Flask
from flask_scss import Scss
from commentonx.config import config
app = Flask(__name__)
app.config.update(config)
if 'VIEW_CONFIG' in app.config:
# Allow view config access in templates
app.jinja_env.globals['VIEW_CONFIG'] = app.config['VIEW_CONFIG']
else:
app.jinja_env.globals['VIEW_CONFIG'] = {}
app.secret_key = app.config["SESSION_KEY"]
Scss(app)
from commentonx import views # noqa F401, E402
| [
"[email protected]"
] | |
1531be40d6bc6d91bd6b92fa06158a74798f9ea7 | 4f75ac595ef58c8e42b2a711180bbb1832b30aad | /articles/data_python_c/listings_py/mod_immut_parameter.py | bf5a3b308c0b2ebfc75a520857e06e99d15589ed | [] | no_license | amitsaha/notes | 4548554a164a4a423795c3103f3462f3fea9b28b | 6fffe2529d390418616eff71d9b44afb40474278 | refs/heads/master | 2021-05-04T10:56:05.266501 | 2018-01-26T07:11:59 | 2018-01-26T07:11:59 | 8,493,168 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | #!/usr/bin/env python
""" Passing immutable data objects
and returning a modified version.
"""
from __future__ import print_function
def func(astr):
    """Demonstrate str immutability: replace() builds a brand-new object.

    Prints the value and id() of the string before and after the
    replacement, then returns the modified string to the caller.
    """
    print('In func() before modification')
    print('{0} : {1}'.format(astr, id(astr)))
    print()

    replaced = astr.replace('a', 'b')
    print('In func() after modification')
    print('{0} : {1}'.format(replaced, id(replaced)))
    print()
    # hand the new string back — the original object is untouched
    return replaced
if __name__ == '__main__':
    s = str('a string')
    print('Before func()')
    print('{0} : {1}'.format(s,id(s)))
    print()
    # since s is an immutable object, modifications
    # are not possible without creating a new object
    # with the modified string
    # receive the modified string back as the
    # return value
    s = func(s)
    print('After func()')
    print('{0} : {1}'.format(s,id(s)))
    print()
| [
"[email protected]"
] | |
f0be020b789a57fa3ec5391f4ac6e442fe06921d | 26d37aa0560ecc5725b17e965e16d528bce161ff | /schedule/migrations/0016_course_course_active.py | ab457d8c32e3d849dc5154823ba04405c584b5c8 | [] | no_license | dmic23/hga | 25dfaa177f788b309eeb84a77d3ac126d092bac1 | aea67fdb8e5baad1206ecca93aadbea427b7af28 | refs/heads/master | 2021-01-21T14:23:16.402218 | 2017-10-23T18:07:10 | 2017-10-23T18:07:10 | 57,150,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-07-01 07:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the `course_active` boolean flag
    # (default True) to the Course model.
    dependencies = [
        ('schedule', '0015_auto_20160630_0333'),
    ]
    operations = [
        migrations.AddField(
            model_name='course',
            name='course_active',
            field=models.BooleanField(default=True),
        ),
    ]
| [
"[email protected]"
] | |
fef2048a4983d5ccdfe2eb7d26d44112fc13d2fa | 96aac6f1b81a06faf9cf4ca5b30c1ae84223adb4 | /pytorch3d/transforms/transform3d.py | b3bc2f445f19daa530ad2ea3e05a03219b8571f0 | [
"BSD-3-Clause"
] | permissive | rmaries/pytorch3d | 91dc4fc8b70cfdc910ec230c0d4e0405e1b7f41e | 33390b36a2f2e76034ae9569e740a222f63e38a5 | refs/heads/master | 2021-02-18T05:11:04.819684 | 2020-08-28T08:59:52 | 2020-08-28T08:59:52 | 245,164,246 | 0 | 0 | NOASSERTION | 2020-08-28T08:59:53 | 2020-03-05T13:00:30 | null | UTF-8 | Python | false | false | 24,955 | py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import math
import warnings
from typing import Optional
import torch
from .rotation_conversions import _axis_angle_rotation
class Transform3d:
    """
    A Transform3d object encapsulates a batch of N 3D transformations, and knows
    how to transform points and normal vectors. Suppose that t is a Transform3d;
    then we can do the following:

    .. code-block:: python

        N = len(t)
        points = torch.randn(N, P, 3)
        normals = torch.randn(N, P, 3)
        points_transformed = t.transform_points(points)    # => (N, P, 3)
        normals_transformed = t.transform_normals(normals) # => (N, P, 3)

    BROADCASTING
    Transform3d objects supports broadcasting. Suppose that t1 and tN are
    Transform3D objects with len(t1) == 1 and len(tN) == N respectively. Then we
    can broadcast transforms like this:

    .. code-block:: python

        t1.transform_points(torch.randn(P, 3))     # => (P, 3)
        t1.transform_points(torch.randn(1, P, 3))  # => (1, P, 3)
        t1.transform_points(torch.randn(M, P, 3))  # => (M, P, 3)
        tN.transform_points(torch.randn(P, 3))     # => (N, P, 3)
        tN.transform_points(torch.randn(1, P, 3))  # => (N, P, 3)

    COMBINING TRANSFORMS
    Transform3d objects can be combined in two ways: composing and stacking.
    Composing is function composition. Given Transform3d objects t1, t2, t3,
    the following all compute the same thing:

    .. code-block:: python

        y1 = t3.transform_points(t2.transform_points(t1.transform_points(x)))
        y2 = t1.compose(t2).compose(t3).transform_points(x)
        y3 = t1.compose(t2, t3).transform_points(x)

    Composing transforms should broadcast.

    .. code-block:: python

        if len(t1) == 1 and len(t2) == N, then len(t1.compose(t2)) == N.

    We can also stack a sequence of Transform3d objects, which represents
    composition along the batch dimension; then the following should compute the
    same thing.

    .. code-block:: python

        N, M = len(tN), len(tM)
        xN = torch.randn(N, P, 3)
        xM = torch.randn(M, P, 3)
        y1 = torch.cat([tN.transform_points(xN), tM.transform_points(xM)], dim=0)
        y2 = tN.stack(tM).transform_points(torch.cat([xN, xM], dim=0))

    BUILDING TRANSFORMS
    We provide convenience methods for easily building Transform3d objects
    as compositions of basic transforms.

    .. code-block:: python

        # Scale by 0.5, then translate by (1, 2, 3)
        t1 = Transform3d().scale(0.5).translate(1, 2, 3)

        # Scale each axis by a different amount, then translate, then scale
        t2 = Transform3d().scale(1, 3, 3).translate(2, 3, 1).scale(2.0)

        t3 = t1.compose(t2)
        tN = t1.stack(t3, t3)

    BACKPROP THROUGH TRANSFORMS
    When building transforms, we can also parameterize them by Torch tensors;
    in this case we can backprop through the construction and application of
    Transform objects, so they could be learned via gradient descent or
    predicted by a neural network.

    .. code-block:: python

        s1_params = torch.randn(N, requires_grad=True)
        t_params = torch.randn(N, 3, requires_grad=True)
        s2_params = torch.randn(N, 3, requires_grad=True)

        t = Transform3d().scale(s1_params).translate(t_params).scale(s2_params)
        x = torch.randn(N, 3)
        y = t.transform_points(x)
        loss = compute_loss(y)
        loss.backward()
        with torch.no_grad():
            s1_params -= lr * s1_params.grad
            t_params -= lr * t_params.grad
            s2_params -= lr * s2_params.grad

    CONVENTIONS
    We adopt a right-hand coordinate system, meaning that rotation about an axis
    with a positive angle results in a counter clockwise rotation.

    This class assumes that transformations are applied on inputs which
    are row vectors. The internal representation of the Nx4x4 transformation
    matrix is of the form:

    .. code-block:: python

        M = [
                [Rxx, Ryx, Rzx, 0],
                [Rxy, Ryy, Rzy, 0],
                [Rxz, Ryz, Rzz, 0],
                [Tx,  Ty,  Tz,  1],
            ]

    To apply the transformation to points which are row vectors, the M matrix
    can be pre multiplied by the points:

    .. code-block:: python

        points = [[0, 1, 2]]  # (1 x 3) xyz coordinates of a point
        transformed_points = points * M

    """

    def __init__(
        self,
        dtype: torch.dtype = torch.float32,
        device="cpu",
        matrix: Optional[torch.Tensor] = None,
    ):
        """
        Args:
            dtype: The data type of the transformation matrix.
                to be used if `matrix = None`.
            device: The device for storing the implemented transformation.
                If `matrix != None`, uses the device of input `matrix`.
            matrix: A tensor of shape (4, 4) or of shape (minibatch, 4, 4)
                representing the 4x4 3D transformation matrix.
                If `None`, initializes with identity using
                the specified `device` and `dtype`.
        """
        if matrix is None:
            self._matrix = torch.eye(4, dtype=dtype, device=device).view(1, 4, 4)
        else:
            # pyre-fixme[16]: `Tensor` has no attribute `ndim`.
            if matrix.ndim not in (2, 3):
                raise ValueError('"matrix" has to be a 2- or a 3-dimensional tensor.')
            if matrix.shape[-2] != 4 or matrix.shape[-1] != 4:
                raise ValueError(
                    '"matrix" has to be a tensor of shape (minibatch, 4, 4)'
                )
            # set the device from matrix
            device = matrix.device
            self._matrix = matrix.view(-1, 4, 4)

        self._transforms = []  # store transforms to compose
        self._lu = None
        self.device = device

    def __len__(self):
        # Batch size N is the leading dimension of the composed matrix.
        return self.get_matrix().shape[0]

    def compose(self, *others):
        """
        Return a new Transform3d with the transforms to compose stored as
        an internal list.

        Args:
            *others: Any number of Transform3d objects

        Returns:
            A new Transform3d with the stored transforms
        """
        out = Transform3d(device=self.device)
        out._matrix = self._matrix.clone()
        for other in others:
            if not isinstance(other, Transform3d):
                msg = "Only possible to compose Transform3d objects; got %s"
                raise ValueError(msg % type(other))
        out._transforms = self._transforms + list(others)
        return out

    def get_matrix(self):
        """
        Return a matrix which is the result of composing this transform
        with others stored in self.transforms. Where necessary transforms
        are broadcast against each other.
        For example, if self.transforms contains transforms t1, t2, and t3, and
        given a set of points x, the following should be true:

        .. code-block:: python

            y1 = t1.compose(t2, t3).transform(x)
            y2 = t3.transform(t2.transform(t1.transform(x)))
            y1.get_matrix() == y2.get_matrix()

        Returns:
            A transformation matrix representing the composed inputs.
        """
        composed_matrix = self._matrix.clone()
        if len(self._transforms) > 0:
            for other in self._transforms:
                other_matrix = other.get_matrix()
                # Right-multiply: stored transforms apply after self._matrix.
                composed_matrix = _broadcast_bmm(composed_matrix, other_matrix)
        return composed_matrix

    def _get_matrix_inverse(self):
        """
        Return the inverse of self._matrix.
        """
        return torch.inverse(self._matrix)

    def inverse(self, invert_composed: bool = False):
        """
        Returns a new Transform3D object that represents an inverse of the
        current transformation.

        Args:
            invert_composed:
                - True: First compose the list of stored transformations
                  and then apply inverse to the result. This is
                  potentially slower for classes of transformations
                  with inverses that can be computed efficiently
                  (e.g. rotations and translations).
                - False: Invert the individual stored transformations
                  independently without composing them.

        Returns:
            A new Transform3D object contaning the inverse of the original
            transformation.
        """
        tinv = Transform3d(device=self.device)

        if invert_composed:
            # first compose then invert
            tinv._matrix = torch.inverse(self.get_matrix())
        else:
            # self._get_matrix_inverse() implements efficient inverse
            # of self._matrix
            i_matrix = self._get_matrix_inverse()

            # 2 cases:
            if len(self._transforms) > 0:
                # a) Either we have a non-empty list of transforms:
                # Here we take self._matrix and append its inverse at the
                # end of the reverted _transforms list. After composing
                # the transformations with get_matrix(), this correctly
                # right-multiplies by the inverse of self._matrix
                # at the end of the composition.
                tinv._transforms = [t.inverse() for t in reversed(self._transforms)]
                last = Transform3d(device=self.device)
                last._matrix = i_matrix
                tinv._transforms.append(last)
            else:
                # b) Or there are no stored transformations
                # we just set inverted matrix
                tinv._matrix = i_matrix

        return tinv

    def stack(self, *others):
        """Concatenate self with *others* along the batch dimension.

        Note: only the raw matrices are concatenated; pending composition
        lists of the inputs are not carried over to the result.
        """
        transforms = [self] + list(others)
        matrix = torch.cat([t._matrix for t in transforms], dim=0)
        out = Transform3d()
        out._matrix = matrix
        return out

    def transform_points(self, points, eps: Optional[float] = None):
        """
        Use this transform to transform a set of 3D points. Assumes row major
        ordering of the input points.

        Args:
            points: Tensor of shape (P, 3) or (N, P, 3)
            eps: If eps!=None, the argument is used to clamp the
                last coordinate before performing the final division.
                The clamping corresponds to:
                last_coord := (last_coord.sign() + (last_coord==0)) *
                torch.clamp(last_coord.abs(), eps),
                i.e. the last coordinates that are exactly 0 will
                be clamped to +eps.

        Returns:
            points_out: points of shape (N, P, 3) or (P, 3) depending
            on the dimensions of the transform
        """
        points_batch = points.clone()
        if points_batch.dim() == 2:
            points_batch = points_batch[None]  # (P, 3) -> (1, P, 3)
        if points_batch.dim() != 3:
            msg = "Expected points to have dim = 2 or dim = 3: got shape %r"
            raise ValueError(msg % repr(points.shape))

        N, P, _3 = points_batch.shape
        # Append a homogeneous coordinate of 1 to every point.
        ones = torch.ones(N, P, 1, dtype=points.dtype, device=points.device)
        points_batch = torch.cat([points_batch, ones], dim=2)

        composed_matrix = self.get_matrix()
        points_out = _broadcast_bmm(points_batch, composed_matrix)
        denom = points_out[..., 3:]  # denominator
        if eps is not None:
            denom_sign = denom.sign() + (denom == 0.0).type_as(denom)
            denom = denom_sign * torch.clamp(denom.abs(), eps)
        points_out = points_out[..., :3] / denom

        # When transform is (1, 4, 4) and points is (P, 3) return
        # points_out of shape (P, 3)
        if points_out.shape[0] == 1 and points.dim() == 2:
            points_out = points_out.reshape(points.shape)

        return points_out

    def transform_normals(self, normals):
        """
        Use this transform to transform a set of normal vectors.

        Args:
            normals: Tensor of shape (P, 3) or (N, P, 3)

        Returns:
            normals_out: Tensor of shape (P, 3) or (N, P, 3) depending
            on the dimensions of the transform
        """
        if normals.dim() not in [2, 3]:
            msg = "Expected normals to have dim = 2 or dim = 3: got shape %r"
            raise ValueError(msg % (normals.shape,))
        composed_matrix = self.get_matrix()

        # TODO: inverse is bad! Solve a linear system instead
        # Normals transform with the inverse-transpose of the linear part.
        mat = composed_matrix[:, :3, :3]
        normals_out = _broadcast_bmm(normals, mat.transpose(1, 2).inverse())

        # This doesn't pass unit tests. TODO investigate further
        # if self._lu is None:
        #     self._lu = self._matrix[:, :3, :3].transpose(1, 2).lu()
        # normals_out = normals.lu_solve(*self._lu)

        # When transform is (1, 4, 4) and normals is (P, 3) return
        # normals_out of shape (P, 3)
        if normals_out.shape[0] == 1 and normals.dim() == 2:
            normals_out = normals_out.reshape(normals.shape)

        return normals_out

    def translate(self, *args, **kwargs):
        """Compose with a Translate transform built from the given arguments."""
        return self.compose(Translate(device=self.device, *args, **kwargs))

    def scale(self, *args, **kwargs):
        """Compose with a Scale transform built from the given arguments."""
        return self.compose(Scale(device=self.device, *args, **kwargs))

    def rotate_axis_angle(self, *args, **kwargs):
        """Compose with a RotateAxisAngle transform built from the given arguments."""
        return self.compose(RotateAxisAngle(device=self.device, *args, **kwargs))

    def clone(self):
        """
        Deep copy of Transforms object. All internal tensors are cloned
        individually.

        Returns:
            new Transforms object.
        """
        other = Transform3d(device=self.device)
        if self._lu is not None:
            other._lu = [elem.clone() for elem in self._lu]
        other._matrix = self._matrix.clone()
        other._transforms = [t.clone() for t in self._transforms]
        return other

    def to(self, device, copy: bool = False, dtype=None):
        """
        Match functionality of torch.Tensor.to()
        If copy = True or the self Tensor is on a different device, the
        returned tensor is a copy of self with the desired torch.device.
        If copy = False and the self Tensor already has the correct torch.device,
        then self is returned.

        Args:
            device: Device id for the new tensor.
            copy: Boolean indicator whether or not to clone self. Default False.
            dtype: If not None, casts the internal tensor variables
                to a given torch.dtype.

        Returns:
            Transform3d object.
        """
        if not copy and self.device == device:
            return self
        other = self.clone()
        if self.device != device:
            other.device = device
        other._matrix = self._matrix.to(device=device, dtype=dtype)
        for t in other._transforms:
            t.to(device, copy=copy, dtype=dtype)
        return other

    def cpu(self):
        """Return this transform on the CPU (see `to` for copy semantics)."""
        return self.to(torch.device("cpu"))

    def cuda(self):
        """Return this transform on the GPU (see `to` for copy semantics)."""
        return self.to(torch.device("cuda"))
class Translate(Transform3d):
    def __init__(self, x, y=None, z=None, dtype=torch.float32, device: str = "cpu"):
        """Create a batch of 3D translation transforms.

        Either pass a single (N, 3) tensor as ``x`` (with ``y`` and ``z``
        left as None), or pass ``x``, ``y`` and ``z`` separately — each a
        python scalar, torch scalar, or 1D torch tensor — which are
        broadcast against each other and concatenated.
        """
        super().__init__(device=device)
        offsets = _handle_input(x, y, z, dtype, device, "Translate")
        batch = offsets.shape[0]
        mat = torch.eye(4, dtype=dtype, device=device).view(1, 4, 4).repeat(batch, 1, 1)
        # Row-vector convention: the translation lives in the last row.
        mat[:, 3, :3] = offsets
        self._matrix = mat

    def _get_matrix_inverse(self):
        """Invert efficiently by negating the translation row."""
        sign_mask = self._matrix.new_ones([1, 4, 4])
        sign_mask[0, 3, :3] = -1.0
        return self._matrix * sign_mask
class Scale(Transform3d):
    def __init__(self, x, y=None, z=None, dtype=torch.float32, device: str = "cpu"):
        """Create a batch of (possibly non-uniform) scaling transforms.

        Either pass a single scale ``x`` — a scalar, an (N,) tensor (uniform
        per batch element), or an (N, 3) tensor (per-axis factors) — or pass
        separate ``x``, ``y``, ``z`` factors that are broadcast together.
        """
        super().__init__(device=device)
        factors = _handle_input(x, y, z, dtype, device, "scale", allow_singleton=True)
        batch = factors.shape[0]

        # TODO: Can we do this all in one go somehow?
        mat = torch.eye(4, dtype=dtype, device=device).view(1, 4, 4).repeat(batch, 1, 1)
        for axis in range(3):
            mat[:, axis, axis] = factors[:, axis]
        self._matrix = mat

    def _get_matrix_inverse(self):
        """Invert by taking reciprocals of the diagonal scale factors."""
        diag = torch.stack([self._matrix[:, i, i] for i in range(4)], dim=1)
        return torch.diag_embed(1.0 / diag, dim1=1, dim2=2)
class Rotate(Transform3d):
    def __init__(
        self, R, dtype=torch.float32, device: str = "cpu", orthogonal_tol: float = 1e-5
    ):
        """
        Create a new Transform3d representing 3D rotation using a rotation
        matrix as the input.

        Args:
            R: a tensor of shape (3, 3) or (N, 3, 3)
            orthogonal_tol: tolerance for the test of the orthogonality of R
        """
        super().__init__(device=device)
        if R.dim() == 2:
            R = R[None]
        if R.shape[-2:] != (3, 3):
            msg = "R must have shape (3, 3) or (N, 3, 3); got %s"
            raise ValueError(msg % repr(R.shape))
        R = R.to(dtype=dtype).to(device=device)
        # Only warns (does not raise) when R fails the orthogonality/det test.
        _check_valid_rotation_matrix(R, tol=orthogonal_tol)
        N = R.shape[0]
        mat = torch.eye(4, dtype=dtype, device=device)
        mat = mat.view(1, 4, 4).repeat(N, 1, 1)
        mat[:, :3, :3] = R
        self._matrix = mat

    def _get_matrix_inverse(self):
        """
        Return the inverse of self._matrix.
        """
        # For a rotation matrix the inverse is simply the transpose.
        return self._matrix.permute(0, 2, 1).contiguous()
class RotateAxisAngle(Rotate):
    def __init__(
        self,
        angle,
        axis: str = "X",
        degrees: bool = True,
        dtype=torch.float64,
        device: str = "cpu",
    ):
        """
        Create a new Transform3d representing 3D rotation about an axis
        by an angle.

        Assuming a right-hand coordinate system, positive rotation angles result
        in a counter clockwise rotation.

        Args:
            angle:
                - A torch tensor of shape (N,)
                - A python scalar
                - A torch scalar
            axis:
                string: one of ["X", "Y", "Z"] indicating the axis about which
                to rotate.
                NOTE: All batch elements are rotated about the same axis.
            degrees: if True (default), `angle` is interpreted in degrees and
                converted to radians below.
        """
        axis = axis.upper()
        if axis not in ["X", "Y", "Z"]:
            msg = "Expected axis to be one of ['X', 'Y', 'Z']; got %s"
            raise ValueError(msg % axis)
        angle = _handle_angle_input(angle, dtype, device, "RotateAxisAngle")
        angle = (angle / 180.0 * math.pi) if degrees else angle
        # We assume the points on which this transformation will be applied
        # are row vectors. The rotation matrix returned from _axis_angle_rotation
        # is for transforming column vectors. Therefore we transpose this matrix.
        # R will always be of shape (N, 3, 3)
        R = _axis_angle_rotation(axis, angle).transpose(1, 2)
        super().__init__(device=device, R=R)
def _handle_coord(c, dtype, device):
"""
Helper function for _handle_input.
Args:
c: Python scalar, torch scalar, or 1D torch tensor
Returns:
c_vec: 1D torch tensor
"""
if not torch.is_tensor(c):
c = torch.tensor(c, dtype=dtype, device=device)
if c.dim() == 0:
c = c.view(1)
return c
def _handle_input(x, y, z, dtype, device, name: str, allow_singleton: bool = False):
    """
    Parse the coordinate arguments of a transform builder into an (N, 3) tensor.

    Accepted input forms:

    Case I: ``x`` is already an (N, 3) tensor and ``y``/``z`` are None — it is
    returned unchanged.

    Case II: each of ``x``, ``y``, ``z`` is a python scalar, torch scalar, or
    torch tensor of shape (N, 1) or (1, 1); they are broadcast to a common
    batch size and concatenated into an (N, 3) tensor.

    Case III (only if ``allow_singleton=True``): ``y`` and ``z`` are None and
    ``x`` alone stands in for all three coordinates.

    Returns:
        xyz: Tensor of shape (N, 3)
    """
    # Case I: a ready-made (N, 3) tensor passes straight through.
    if torch.is_tensor(x) and x.dim() == 2:
        if x.shape[1] != 3:
            raise ValueError(
                "Expected tensor of shape (N, 3); got %r (in %s)" % (x.shape, name)
            )
        if y is not None or z is not None:
            raise ValueError("Expected y and z to be None (in %s)" % name)
        return x

    # Case III: one value stands in for all three coordinates.
    if allow_singleton and y is None and z is None:
        y, z = x, x

    # Case II: coerce each coordinate to 1D, then broadcast to a common size.
    coords = [_handle_coord(c, dtype, device) for c in (x, y, z)]
    sizes = [c.shape[0] for c in coords]
    N = max(sizes)
    if any(s != 1 and s != N for s in sizes):
        raise ValueError("Got non-broadcastable sizes %r (in %s)" % (sizes, name))
    return torch.stack([c.expand(N) for c in coords], dim=1)
def _handle_angle_input(x, dtype, device: str, name: str):
    """
    Coerce an angle argument to a 1D tensor of shape (N,).

    The input can be a torch tensor of shape (N,), a python scalar,
    or a torch scalar.
    """
    if torch.is_tensor(x) and x.dim() > 1:
        raise ValueError(
            "Expected tensor of shape (N,); got %r (in %s)" % (x.shape, name)
        )
    return _handle_coord(x, dtype, device)
def _broadcast_bmm(a, b):
"""
Batch multiply two matrices and broadcast if necessary.
Args:
a: torch tensor of shape (P, K) or (M, P, K)
b: torch tensor of shape (N, K, K)
Returns:
a and b broadcast multipled. The output batch dimension is max(N, M).
To broadcast transforms across a batch dimension if M != N then
expect that either M = 1 or N = 1. The tensor with batch dimension 1 is
expanded to have shape N or M.
"""
if a.dim() == 2:
a = a[None]
if len(a) != len(b):
if not ((len(a) == 1) or (len(b) == 1)):
msg = "Expected batch dim for bmm to be equal or 1; got %r, %r"
raise ValueError(msg % (a.shape, b.shape))
if len(a) == 1:
a = a.expand(len(b), -1, -1)
if len(b) == 1:
b = b.expand(len(a), -1, -1)
return a.bmm(b)
def _check_valid_rotation_matrix(R, tol: float = 1e-7):
"""
Determine if R is a valid rotation matrix by checking it satisfies the
following conditions:
``RR^T = I and det(R) = 1``
Args:
R: an (N, 3, 3) matrix
Returns:
None
Emits a warning if R is an invalid rotation matrix.
"""
N = R.shape[0]
eye = torch.eye(3, dtype=R.dtype, device=R.device)
eye = eye.view(1, 3, 3).expand(N, -1, -1)
orthogonal = torch.allclose(R.bmm(R.transpose(1, 2)), eye, atol=tol)
det_R = torch.det(R)
no_distortion = torch.allclose(det_R, torch.ones_like(det_R))
if not (orthogonal and no_distortion):
msg = "R is not a valid rotation matrix"
warnings.warn(msg)
return
| [
"[email protected]"
] | |
8e2fe8f139ca859c953ff1c57cb43a25d9d8b472 | a479a5773fd5607f96c3b84fed57733fe39c3dbb | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs_/tag64/__init__.py | d8c353855f42c909c46d6f43473dde4e55eae14c | [
"Apache-2.0"
] | permissive | napalm-automation/napalm-yang | 839c711e9294745534f5fbbe115e0100b645dbca | 9148e015b086ebe311c07deb92e168ea36fd7771 | refs/heads/develop | 2021-01-11T07:17:20.226734 | 2019-05-15T08:43:03 | 2019-05-15T08:43:03 | 69,226,025 | 65 | 64 | Apache-2.0 | 2019-05-15T08:43:24 | 2016-09-26T07:48:42 | Python | UTF-8 | Python | false | false | 12,230 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
# NOTE: auto-generated pyangbind binding; regenerate from the YANG model
# instead of editing this class by hand.
class tag64(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/subTLVs/subTLVs/tag64. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines sub-TLV 2.
    """

    # __slots__ avoids a per-instance __dict__; "__state" is name-mangled to
    # _tag64__state and holds the single child container.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "tag64"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # The read-only (is_config=False) 'state' child container is built
        # through pyangbind's dynamic class factory.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        # Copy-constructor behaviour: one positional argument must be another
        # binding exposing the same elements; only changed values are copied.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Instance path when attached to a parent tree; otherwise the static
        # schema path of this node.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-ipv6-reachability",
                "prefixes",
                "prefix",
                "subTLVs",
                "subTLVs",
                "tag64",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs/tag64/state (container)

        YANG Description: State parameters of sub-TLV 2.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs/tag64/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of sub-TLV 2.
        """
        # Unwrap a previously-wrapped value before re-wrapping it below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only public accessor (config: false, so no public setter).
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
from . import state
# NOTE: auto-generated pyangbind binding for the openconfig-network-instance-l2
# variant of the model; the generator intentionally emits it as a second,
# near-identical definition. Regenerate from the YANG model; do not hand-edit.
class tag64(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/subTLVs/subTLVs/tag64. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines sub-TLV 2.
    """

    # __slots__ avoids a per-instance __dict__; "__state" is name-mangled to
    # _tag64__state and holds the single child container.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "tag64"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # The read-only (is_config=False) 'state' child container is built
        # through pyangbind's dynamic class factory.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        # Copy-constructor behaviour: one positional argument must be another
        # binding exposing the same elements; only changed values are copied.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Instance path when attached to a parent tree; otherwise the static
        # schema path of this node.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-ipv6-reachability",
                "prefixes",
                "prefix",
                "subTLVs",
                "subTLVs",
                "tag64",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs/tag64/state (container)

        YANG Description: State parameters of sub-TLV 2.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs/tag64/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of sub-TLV 2.
        """
        # Unwrap a previously-wrapped value before re-wrapping it below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only public accessor (config: false, so no public setter).
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
| [
"[email protected]"
] | |
882fd9151d05de09700e7726253804eba88370c1 | 4c424214344a3d8cc6fe0978f462410597619e71 | /archer_apps/accounts/serializers.py | 156d880a23db7748bbae53fdcc9297944bcc0f08 | [] | no_license | SergeZazik/Archer-Test | 65e903b7859c4f9a852d0a88c31a2222f0aebd43 | 6c858f401b95f52551fbf8096cdb17507da399cf | refs/heads/master | 2020-04-08T12:17:49.941503 | 2018-11-27T15:33:09 | 2018-11-27T15:33:09 | 159,340,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,308 | py | from .models import CustomUser
from django.contrib.auth import authenticate
from rest_framework import serializers
from rest_framework_jwt.serializers import JSONWebTokenSerializer
class CustomJSONWebTokenSerializer(JSONWebTokenSerializer):
    """JWT login serializer returning the user's cached token and primary key."""

    def validate(self, attrs):
        """Authenticate the submitted credentials.

        Returns a payload dict with 'token' (the user's JWT) and 'user'
        (the user id). Raises ValidationError when either field is missing,
        the credentials are wrong, or the account is inactive.
        """
        credentials = {
            self.username_field: attrs.get(self.username_field),
            'password': attrs.get('password'),
        }

        # Guard clause: both fields must be present and non-empty.
        if not all(credentials.values()):
            msg = 'Must include "{username_field}" and "password".'
            raise serializers.ValidationError(
                msg.format(username_field=self.username_field)
            )

        user = authenticate(**credentials)
        if user is None:
            raise serializers.ValidationError(
                'Unable to log in with provided credentials.'
            )
        if not user.is_active:
            raise serializers.ValidationError('User account is disabled.')

        return {
            'token': user.jwt_token,
            'user': user.id,
        }
class CustomUserSerializer(serializers.ModelSerializer):
    """Serializer exposing every CustomUser field except internal bookkeeping."""

    class Meta:
        model = CustomUser
        # Permission relations and last_login are server-side details and are
        # intentionally hidden from API consumers.
        exclude = ('groups', 'user_permissions', 'last_login')
| [
"[email protected]"
] | |
9d35cc4ff98d0ade4747b6614d96798f75ee21ff | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-drs/huaweicloudsdkdrs/v3/model/user_role_vo.py | b2e30bea30f52f1267a762acaee3d4e2c5cb897b | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,203 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
# NOTE: generated huaweicloud SDK model (DRS v3); keep structure in sync with
# the generator's conventions.
class UserRoleVO:
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    # Attribute names whose values must be masked as "****" in to_dict().
    sensitive_list = []

    openapi_types = {
        'role': 'str',
        'comment': 'str',
        'is_transfer': 'bool',
        'privileges': 'str',
        'inherits_roles': 'list[str]',
        'selected': 'bool'
    }

    attribute_map = {
        'role': 'role',
        'comment': 'comment',
        'is_transfer': 'is_transfer',
        'privileges': 'privileges',
        'inherits_roles': 'inherits_roles',
        'selected': 'selected'
    }

    def __init__(self, role=None, comment=None, is_transfer=None, privileges=None, inherits_roles=None, selected=None):
        """UserRoleVO - a model defined in huaweicloud sdk"""

        self._role = None
        self._comment = None
        self._is_transfer = None
        self._privileges = None
        self._inherits_roles = None
        self._selected = None
        self.discriminator = None

        # role, is_transfer and privileges are required; the remaining fields
        # are only assigned when explicitly supplied.
        self.role = role
        if comment is not None:
            self.comment = comment
        self.is_transfer = is_transfer
        self.privileges = privileges
        if inherits_roles is not None:
            self.inherits_roles = inherits_roles
        if selected is not None:
            self.selected = selected

    @property
    def role(self):
        """Gets the role of this UserRoleVO.

        Role name.

        :return: The role of this UserRoleVO.
        :rtype: str
        """
        return self._role

    @role.setter
    def role(self, role):
        """Sets the role of this UserRoleVO.

        Role name.

        :param role: The role of this UserRoleVO.
        :type: str
        """
        self._role = role

    @property
    def comment(self):
        """Gets the comment of this UserRoleVO.

        Description of the role.

        :return: The comment of this UserRoleVO.
        :rtype: str
        """
        return self._comment

    @comment.setter
    def comment(self, comment):
        """Sets the comment of this UserRoleVO.

        Description of the role.

        :param comment: The comment of this UserRoleVO.
        :type: str
        """
        self._comment = comment

    @property
    def is_transfer(self):
        """Gets the is_transfer of this UserRoleVO.

        Whether migration is supported.

        :return: The is_transfer of this UserRoleVO.
        :rtype: bool
        """
        return self._is_transfer

    @is_transfer.setter
    def is_transfer(self, is_transfer):
        """Sets the is_transfer of this UserRoleVO.

        Whether migration is supported.

        :param is_transfer: The is_transfer of this UserRoleVO.
        :type: bool
        """
        self._is_transfer = is_transfer

    @property
    def privileges(self):
        """Gets the privileges of this UserRoleVO.

        Privileges granted to the role.

        :return: The privileges of this UserRoleVO.
        :rtype: str
        """
        return self._privileges

    @privileges.setter
    def privileges(self, privileges):
        """Sets the privileges of this UserRoleVO.

        Privileges granted to the role.

        :param privileges: The privileges of this UserRoleVO.
        :type: str
        """
        self._privileges = privileges

    @property
    def inherits_roles(self):
        """Gets the inherits_roles of this UserRoleVO.

        List of inherited roles.

        :return: The inherits_roles of this UserRoleVO.
        :rtype: list[str]
        """
        return self._inherits_roles

    @inherits_roles.setter
    def inherits_roles(self, inherits_roles):
        """Sets the inherits_roles of this UserRoleVO.

        List of inherited roles.

        :param inherits_roles: The inherits_roles of this UserRoleVO.
        :type: list[str]
        """
        self._inherits_roles = inherits_roles

    @property
    def selected(self):
        """Gets the selected of this UserRoleVO.

        Whether the role is selected.

        :return: The selected of this UserRoleVO.
        :rtype: bool
        """
        return self._selected

    @selected.setter
    def selected(self, selected):
        """Sets the selected of this UserRoleVO.

        Whether the role is selected.

        :param selected: The selected of this UserRoleVO.
        :type: bool
        """
        self._selected = selected

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask values of attributes listed in sensitive_list.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        # Python 2 needs the default encoding forced to UTF-8 so non-ASCII
        # docile values serialize correctly.
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")

        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, UserRoleVO):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
c780bb18ed86e9e63411028da9535cea55bfe611 | 0607fa7255fa47608407b3cfd819e83d55ba9eab | /InvenTree/part/test_part.py | 84d9900aff2a8c4914aec16079bbfd4198715ff4 | [
"MIT"
] | permissive | IsThisNameGoodEnough/InvenTree | f7d71aa8c33f69654b2bb4d3827d4a60290df8ad | fa789036e0ae7d56ced3c9e1f2d2ff596983a365 | refs/heads/master | 2020-07-26T02:31:34.316571 | 2019-09-13T14:14:45 | 2019-09-13T14:14:45 | 208,505,299 | 0 | 0 | MIT | 2019-09-14T21:20:24 | 2019-09-14T21:20:24 | null | UTF-8 | Python | false | false | 2,536 | py | # Tests for the Part model
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
import os
from .models import Part
from .models import rename_part_image, match_part_names
from .templatetags import inventree_extras
class TemplateTagTest(TestCase):
    """ Tests for the custom template tag code """

    def test_multiply(self):
        # multiply is a simple arithmetic helper exposed to templates.
        self.assertEqual(inventree_extras.multiply(3, 5), 15)

    def test_version(self):
        # The version must be a string; its exact value changes per release.
        self.assertEqual(type(inventree_extras.inventree_version()), str)

    def test_hash(self):
        # The short git commit hash is always 7 characters long.
        hash = inventree_extras.inventree_commit()
        self.assertEqual(len(hash), 7)

    def test_github(self):
        # The project URL must point at the GitHub repository.
        self.assertIn('github.com', inventree_extras.inventree_github())
class PartTest(TestCase):
    """ Tests for the Part model """

    # Fixture data loaded before each test.
    fixtures = [
        'category',
        'part',
        'location',
    ]

    def setUp(self):
        # Convenience handles to fixture parts reused across tests.
        self.R1 = Part.objects.get(name='R_2K2_0805')
        self.R2 = Part.objects.get(name='R_4K7_0603')
        self.C1 = Part.objects.get(name='C_22N_0805')

    def test_str(self):
        # __str__ renders "<IPN> | <name> | <description>".
        p = Part.objects.get(pk=100)
        self.assertEqual(str(p), "BOB | Bob | A2 - Can we build it?")

    def test_metadata(self):
        self.assertEqual(self.R1.name, 'R_2K2_0805')
        self.assertEqual(self.R1.get_absolute_url(), '/part/3/')

    def test_category(self):
        # category_path renders the full category chain; a part without a
        # category yields an empty path.
        self.assertEqual(str(self.C1.category), 'Electronics/Capacitors - Capacitors')

        orphan = Part.objects.get(name='Orphan')
        self.assertIsNone(orphan.category)
        self.assertEqual(orphan.category_path, '')

    def test_rename_img(self):
        # Uploaded part images are renamed to part_<pk>_img.<ext>; the
        # extension is preserved only when the original name has one.
        img = rename_part_image(self.R1, 'hello.png')
        self.assertEqual(img, os.path.join('part_images', 'part_3_img.png'))

        img = rename_part_image(self.R2, 'test')
        self.assertEqual(img, os.path.join('part_images', 'part_4_img'))

    def test_stock(self):
        # No stock of any resistors
        res = Part.objects.filter(description__contains='resistor')
        for r in res:
            self.assertEqual(r.total_stock, 0)
            self.assertEqual(r.available_stock, 0)

    def test_barcode(self):
        # The generated barcode payload embeds the app name and part name.
        barcode = self.R1.format_barcode()
        self.assertIn('InvenTree', barcode)
        self.assertIn(self.R1.name, barcode)

    def test_copy(self):
        # deepCopy must succeed when copying both image and BOM data.
        self.R2.deepCopy(self.R1, image=True, bom=True)

    def test_match_names(self):
        # Fuzzy name matching should find at least one candidate.
        matches = match_part_names('M2x5 LPHS')
        self.assertTrue(len(matches) > 0)
| [
"[email protected]"
] | |
10e80f5b24cd38ea993eb8e06b1e35bb38dbfead | 4505ae4b6fee0e32d799f22c32b18f79884daef4 | /src/keras/examples/cifar10_cnn_capsule.py | f5a2dea0a3da8048b1fbbdac9eb14dc7af02a594 | [
"MIT",
"Apache-2.0"
] | permissive | lu791019/iii_HA_Image_Recognition_DL | 5cde9c2d0c06f8fe3fb69991b27fda87d42450e1 | d5f56d62af6d3aac1c216ca4ff309db08a8c9072 | refs/heads/master | 2020-08-03T06:56:05.345175 | 2019-09-29T13:20:24 | 2019-09-29T13:20:24 | 211,660,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,408 | py | """
This example trains a simple CNN-Capsule Network on the CIFAR10 data set.
Without Data Augmentation:
It gets to 75% validation accuracy in 10 epochs, 79% after 15 epochs,
and overfitting after 20 epochs
With Data Augmentation:
It gets to 75% validation accuracy in 10 epochs, 79% after 15 epochs,
and 83% after 30 epochs.
The highest achieved validation accuracy is 83.79% after 50 epochs.
This is a fast implementation that takes just 20s/epoch on a GTX 1070 GPU.
The paper "Dynamic Routing Between Capsules": https://arxiv.org/abs/1710.09829
"""
from __future__ import print_function
from keras import activations
from keras import backend as K
from keras import layers
from keras import utils
from keras.datasets import cifar10
from keras.models import Model
from keras.preprocessing.image import ImageDataGenerator
def squash(x, axis=-1):
    """The Squashing Function.

    Nonlinear activation used by the Capsule layer: each vector is rescaled
    (direction preserved) so that short vectors shrink towards zero while
    long vectors approach unit length.

    # Arguments
        x: Input Tensor.
        axis: Integer axis along which the squashing function is to be applied.

    # Returns
        Tensor with scaled value of the input tensor
    """
    # K.epsilon() keeps the norm strictly positive for numerical stability.
    squared_norm = K.sum(K.square(x), axis, keepdims=True) + K.epsilon()
    scale_factor = K.sqrt(squared_norm) / (0.5 + squared_norm)
    return x * scale_factor
def margin_loss(y_true, y_pred):
    """Margin loss

    # Arguments
        y_true: tensor of true targets.
        y_pred: tensor of predicted targets.

    # Returns
        Tensor with one scalar loss entry per sample.
    """
    downweight = 0.5   # weight of the absent-class term
    margin = 0.1
    # Penalty for present classes whose capsule length falls below 1 - margin.
    positive_cost = y_true * K.square(K.relu(1 - margin - y_pred))
    # Penalty for absent classes whose capsule length exceeds the margin.
    negative_cost = downweight * (1 - y_true) * K.square(K.relu(y_pred - margin))
    return K.sum(positive_cost + negative_cost, axis=-1)
class Capsule(layers.Layer):
    """Capsule Network

    A Capsule Network Layer implementation in Keras
    There are two versions of Capsule Networks.
    One is similar to dense layer (for the fixed-shape input),
    and the other is similar to time distributed dense layer
    (for inputs of varied length).

    The input shape of Capsule must be (batch_size,
                                        input_num_capsule,
                                        input_dim_capsule
                                        )
    and the output shape is (batch_size,
                             num_capsule,
                             dim_capsule
                             )

    The Capsule implementation is from https://github.com/bojone/Capsule/

    # Arguments
        num_capsule: An integer, the number of capsules.
        dim_capsule: An integer, the dimensions of the capsule.
        routings: An integer, the number of routings.
        share_weights: A boolean, sets weight sharing between layers.
        activation: A string, the activation function to be applied.
    """

    def __init__(self,
                 num_capsule,
                 dim_capsule,
                 routings=3,
                 share_weights=True,
                 activation='squash',
                 **kwargs):
        super(Capsule, self).__init__(**kwargs)
        self.num_capsule = num_capsule
        self.dim_capsule = dim_capsule
        self.routings = routings
        self.share_weights = share_weights
        # 'squash' selects the module-level squashing function; anything else
        # is resolved through Keras' activation registry.
        if activation == 'squash':
            self.activation = squash
        else:
            self.activation = activations.get(activation)

    def build(self, input_shape):
        # Kernel maps each input capsule vector to num_capsule * dim_capsule
        # outputs; with share_weights the same kernel is applied to every
        # input capsule (conv1d), otherwise each gets its own (local_conv1d).
        input_dim_capsule = input_shape[-1]
        if self.share_weights:
            self.kernel = self.add_weight(
                name='capsule_kernel',
                shape=(1, input_dim_capsule,
                       self.num_capsule * self.dim_capsule),
                initializer='glorot_uniform',
                trainable=True)
        else:
            input_num_capsule = input_shape[-2]
            self.kernel = self.add_weight(
                name='capsule_kernel',
                shape=(input_num_capsule, input_dim_capsule,
                       self.num_capsule * self.dim_capsule),
                initializer='glorot_uniform',
                trainable=True)

    def call(self, inputs, **kwargs):
        """Following the routing algorithm from Hinton's paper,
        but replace b = b + <u,v> with b = <u,v>.

        This change can improve the feature representation of the capsule.

        However, you can replace
            b = K.batch_dot(outputs, hat_inputs, [2, 3])
        with
            b += K.batch_dot(outputs, hat_inputs, [2, 3])
        to get standard routing.
        """
        # Predictions ("hat" vectors) of every output capsule from every
        # input capsule.
        if self.share_weights:
            hat_inputs = K.conv1d(inputs, self.kernel)
        else:
            hat_inputs = K.local_conv1d(inputs, self.kernel, [1], [1])

        batch_size = K.shape(inputs)[0]
        input_num_capsule = K.shape(inputs)[1]
        hat_inputs = K.reshape(hat_inputs,
                               (batch_size, input_num_capsule,
                                self.num_capsule, self.dim_capsule))
        hat_inputs = K.permute_dimensions(hat_inputs, (0, 2, 1, 3))

        # Routing logits, refined over self.routings iterations.
        # (A leftover debug print of self.routings was removed here.)
        b = K.zeros_like(hat_inputs[:, :, :, 0])
        for i in range(self.routings):
            c = K.softmax(b, 1)
            o = self.activation(K.batch_dot(c, hat_inputs, [2, 2]))
            if i < self.routings - 1:
                b = K.batch_dot(o, hat_inputs, [2, 3])
                if K.backend() == 'theano':
                    o = K.sum(o, axis=1)

        return o

    def compute_output_shape(self, input_shape):
        return None, self.num_capsule, self.dim_capsule
# Training hyperparameters.
batch_size = 128
num_classes = 10
epochs = 100

# Load CIFAR10; scale pixels into [0, 1] and one-hot encode the labels.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
y_train = utils.to_categorical(y_train, num_classes)
y_test = utils.to_categorical(y_test, num_classes)

# A simple Conv2D model
input_image = layers.Input(shape=(None, None, 3))
x = layers.Conv2D(64, (3, 3), activation='relu')(input_image)
x = layers.Conv2D(64, (3, 3), activation='relu')(x)
x = layers.AveragePooling2D((2, 2))(x)
x = layers.Conv2D(128, (3, 3), activation='relu')(x)
x = layers.Conv2D(128, (3, 3), activation='relu')(x)

# Now, we reshape it to (batch_size, input_num_capsule, input_dim_capsule)
# then connect a capsule layer.
# The output of final model is the lengths of 10 capsules, which have 16 dimensions.
# The length of the output vector of the capsule expresses the probability of
# existence of the entity, so the problem becomes a 10 two-classification problem.
x = layers.Reshape((-1, 128))(x)
capsule = Capsule(10, 16, 3, True)(x)
output = layers.Lambda(lambda z: K.sqrt(K.sum(K.square(z), 2)))(capsule)
model = Model(inputs=input_image, outputs=output)

# Margin loss is used
model.compile(loss=margin_loss, optimizer='adam', metrics=['accuracy'])
model.summary()

# Compare the performance with and without data augmentation
data_augmentation = True

if not data_augmentation:
    print('Not using data augmentation.')
    model.fit(
        x_train,
        y_train,
        batch_size=batch_size,
        epochs=epochs,
        validation_data=(x_test, y_test),
        shuffle=True)
else:
    print('Using real-time data augmentation.')
    # This will do preprocessing and real-time data augmentation:
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by dataset std
        samplewise_std_normalization=False,  # divide each input by its std
        zca_whitening=False,  # apply ZCA whitening
        zca_epsilon=1e-06,  # epsilon for ZCA whitening
        rotation_range=0,  # randomly rotate images in 0 to 180 degrees
        width_shift_range=0.1,  # randomly shift images horizontally
        height_shift_range=0.1,  # randomly shift images vertically
        shear_range=0.,  # set range for random shear
        zoom_range=0.,  # set range for random zoom
        channel_shift_range=0.,  # set range for random channel shifts
        # set mode for filling points outside the input boundaries
        fill_mode='nearest',
        cval=0.,  # value used for fill_mode = "constant"
        horizontal_flip=True,  # randomly flip images
        vertical_flip=False,  # randomly flip images
        # set rescaling factor (applied before any other transformation)
        rescale=None,
        # set function that will be applied on each input
        preprocessing_function=None,
        # image data format, either "channels_first" or "channels_last"
        data_format=None,
        # fraction of images reserved for validation (strictly between 0 and 1)
        validation_split=0.0)

    # Compute quantities required for feature-wise normalization
    # (std, mean, and principal components if ZCA whitening is applied).
    datagen.fit(x_train)

    # Fit the model on the batches generated by datagen.flow().
    model.fit_generator(
        datagen.flow(x_train, y_train, batch_size=batch_size),
        epochs=epochs,
        validation_data=(x_test, y_test),
        workers=4)
| [
"[email protected]"
] | |
f304df27684fea353834a729f98e0123e8e0d1a3 | 1f8812be38ff5dfc2bf8488e757077ebae1791be | /apps/askfm/migrations/0002_question_time.py | c67c1931e178fe5fd55e4d4a8669cc226355fdf9 | [
"MIT"
] | permissive | Morsa11/AskFmClone | d51e28a2568a2678af488fcbda63c2b1a23943e3 | 50ded5126926989627b7aa0fb445da5a8a4a5d68 | refs/heads/master | 2020-04-25T21:46:03.899930 | 2016-12-13T07:51:57 | 2016-12-13T07:51:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.utils.timezone import utc
import datetime
class Migration(migrations.Migration):
    """Adds the auto-populated creation timestamp to askfm.Question."""

    dependencies = [
        ('askfm', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='question',
            name='time',
            # The hard-coded default only back-fills pre-existing rows;
            # preserve_default=False tells Django to drop it afterwards, and
            # auto_now_add takes over for new rows.
            field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2016, 8, 15, 18, 14, 24, 455606, tzinfo=utc)),
            preserve_default=False,
        ),
    ]
| [
"[email protected]"
] | |
61e6312cb372d66857c1f7cfdaa20a156aaf971c | 82e18cc7b20e98b90b739618d6517b384f1f0cf5 | /tests/test_defaults.py | 85f5a74c223c24d2a6a2aa0c8b5d9e7364408c41 | [
"MIT"
] | permissive | atng/draftjs_exporter | 9480600f04843bce700dce73080d5527c896a78e | df5e24a69301e6c78fae74ec62615d6444773980 | refs/heads/master | 2020-12-28T11:24:01.953669 | 2020-02-04T22:56:45 | 2020-02-04T22:56:45 | 238,312,334 | 0 | 0 | MIT | 2020-02-04T21:38:33 | 2020-02-04T21:38:33 | null | UTF-8 | Python | false | false | 615 | py | import unittest
from draftjs_exporter.defaults import BLOCK_MAP, STYLE_MAP, code_block, render_children
from draftjs_exporter.dom import DOM
class TestDefaults(unittest.TestCase):
    """Sanity checks for the exporter's default block/style configuration."""

    def test_default_block_map(self):
        self.assertIsInstance(BLOCK_MAP, object)

    def test_default_style_map(self):
        self.assertIsInstance(STYLE_MAP, object)

    def test_render_children(self):
        # render_children simply forwards the pre-rendered children value.
        self.assertEqual(render_children({'children': 'test'}), 'test')

    def test_render_code_block(self):
        # Code blocks wrap their children in <pre><code>.
        self.assertEqual(DOM.render_debug(code_block({'children': 'test'})), '<pre><code>test</code></pre>')
| [
"[email protected]"
] | |
1caefba7b7e4fdb55fbe74db57fc8e404ddd15a7 | e1e08ca2df1caadc30b5b62263fa1e769d4904d8 | /cps/modules/usercp.py | c42b820652056203e9e39da4353a725554cbf8b9 | [] | no_license | tiench189/ClassbookStore | 509cedad5cc4109b8fb126ad59e25b922dfae6be | 4fff9bc6119d9ec922861cbecf23a3f676551485 | refs/heads/master | 2020-12-02T07:48:26.575023 | 2017-07-10T02:45:09 | 2017-07-10T02:45:09 | 96,728,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,411 | py | # -*- coding: utf-8 -*-
__author__ = 'tanbm'
from datetime import datetime
def addzero(num):
    """Return *num* as a string left-padded with zeros to at least two digits.

    Used to build fixed-width date/time tokens, e.g. 7 -> "07", 12 -> "12".
    """
    # str.zfill is sign-aware, so (unlike the previous manual prefix) a
    # negative input no longer produces strings like "0-5".
    return str(num).zfill(2)
def user_get_id_cp(member_id, db):
    """Return the id of the CP (content provider) that created *member_id*.

    Falls back to the member's own id when no creator information is
    available (e.g. the account is an admin/root user).
    """
    info = user_get_info(member_id, db)
    try:
        return info['user_cp_info']['cp_id']
    except (KeyError, TypeError):
        # 'user_cp_info' may be a placeholder *string* (no creator found),
        # which raises TypeError on subscripting. The previous bare `except:`
        # also swallowed KeyboardInterrupt/SystemExit.
        return info['user_info']['id']
def user_get_info(member_id, db):
    """Return profile info for *member_id* plus the CP account that created it.

    Returns:
        dict with keys 'user_info' and 'user_cp_info' on success. When the
        creator cannot be resolved, 'user_cp_info' is a placeholder string
        and the user is flagged as admin. When the user itself does not
        exist, an error *string* is returned (shape kept for backward
        compatibility with existing callers).
    """
    rows = db(db.auth_user.id == member_id).select()
    if len(rows) <= 0:
        return "Không tìm thấy thông tin"
    data_user = rows[0]
    info = dict()
    info['user_info'] = dict(id=data_user.id, first_name=data_user.first_name,
                             last_name=data_user.last_name,
                             email=data_user.email, user_name=data_user.username)
    try:
        user_cp = db(db.auth_user.id == data_user.created_by).select()[0]
        info['user_cp_info'] = dict(cp_id=user_cp.id,
                                    cp_first_name=user_cp.first_name,
                                    cp_last_name=user_cp.last_name,
                                    cp_email=user_cp.email,
                                    cp_user_name=user_cp.username)
        info['user_info']['is_admin'] = False
    except (IndexError, KeyError, AttributeError):
        # No creator row (e.g. created_by is empty): the account was not made
        # by a CP, so treat it as an admin/root user. The previous bare
        # `except:` swallowed every exception, including SystemExit.
        info['user_cp_info'] = "Không có thông tin"
        info['user_info']['is_admin'] = True
    return info
def user_gen_product_code(user_cp, cp_type):
    """Build a product code: <user_cp> + first 3 chars of *cp_type* + timestamp.

    The timestamp is the current local time formatted as YYYYMMDDHHMMSS, so
    codes are unique per CP/type at one-second resolution.
    """
    # strftime produces the same zero-padded token the previous addzero()
    # chain built by hand.
    token = datetime.now().strftime("%Y%m%d%H%M%S")
    return user_cp + cp_type[:3] + token
def info_by_token(username, token, db):
    """Resolve a (username, session token) pair to full user information.

    Returns {'error': ...} when the token matches no user (expired session),
    otherwise {'result': ...} with the user's info.
    """
    matches = db(
        (db.auth_user.username == username) & (db.auth_user.token == token)
    ).select()
    if len(matches) == 0:
        return dict(error="Hết phiên làm việc. Bạn vui lòng đăng nhập lại!")
    return dict(result=user_get_info(matches[0].id, db))
def check_is_root(token, db):
    """Return True when *token* belongs to a user flagged as root.

    SECURITY NOTE(review): the token is matched with SQL LIKE, so a token
    containing '%' or '_' wildcards could match unintended rows, and LIKE
    may be case-insensitive on some backends. An equality comparison
    (db.auth_user.token == token) looks intended -- confirm with callers
    before changing the query.
    """
    rows = db((db.auth_user.token.like(token)) & (db.auth_user.is_root == True)).select()
    # Collapse the previous if/else returning True/False into one expression.
    return len(rows) > 0
def get_user_token(user_email, db):
    """Look up the stored user_token for an email address.

    Returns "" for a missing/empty email, an unknown email, or on any
    database error (best-effort: errors are printed, not raised).
    """
    token = ""
    try:
        if user_email is None or user_email == '':
            return token
        rows = db(db.clsb_user.email == user_email).select()
        if len(rows) < 1:
            return token
        token = rows.first()['user_token']
    except Exception as e:
        # print(...) parses on both Python 2 and 3; the original
        # `print str(e)` statement form was Python-2-only.
        print(str(e))
    return token
| [
"[email protected]"
] | |
bbcdaaea17213f01b3f94b787a9cfd3ff2f7b015 | f474d500b7da4f4069e24fddcde97783a4f3664b | /vagrantEnv/lib/python3.5/importlib/__init__.py | d7bb4b5e8438d8a32315f3618e2ac20a1b2ab0ec | [
"Apache-2.0"
] | permissive | Thanh-Lai/chat-bot | 220a0fd6383181f0cdaf732b5c02f645bd960a28 | e3007fa6e034d3cccff4615a7eccf0e75bbc1708 | refs/heads/master | 2020-04-23T09:39:04.509356 | 2019-02-18T04:56:25 | 2019-02-18T04:56:25 | 171,075,880 | 0 | 0 | Apache-2.0 | 2019-02-18T04:56:26 | 2019-02-17T03:00:39 | Python | UTF-8 | Python | false | false | 40 | py | /usr/lib/python3.5/importlib/__init__.py | [
"[email protected]"
] | |
a262ab878556b9c9c939d132c8e39567a05531ab | fc80698bcbae0b94907697fc4df023101bd19887 | /xptracker/settings/base.py | 1dbb792983757d847a3bab19d103e031d550cf0c | [
"Apache-2.0"
] | permissive | Django-Lessons/xptracker | 1ee982440ab71110f1d228479cd016ccb167db01 | c152de874872857c8896787eeea35744e1f3e02f | refs/heads/master | 2023-08-13T17:34:09.180379 | 2020-06-25T07:04:44 | 2020-06-25T07:04:44 | 270,729,777 | 2 | 1 | NOASSERTION | 2021-09-22T19:10:54 | 2020-06-08T15:55:14 | Python | UTF-8 | Python | false | false | 2,961 | py | import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'py8=&ynt*vq4s5^$b4u!fij9=3+)qal_*xn&u6^-v&2@+ahuwn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SITE_ID = 1
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'core',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'allauth',
'allauth.account',
'allauth.socialaccount',
'django_extensions'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'xptracker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'xptracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'core.User'
| [
"[email protected]"
] | |
107043c3897bddc4c62e4fe077338a1d50b32d6f | 43470b9aab53ae3d0f5e63402bb5727981619ddb | /Python/image/migrations/0002_auto_20200906_1916.py | 4f60badb4789e3f1ea5ad3246a0dd927499d48e4 | [] | no_license | nnocturnnn/Pixelizator | 04baa2cda666a581c6c24ca6b2d74f5999a07785 | 1386235f6e7bc01fced684e3922ef51b3f64bd59 | refs/heads/master | 2023-03-06T11:27:03.241013 | 2021-02-17T13:19:49 | 2021-02-17T13:19:49 | 315,136,923 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 3.1.1 on 2020-09-06 19:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alters Image.file to an ImageField storing uploads
    under 'origin/'."""

    dependencies = [
        ('image', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='image',
            name='file',
            # Re-declares the field with upload_to='origin/'.
            field=models.ImageField(upload_to='origin/'),
        ),
    ]
| [
"[email protected]"
] | |
18eac29dd0f67198041e3f6ea97457ba2bcd9a1d | b82057c77dd4d00ff9bca9a979a1a3075f0528c4 | /Exicom_gateway/checks/rectifierindiv_recline1_input_voltage | a2a1215661ecd5575e47a479f335c922630d5c47 | [] | no_license | subhash-007/photography-blog | 7ee0c4f930fee29d76106c45b09e6b76cb19cf56 | b1ae66794b48bfe3862cb6e727a3a15a6ef79024 | refs/heads/master | 2020-03-31T04:33:00.276628 | 2019-07-12T06:00:39 | 2019-07-12T06:00:39 | 151,910,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,497 | #!/usr/bin/python
import binascii
"""
vlan Poller script.
This is part of device application.
Poller script determines the vlan.
poller script takes the snmp value of OID .1.3.6.1.4.1.161.19.3.2.1.55.0 from snmp agent of device at specific interval.
all ports status are sent to device application
"""
# ######################################################################
# Function : check_exicom_model_no_invent
#
# Parameters: info (SNMP Output) _no_params(No Parameters)
#
# Output: service state and plugin output
# #####################################################################
def check_rectifierindiv_recline1_input_voltage(item, _no_params, info):
    """
    Check_MK check function for per-rectifier line-1 input voltage.

    Args:
        item (str): specific item of the SNMP output to filter on (unused).
        _no_params: check parameters (unused).
        info: SNMP table rows; each row's first column is the voltage reading.

    Returns:
        (state, infotext, perfdata) where state is
            0: OK, 1: Warning, 2: Critical, 3: unknown
        infotext is the plugin output, e.g. "recline1_input_voltage=230.0",
        and perfdata is one ("recline1_<n>_input_voltage", value) pair per row.
    """
    state = 3
    infotext = "unknown_value"
    input_voltage = None
    index =0
    perfdata = []
    try:
        for line in info:
            index= index + 1
            input_voltage = line[0]
            try:
                input_voltage = float(input_voltage)
            except Exception,e:
                # Non-numeric reading: keep the raw string with spaces
                # replaced by '@' (perfdata fields must not contain spaces).
                input_voltage = line[0].replace(' ','@')
            # Any successfully read row marks the service OK.
            state = 0
            perfdata.append(("recline1_%d_input_voltage" %index,input_voltage))
            # NOTE(review): infotext is overwritten each iteration, so the
            # plugin output only reflects the last row.
            infotext = "recline1_input_voltage=%s" % input_voltage
    except Exception,e:
        infotext = "unknown_value"
    return (state,infotext,perfdata)
# Check_MK registration: poll column 1.7 under OID .1.3.6.1.4.1.38016.14.2.8.6
# and discover this check only on devices whose .1.3.6.1.4.1.38016.14.1.1.0
# value contains "m1000".
check_info["rectifierindiv_recline1_input_voltage"] = {
    'check_function': check_rectifierindiv_recline1_input_voltage,
    'service_description': 'rectifierindiv_recline1_input_voltage',
    'has_perfdata': True,
    'snmp_info': ('.1.3.6.1.4.1.38016.14.2.8.6', ['1.7']),
    'snmp_scan_function': lambda oid: "m1000" in oid(".1.3.6.1.4.1.38016.14.1.1.0").lower(),
}
| [
"[email protected]"
] | ||
1b8ea813e6adfcf05a77b0fcdb6d9cb30cc95657 | 21bbc3fbeb7a1616dbd6993b66dc44d9b30df3e7 | /python_training/samp_proj1/day_011118/plotting1.py | 803db5cf4412574052bef093c3c79f673bf6c82a | [] | no_license | PoornimaDevii/python_training | 6124640608d8bf14289ae61b2b28e0db3b473b6f | 42b535590a6a244a91bd48b4451b74a29c1aaa80 | refs/heads/master | 2020-04-05T19:55:49.723114 | 2018-12-04T11:49:59 | 2018-12-04T11:49:59 | 157,157,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,420 | py |
import matplotlib.pyplot as plt
import numpy as np

# 256 evenly spaced samples across one full period [-pi, pi].
X = np.linspace(-np.pi, np.pi, 256)
Y1 = np.sin(X)
Y2 = np.cos(X)
Y3 = np.tan(X)
Y4 = np.sqrt(1 - X*X)  # upper half of the unit circle; NaN (with warning) where |X| > 1

plt.figure(figsize=(6,4), dpi=80) # 6 for x axis and 4 for y axis, dpi decides how large the plot will be,
# figsize is proportional x and y values
plt.plot(X,Y1, color='blue',linewidth=2.5, linestyle=':', label='sin')
#plt.show()
plt.plot(X,Y2,color='red', linewidth=2.5, label='cos')
plt.xlim(X.min()*1.2)
plt.xticks([-np.pi,-np.pi/2,0,np.pi/2,np.pi],[r'$-\pi$',r'$-\pi/2$',r'$0$',r'$\pi/2$',r'$\pi$'],rotation=30) # to view the actual values
plt.yticks([+1,0,-1],rotation=30)

# Hide the top/right spines and move the remaining two to the data origin,
# giving centred "math textbook" axes.
ax = plt.gca()
ax.spines['right'].set_color(None)
ax.spines['top'].set_color(None)
ax.xaxis.set_ticks_position('bottom') # top means the x-axis values will be floating at the top
ax.spines['left'].set_position(('data',0))
ax.spines['bottom'].set_position(('data',0))
plt.legend(loc='best')

# Enlarge tick labels and draw a translucent box behind each one.
for labels in ax.get_xticklabels() + ax.get_yticklabels():
    labels.set_fontsize(16)
    labels.set_bbox(dict(facecolor='grey', # to create a box around the values ( color of box -> facecolor,
                         edgecolor='red', # outline of box -> edgecolor, alpha-> transparency of box
                         alpha=0.35))

plt.savefig('myplot.png') #savefig before show
plt.show()

plt.plot(X,Y3)
plt.ylim(Y3.min()*1.5)
plt.show()

plt.plot(X,Y4)
plt.show()
"[email protected]"
] | |
f104f06f86d3958325c83c0ba9b5beffdb89c3ec | 4b7e282fe480415f5d52c0fc0429f144156190fe | /google/ads/googleads/v8/services/types/campaign_simulation_service.py | 9c4efef727ac0a515e6de9bbdcf66a4dcaca2149 | [
"Apache-2.0"
] | permissive | Z2Xsoft/google-ads-python | c4750357bb19da91bb3b6bf2fa84bef9d2df36d3 | 1779d52a0446c8afb2437b0a9e103dcb849f5590 | refs/heads/main | 2023-08-18T15:22:17.840364 | 2021-09-26T04:08:53 | 2021-09-26T04:08:53 | 410,444,398 | 0 | 0 | Apache-2.0 | 2021-09-26T04:08:53 | 2021-09-26T03:55:38 | null | UTF-8 | Python | false | false | 1,265 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v8.services",
marshal="google.ads.googleads.v8",
manifest={"GetCampaignSimulationRequest",},
)
class GetCampaignSimulationRequest(proto.Message):
    r"""Request message for
    [CampaignSimulationService.GetCampaignSimulation][google.ads.googleads.v8.services.CampaignSimulationService.GetCampaignSimulation].

    Attributes:
        resource_name (str):
            Required. The resource name of the campaign
            simulation to fetch.
    """

    # Proto field number 1 on the wire; plain string resource name.
    resource_name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
] | |
fd09d4b9e667639b7e0480f72e689ede4c098f7a | b5ea580240b372297b2b61922decff1aa15375e8 | /dico/emailer.py | 33f71525ae927efc7cddcd4ecb99ce01f0714e55 | [] | no_license | michaelcrubenstein/IDoDeclare | ca7e1c1ef8c89a330e0cb97e961a757039f68f66 | 1b8f4a4ca8d2bd953389d47786144502c9347921 | refs/heads/master | 2021-01-18T01:42:00.889655 | 2015-07-28T14:15:06 | 2015-07-28T14:15:06 | 31,086,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | from django.core.mail import send_mail
class Emailer():
    """Helpers for sending I Do Declare transactional mail and for simple
    ``{{ placeholder }}`` template merging.

    Neither method uses instance state, so both are exposed as
    staticmethods (the originals were declared without ``self`` and only
    worked when called directly on the class).
    """

    @staticmethod
    def sendResetPasswordEmail(recipientEMail, resetURL):
        """Send a password-reset email (plain-text + HTML parts)."""
        htmlMessage = """\
<p>There has been a request to reset your password for I Do Declare.</p>
<p>Click <a href="%s">here</a> to reset your password.</p>
<b>The I Do Declare Team</b>
""" % resetURL
        message = """\
There has been a request to reset your password for I Do Declare.
Open the following link in your web browser to reset your password:
%s
Thanks.
The I Do Declare Team
""" % resetURL
        send_mail('Password Reset', message, '[email protected]',
            [recipientEMail], fail_silently=False, html_message=htmlMessage)

    @staticmethod
    def merge(html, dir):
        """Replace each ``{{ name }}`` placeholder in ``html`` with
        ``dir[name]``; unknown names are left as their bare text."""
        # Local import: the module never imported re, so the original
        # raised NameError the first time merge() was called.
        import re

        p = re.compile(r'{{\s*([^}\s]+)\s*}}')

        def f(match):
            s = match.group(1)
            if s in dir:
                return dir[s]
            else:
                return s

        return p.sub(f, html)
"[email protected]"
] | |
0964e11e2ecb168539be6160295c46e5dbc284b5 | cfa4c756333d2a321b1a85126c74f9728649281c | /tests/test_parse/test_blast.py | d11e934ab1dfc25aa89f548dbf182f00143ad694 | [
"BSD-3-Clause"
] | permissive | genomematt/cogent3 | f86d8d0f906387064ebd588ef7194a91c3faa5a8 | d4b561c6ad06a44f82a79940e7699b3ffce71442 | refs/heads/develop | 2023-01-30T10:12:38.233989 | 2022-01-23T08:20:47 | 2022-01-23T08:20:47 | 240,843,994 | 0 | 0 | BSD-3-Clause | 2023-01-23T19:02:17 | 2020-02-16T06:33:35 | Python | UTF-8 | Python | false | false | 14,725 | py | from unittest import TestCase, main
from cogent3.parse.blast import (
FastacmdTaxonomyParser,
GenericBlastParser9,
LastProteinIds9,
PsiBlastFinder,
PsiBlastParser9,
PsiBlastQueryFinder,
PsiBlastTableParser,
QMEBlast9,
QMEPsiBlast9,
TableToValues,
fastacmd_taxonomy_splitter,
is_blast_junk,
is_blat_junk,
iter_finder,
iteration_set_finder,
make_label,
query_finder,
)
__author__ = "Micah Hamady"
__copyright__ = "Copyright 2007-2016, The Cogent Project"
__credits__ = ["Micah Hamady", "Rob Knight"]
__license__ = "GPL"
__version__ = "2021.10.12a1"
__maintainer__ = "Micah Hamady"
__email__ = "[email protected]"
__status__ = "Production"
from numpy.testing import assert_allclose, assert_equal
class BlastTests(TestCase):
"""Tests of top-level functions"""
def setUp(self):
"""Define some standard data"""
self.rec = """# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 1
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ece:Z4181 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 cvi:CV2421 41.67 72 42 0 39 110 29 100 2e-06 52.8
# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 2
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ece:Z4181 100.00 110 0 0 1 110 1 110 3e-54 211
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-54 211
ece:Z4181 cvi:CV2421 41.67 72 42 0 39 110 29 100 2e-08 59.0
ece:Z4181 sfl:CP0138 33.98 103 57 2 8 110 6 97 6e-06 50.5
ece:Z4181 spt:SPA2730 37.50 72 45 0 39 110 30 101 1e-05 49.8
ece:Z4181 sec:SC2804 37.50 72 45 0 39 110 30 101 1e-05 49.8
ece:Z4181 stm:STM2872 37.50 72 45 0 39 110 30 101 1e-05 49.8""".split(
"\n"
)
self.rec2 = """# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 1
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ece:Z4181 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 cvi:CV2421 41.67 72 42 0 39 110 29 100 2e-06 52.8
# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 2
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ece:Z4181 100.00 110 0 0 1 110 1 110 3e-54 211
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-54 211
ece:Z4181 cvi:CV2421 41.67 72 42 0 39 110 29 100 2e-08 59.0
ece:Z4181 sfl:CP0138 33.98 103 57 2 8 110 6 97 6e-06 50.5
ece:Z4181 spt:SPA2730 37.50 72 45 0 39 110 30 101 1e-05 49.8
ece:Z4181 sec:SC2804 37.50 72 45 0 39 110 30 101 1e-05 49.8
ece:Z4181 stm:STM2872 37.50 72 45 0 39 110 30 101 1e-05 49.8
# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 1
# Query: ece:Z4182
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4182 ece:Z4182 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4182 ecs:ECs3718 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4182 cvi:CV2422 41.67 72 42 0 39 110 29 100 2e-06 52.8""".split(
"\n"
)
self.rec3 = """# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 1
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ece:Z4181 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4181 spt:SPA2730 37.50 72 45 0 39 110 30 101 1e-05 49.8
# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 2
# Query: ece:Z4181
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4181 ecs:ECs3717 100.00 110 0 0 1 110 1 110 3e-54 211
ece:Z4181 cvi:CV2421 41.67 72 42 0 39 110 29 100 2e-08 59.0
# BLASTP 2.2.10 [Oct-19-2004]
# Iteration: 1
# Query: ece:Z4182
# Database: db/everything.faa
# Fields: Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score
ece:Z4182 ece:Z4182 100.00 110 0 0 1 110 1 110 3e-47 187
ece:Z4182 cvi:CV2422 41.67 72 42 0 39 110 29 100 2e-06 52.8""".split(
"\n"
)
def test_iter_finder(self):
"""iter_finder should split on lines starting with '# Iteration:'"""
lines = "abc\n# Iteration: 3\ndef".splitlines()
self.assertEqual(list(map(iter_finder, lines)), [False, True, False])
def test_query_finder(self):
"""query_finder should split on lines starting with '# Query:'"""
lines = "abc\n# Query: dfdsffsd\ndef".split("\n")
self.assertEqual(list(map(query_finder, lines)), [False, True, False])
def test_iteration_set_finder(self):
"""iter_finder should split on lines starting with '# Iteration:'"""
lines = "abc\n# Iteration: 3\ndef\n# Iteration: 1".split("\n")
self.assertEqual(
list(map(iteration_set_finder, lines)), [False, False, False, True]
)
def test_is_junk(self):
"""is_junk should reject an assortment of invalid lines"""
# Note: testing two functions that call it instead of function itself
lines = "abc\n# BLAST blah blah\n \n# BLAT blah\n123".split("\n")
self.assertEqual(
list(map(is_blast_junk, lines)), [False, True, True, False, False]
)
self.assertEqual(
list(map(is_blat_junk, lines)), [False, False, True, True, False]
)
def test_make_label(self):
"""make_label should turn comment lines into (key, val) pairs"""
a = "this test will fail: no # at start"
b = "#this test will fail because no colon"
c = "# Iteration: 1"
d = "# Query: ece:Z4147 ygdP; putative invasion protein [EC:3.6.1.-]"
e = "#Iteration: 1" # no space after the hash
self.assertRaises(ValueError, make_label, a)
self.assertRaises(ValueError, make_label, b)
# Note that we _do_ map the data type of known values value, so the
# value of the iteration will be 1, not '1'
self.assertEqual(make_label(c), ("ITERATION", 1))
self.assertEqual(
make_label(d),
("QUERY", "ece:Z4147 ygdP; putative invasion protein [EC:3.6.1.-]"),
)
self.assertEqual(make_label(e), ("ITERATION", 1))
def test_TableToValues(self):
"""TableToValues should convert itself into the correct type."""
constructors = {"a": int, "b": float, "c": str}
table = [["c", "b", "a", "d"], ["1.5", "3.5", "2", "2.5"], ["1", "2", "3", "4"]]
self.assertEqual(
TableToValues(table, constructors),
([["1.5", 3.5, 2, "2.5"], ["1", 2.0, 3, "4"]], ["c", "b", "a", "d"]),
)
# check that it works with supplied header
self.assertEqual(
TableToValues(table[1:], constructors, list("cbad")),
([["1.5", 3.5, 2, "2.5"], ["1", 2.0, 3, "4"]], ["c", "b", "a", "d"]),
)
def test_PsiBlastTableParser(self):
"""PsiBlastTableParser should wrap values in table."""
fields = [
v.strip()
for v in "Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score".split(
","
)
]
table = [
v.split()
for v in """ece:Z4147 ece:Z4147 100.00 176 0 0 1 176 1 176 2e-89 328
ece:Z4147 ecs:ECs3687 100.00 176 0 0 1 176 1 176 2e-89 328
ece:Z4147 ecc:c3425 100.00 176 0 0 1 176 1 176 2e-89 328
ece:Z4147 sfl:SF2840 100.00 176 0 0 1 176 1 176 2e-89 328""".splitlines()
]
headed_table = [fields] + table
new_table, new_fields = PsiBlastTableParser(headed_table)
self.assertEqual(new_fields, fields)
self.assertEqual(len(new_table), 4)
self.assertEqual(
new_table[1],
["ece:Z4147", "ecs:ECs3687", 100.0, 176, 0, 0, 1, 176, 1, 176, 2e-89, 328],
)
def test_GenericBlastParser9(self):
"""GenericBlastParser9 should read blast's tabular format (#9)."""
rec = self.rec
p = GenericBlastParser9(rec, PsiBlastFinder)
result = list(p)
self.assertEqual(len(result), 2)
first, second = result
self.assertEqual(
first[0],
{
"ITERATION": 1,
"QUERY": "ece:Z4181",
"DATABASE": "db/everything.faa",
"FIELDS": "Query id, Subject id, % identity, alignment length, mismatches, gap openings, q. start, q. end, s. start, s. end, e-value, bit score",
},
)
self.assertEqual(len(first[1]), 3)
self.assertEqual(second[0]["ITERATION"], 2)
self.assertEqual(len(second[1]), 7)
self.assertEqual(
second[1][-1],
"ece:Z4181 stm:STM2872 37.50 72 45 0 39 110 30 101 1e-05 49.8".split(),
)
def test_PsiBlastParser9(self):
"""PsiBlastParser9 should provide convenient results for format #9."""
result = PsiBlastParser9(self.rec2)
self.assertEqual(len(result), 2)
assert "ece:Z4181" in result
assert "ece:Z4182" in result
first = result["ece:Z4181"]
second = result["ece:Z4182"]
self.assertEqual(len(first), 2)
self.assertEqual(len(second), 1)
iter_1 = first[0]
iter_2 = first[1]
self.assertEqual(len(iter_1), 3)
self.assertEqual(len(iter_2), 7)
iter_1_2 = second[0]
self.assertEqual(len(iter_1_2), 3)
self.assertEqual(len(result["ece:Z4181"][1][3]), 12)
self.assertEqual(result["ece:Z4181"][1][3]["ALIGNMENT LENGTH"], 103)
def test_LastProteinIds9(self):
"""LastProteinIds9 should give last protein ids in iter"""
result = LastProteinIds9(self.rec)
self.assertEqual(
result,
[
"ece:Z4181",
"ecs:ECs3717",
"cvi:CV2421",
"sfl:CP0138",
"spt:SPA2730",
"sec:SC2804",
"stm:STM2872",
],
)
# should also work if threshold set
result = LastProteinIds9(self.rec, False, threshold=8e-6)
self.assertEqual(
result, ["ece:Z4181", "ecs:ECs3717", "cvi:CV2421", "sfl:CP0138"]
)
# should work on multiple records
result = list(map(LastProteinIds9, PsiBlastQueryFinder(self.rec2)))
self.assertEqual(len(result), 2)
self.assertEqual(
result[0],
[
"ece:Z4181",
"ecs:ECs3717",
"cvi:CV2421",
"sfl:CP0138",
"spt:SPA2730",
"sec:SC2804",
"stm:STM2872",
],
)
self.assertEqual(result[1], ["ece:Z4182", "ecs:ECs3718", "cvi:CV2422"])
def test_QMEBlast9(self):
"""QMEBlast9 should return expected lines from all iterations"""
expect = list(
zip(
*[
("ece:Z4181", "ece:Z4181", 3e-47),
("ece:Z4181", "ecs:ECs3717", 3e-47),
("ece:Z4181", "spt:SPA2730", 1e-5),
("ece:Z4181", "ecs:ECs3717", 3e-54), # WARNING: allows duplicates
("ece:Z4181", "cvi:CV2421", 2e-8),
("ece:Z4182", "ece:Z4182", 3e-47),
("ece:Z4182", "cvi:CV2422", 2e-6),
],
)
)
got = list(zip(*QMEBlast9(self.rec3)))
assert_equal(got[:-1], expect[:-1])
assert_allclose(got[-1], expect[-1])
def test_QMEPsiBlast9(self):
"""QMEPsiBlast9 should only return items from last iterations"""
expect = list(
zip(
*[
("ece:Z4181", "ecs:ECs3717", 3e-54),
("ece:Z4181", "cvi:CV2421", 2e-8),
("ece:Z4182", "ece:Z4182", 3e-47),
("ece:Z4182", "cvi:CV2422", 2e-6),
]
)
)
got = list(zip(*QMEPsiBlast9(self.rec3)))
assert_equal(got[:-1], expect[:-1])
assert_allclose(got[-1], expect[-1])
def test_fastacmd_taxonomy_splitter(self):
"""fastacmd_taxonomy_splitter should split records into groups"""
text = """NCBI sequence id: gi|3021565|emb|AJ223314.1|PSAJ3314
NCBI taxonomy id: 3349
Common name: Scots pine
Scientific name: Pinus sylvestris
NCBI sequence id: gi|37777029|dbj|AB108787.1|
NCBI taxonomy id: 228610
Common name: cf. Acremonium sp. KR21-2
Scientific name: cf. Acremonium sp. KR21-2
""".splitlines()
recs = list(fastacmd_taxonomy_splitter(text))
self.assertEqual(len(recs), 2)
self.assertEqual(recs[0], text[:5]) # includes trailing blank
def test_FastaCmdTaxonomyParser(self):
"""FastaCmdTaxonomyParser should parse taxonomy record to dict"""
text = """NCBI sequence id: gi|3021565|emb|AJ223314.1|PSAJ3314
NCBI taxonomy id: 3349
Common name: Scots pine
Scientific name: Pinus sylvestris
NCBI sequence id: gi|37777029|dbj|AB108787.1|
NCBI taxonomy id: 228610
Common name: cf. Acremonium sp. KR21-2
Scientific name: cf. Acremonium sp. KR21-2
""".splitlines()
recs = list(FastacmdTaxonomyParser(text))
self.assertEqual(len(recs), 2)
for r in recs:
self.assertEqual(
sorted(r.keys()), ["common_name", "scientific_name", "seq_id", "tax_id"]
)
r0, r1 = recs
self.assertEqual(r0["tax_id"], "3349")
self.assertEqual(r0["common_name"], "Scots pine")
self.assertEqual(r0["scientific_name"], "Pinus sylvestris")
self.assertEqual(r0["seq_id"], "gi|3021565|emb|AJ223314.1|PSAJ3314")
self.assertEqual(r1["tax_id"], "228610")
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
3ee38094628c51ffb37b52ecd815c4075878c2a5 | eacff46eda2c6b509449979a16002b96d4645d8e | /Collections-a-installer/community-general-2.4.0/tests/unit/plugins/module_utils/xenserver/conftest.py | 52f654bcc6ab90e4315772942e04f9f690466651 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later"
] | permissive | d-amien-b/simple-getwordpress | 5e6d4d15d5f87124ab591e46b63fec552998fdc3 | da90d515a0aa837b633d50db4d91d22b031c04a2 | refs/heads/master | 2023-04-08T22:13:37.347545 | 2021-04-06T09:25:51 | 2021-04-06T09:25:51 | 351,698,069 | 0 | 0 | MIT | 2021-03-31T16:16:45 | 2021-03-26T07:30:00 | HTML | UTF-8 | Python | false | false | 3,744 | py | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import importlib
import os
import json
import pytest
from .FakeAnsibleModule import FakeAnsibleModule
from ansible.module_utils import six
from mock import MagicMock
@pytest.fixture
def fake_ansible_module(request):
    """Returns fake AnsibleModule with fake module params."""
    default_params = {
        "hostname": "somehost",
        "username": "someuser",
        "password": "somepwd",
        "validate_certs": True,
    }
    # Indirect parametrization overrides the defaults via request.param.
    module_params = request.param if hasattr(request, 'param') else default_params
    return FakeAnsibleModule(module_params)
@pytest.fixture(autouse=True)
def XenAPI():
    """Imports and returns fake XenAPI module."""
    # Wrapped in a fixture so the fake does not leak into unrelated unit
    # tests that might also use a XenAPI module.
    module_name = 'ansible_collections.community.general.tests.unit.plugins.module_utils.xenserver.FakeXenAPI'
    fake_module = importlib.import_module(module_name)
    # Register the fake under the real name in the module cache so any
    # subsequent 'import XenAPI' statement resolves to it.
    sys.modules['XenAPI'] = fake_module
    return fake_module
@pytest.fixture(autouse=True)
def xenserver(XenAPI):
    """Imports and returns xenserver module util."""
    # Since we are wrapping fake XenAPI module inside a fixture, all modules
    # that depend on it have to be imported inside a test function. To make
    # this easier to handle and remove some code repetition, we wrap the import
    # of xenserver module util with a fixture.
    # Depending on the XenAPI fixture guarantees the fake is already in
    # sys.modules before this import runs.
    from ansible_collections.community.general.plugins.module_utils import xenserver

    return xenserver
@pytest.fixture
def mock_xenapi_failure(XenAPI, mocker):
    """
    Returns mock object that raises XenAPI.Failure on any XenAPI
    method call.

    Yields a (mocked_xenapi, fake_error_msg) pair so tests can assert on
    the failure message.
    """
    fake_error_msg = "Fake XAPI method call error!"

    # We need to use our MagicMock based class that passes side_effect to its
    # children because calls to xenapi methods can generate an arbitrary
    # hierarchy of mock objects. Any such object when called should use the
    # same side_effect as its parent mock object.
    class MagicMockSideEffect(MagicMock):
        def _get_child_mock(self, **kw):
            child_mock = super(MagicMockSideEffect, self)._get_child_mock(**kw)
            # Propagate the parent's side_effect so grandchildren fail too.
            child_mock.side_effect = self.side_effect
            return child_mock

    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', new=MagicMockSideEffect(), create=True)
    mocked_xenapi.side_effect = XenAPI.Failure(fake_error_msg)

    return mocked_xenapi, fake_error_msg
@pytest.fixture
def fixture_data_from_file(request):
    """Loads fixture data from files.

    request.param names one fixture file (str) or several (list); each file
    is read from the fixtures/ directory next to this module.  Files that
    parse as JSON are returned decoded; anything else is returned as raw
    text.  Returns {} when the fixture is used without parametrization.
    """
    if not hasattr(request, 'param'):
        return {}

    fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
    fixture_data = {}

    if isinstance(request.param, six.string_types):
        request.param = [request.param]

    for fixture_name in request.param:
        path = os.path.join(fixture_path, fixture_name)

        # Context manager closes the handle; the original open() leaked it.
        with open(path) as f:
            data = f.read()

        try:
            data = json.loads(data)
        except ValueError:
            # Not JSON -- keep the raw text.  (ValueError is the base of
            # json.JSONDecodeError; the old bare `except Exception` hid
            # unrelated failures.)
            pass

        fixture_data[fixture_name] = data

    return fixture_data
| [
"[email protected]"
] | |
b641e7b93e1bb7f9ec80f41c4f8e3637ff747b65 | 9ec1242ae20b6f407f25a266456d83fb8a3d5f73 | /src/nellCoin/wallet/mempool.py | a758f7fcafe749ff4cf141cec51a789771c5f8b0 | [
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain",
"MIT"
] | permissive | Nell-MDCoin/Nell-MDCoin | 5b6d6af7e141844ba22970adacd4877d024e872b | 9a1be366aba13539132dc7d0a9f0fdeaa2e19044 | refs/heads/master | 2020-03-21T23:17:23.329553 | 2018-06-29T17:32:53 | 2018-06-29T17:32:53 | 139,177,535 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | from wallet import Wallet
wallet = Wallet()
wallet.dumpmempool()
| [
"[email protected]"
] | |
f2fe8040e6b6e400ff5b3e70fd88c10f94a82945 | 75f0580af1734b9edb9e06bfadfe48f45b057872 | /2019/8/sol.py | 6052c056bbeb201b13ed23d24370c34a7c3d6a11 | [] | no_license | penteract/adventofcode | 5bb317f8093f60c1d776d0983016a5288d059603 | 7b7344708ef1d58caa339a32a13f3390556b664c | refs/heads/master | 2023-01-29T16:08:13.541190 | 2023-01-16T20:21:02 | 2023-01-16T20:21:02 | 160,901,373 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,104 | py | from functools import *
from itertools import *
from collections import defaultdict
print ("hi")
f = open("input")
l = list(f)
print(len(l))
print(len(l[0]))
dat = l[0][:-1]
x=0
lays=[]
while x<len(dat):
lays.append(dat[x:x+25*6])
x+=25*6
print(x)
print("here")
print(min([(l.count("0"),l.count("1")*l.count("2")) for l in lays ]))
dd=["2" for i in range(25*6)]
for l in lays:
for i,x in enumerate(l):
if dd[i]=="2": dd[i]=x
for i in range(6):
print("".join(dd[25*i:25*(i+1)]))
for i in range(6):
print("".join(" " if c== "0" else "#" for c in dd[25*i:25*(i+1)]))
##d={"COM":[]}
##
##for a,b in inp:#b orbits a
## if a not in d:
## d[a]=[]
## d[a].append(b)
## if b not in d:
## d[b]=[]
##
##def f(c,n=0):
## tot=n
## for x in d[c]:
## tot += f(x,n+1)
## return tot
##print (f("COM"))
##
##
##def dist(x,y):
## if x==y:
## return 0
## return min([10000]+[1+dist(z,y) for z in d[x]])
##
##print(min(dist(x,"SAN")+dist(x,"YOU")-2 for x in d))
#lll = list(map(int,l[0].split(",")) )#+[0 for i in range(3850694)]
| [
"[email protected]"
] | |
25ebe5fafa9fe727571d9b95d86bb183de99cf83 | b76615ff745c6d66803506251c3d4109faf50802 | /pyobjc-framework-ApplicationServices/PyObjCTest/test_axactionconstants.py | 0fd99b65a921cc20e9aef2f2d09a1649d4f9ee0a | [
"MIT"
] | permissive | danchr/pyobjc-git | 6ef17e472f54251e283a0801ce29e9eff9c20ac0 | 62b787fddeb381184043c7ff136f1c480755ab69 | refs/heads/master | 2021-01-04T12:24:31.581750 | 2020-02-02T20:43:02 | 2020-02-02T20:43:02 | 240,537,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 858 | py | import HIServices
from PyObjCTools.TestSupport import *
class TestAXActionConstants(TestCase):
    """Checks the AX action-name constants exported by HIServices."""

    def testConstants(self):
        expected = {
            "kAXPressAction": "AXPress",
            "kAXIncrementAction": "AXIncrement",
            "kAXDecrementAction": "AXDecrement",
            "kAXConfirmAction": "AXConfirm",
            "kAXCancelAction": "AXCancel",
            "kAXShowAlternateUIAction": "AXShowAlternateUI",
            "kAXShowDefaultUIAction": "AXShowDefaultUI",
            "kAXRaiseAction": "AXRaise",
            "kAXShowMenuAction": "AXShowMenu",
            "kAXPickAction": "AXPick",
        }
        for attr_name, value in expected.items():
            self.assertEqual(getattr(HIServices, attr_name), value)
if __name__ == "__main__":
    # `main` is presumably provided by the PyObjCTools.TestSupport star-import
    # above (not defined in this module) — it runs this module's test cases.
    main()
| [
"[email protected]"
] | |
45cd58d8d0c0c9156a82c5d76e32631b23dd7d34 | 76f8f0c28ed2c306352b9e8836792e78199eaaa0 | /textworld/envs/wrappers/tw_inform7.py | af72873912c639947c8e6005e44995d599585f41 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | shivgarg/TextWorld | bff638361512bdf9d135f36207c4bf17cb990516 | 4ba9fba8a0e24e37056d5b5437c1b59a38c16c1a | refs/heads/master | 2020-08-27T14:33:24.738305 | 2019-12-04T01:07:49 | 2019-12-04T01:07:49 | 217,407,158 | 0 | 0 | NOASSERTION | 2019-12-04T01:07:51 | 2019-10-24T22:36:53 | null | UTF-8 | Python | false | false | 11,862 | py | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
# -*- coding: utf-8 -*-
import os
import re
from typing import Mapping, Tuple, List
import textworld
from textworld.generator.game import Game, GameProgression
from textworld.generator.inform7 import Inform7Game
# Turn-by-turn extra-info channels an Inform7 game can print, enabled in-game
# via the special command `tw-extra-infos <name>`; each is emitted between
# <name> ... </name> tags in the game's feedback.
AVAILABLE_INFORM7_EXTRA_INFOS = ["description", "inventory", "score", "moves"]
class MissingGameInfosError(NameError):
    """
    Raised when an operation that needs the game's metadata (GameInfos) is applied
    to a game that lacks it, such as a Frotz game or a Glulx game that was not
    generated by TextWorld.
    """

    # Original wording preserved verbatim (including its double spaces).
    _MSG_TEMPLATE = ("Can only use '{}' with games generated by "
                     " TextWorld. Make sure the generated .json file is in the same "
                     " folder as the .ulx or .z8 game file.")

    def __init__(self, env):
        super().__init__(self._MSG_TEMPLATE.format(type(env).__name__))
def _detect_extra_infos(text: str) -> Tuple[Mapping[str, str], str]:
    """ Detect extra information printed out at every turn.

    Extra information can be enabled via the special command:
    `tw-extra-infos COMMAND`. The extra information is displayed
    between tags that look like this: <COMMAND> ... </COMMAND>.

    Args:
        text: Text outputted by the game.

    Returns:
        A tuple containing a dictionary, where the keys are text commands and
        the corresponding values are the extra information displayed between
        tags (with Inform7 debug tags stripped out), and the input text with
        every matched extra-info section removed.
    """
    matches = {}
    for tag in AVAILABLE_INFORM7_EXTRA_INFOS:
        # Sections look like "<tag>\n...</tag>"; DOTALL lets '.' span newlines.
        regex = re.compile(r"<{tag}>\n(.*)</{tag}>".format(tag=tag), re.DOTALL)
        match = re.search(regex, text)
        if match:
            # Strip any interleaved "[...]" Inform7 event debug tags from the payload.
            _, cleaned_text = _detect_i7_events_debug_tags(match.group(1))
            matches[tag] = cleaned_text.strip()
            text = re.sub(regex, "", text)  # Remove the matched section from the feedback.

    return matches, text
def _detect_i7_events_debug_tags(text: str) -> Tuple[List[str], str]:
""" Detect all Inform7 events debug tags.
In Inform7, debug tags look like this: [looking], [looking - succeeded].
Args:
text: Text outputted by the game.
Returns:
A tuple containing a list of Inform 7 events that were detected
in the text, and a cleaned text without Inform 7 debug infos.
"""
matches = []
for match in re.findall(r"\[[^]]+\]\n?", text):
text = text.replace(match, "") # Remove i7 debug tags.
tag_name = match.strip()[1:-1] # Strip starting '[' and trailing ']'.
if " - succeeded" in tag_name:
tag_name = tag_name[:tag_name.index(" - succeeded")]
matches.append(tag_name)
# If it's got either a '(' or ')' in it, it's a subrule,
# so it doesn't count.
matches = [m for m in matches if "(" not in m and ")" not in m]
return matches, text
class TWInform7(textworld.core.Wrapper):
    """
    Wrapper to play Inform7 games generated by TextWorld.
    """

    def _wrap(self, env):
        super()._wrap(env)
        # Layer the helper wrappers around the env, innermost first:
        # GameData, then Inform7Data, with StateTracking on the outside.
        for wrapper_class in (GameData, Inform7Data, StateTracking):
            self._wrapped_env = wrapper_class(self._wrapped_env)

    @classmethod
    def compatible(cls, path: str) -> bool:
        """ Check if path point to a TW Inform7 compatible game. """
        # Compatible games ship a "<game>.json" metadata file alongside
        # the .ulx/.z8 game file.
        json_sidecar = os.path.splitext(path)[0] + ".json"
        return os.path.isfile(json_sidecar)
class Inform7Data(textworld.core.Wrapper):
    """
    Wrapper that exposes additional information for Inform7 games generated by TextWorld.

    Parses the <tag>...</tag> extra-info sections out of the game's feedback and
    stores them in `self.state` (description, inventory, moves, score), plus
    won/lost/done flags.
    """

    def _gather_infos(self):
        """Fill `self.state` with extra infos, carrying values over from the previous step."""
        # Carry over information from previous game step.
        if self.prev_state is not None:
            for attr in ["description", "inventory"]:
                if getattr(self.infos, attr) and self.state.get(attr) is None:
                    self.state[attr] = self.prev_state[attr]

        # Always track moves and score.
        for attr in ["moves", "score"]:
            self.state[attr] = self.state.get(attr, self.prev_state.get(attr))

        # The score channel arrives as text; normalize it to an int once.
        if self.state["score"] is not None and type(self.state["score"]) is not int:
            self.state["score"] = int(self.state["score"].strip())

        # End-of-game banners printed by Inform7 mark win/loss.
        self.state["won"] = '*** The End ***' in self.state["feedback"]
        self.state["lost"] = '*** You lost! ***' in self.state["feedback"]

    def step(self, command: str):
        """Forward `command` to the game, then extract extra infos from the feedback."""
        self.prev_state = self.state
        self.state, _, _, = self._wrapped_env.step(command)
        extra_infos, self.state["feedback"] = _detect_extra_infos(self.state["feedback"])
        self.state.update(extra_infos)
        self._gather_infos()
        self.state["done"] = self.state["won"] or self.state["lost"]
        return self.state, self.state["score"], self.state["done"]

    def _send(self, command: str) -> str:
        """ Send a command to the game without affecting the Environment's state. """
        return self.unwrapped._send(command)

    def reset(self):
        """Reset the underlying game and enable the requested extra-info channels."""
        self.prev_state = None
        self.state = self._wrapped_env.reset()
        extra_infos = {}
        # NOTE(review): each `tw-extra-infos` call below overwrites `extra_infos`;
        # presumably the reply to the last command echoes all channels enabled so
        # far — confirm against the Inform7 game template.
        if self.infos.inventory:
            extra_infos, _ = _detect_extra_infos(self._send('tw-extra-infos inventory'))

        if self.infos.description:
            extra_infos, _ = _detect_extra_infos(self._send('tw-extra-infos description'))

        # Always track moves and score.
        extra_infos, _ = _detect_extra_infos(self._send('tw-extra-infos moves'))
        extra_infos, _ = _detect_extra_infos(self._send('tw-extra-infos score'))
        self.state.update(extra_infos)
        self._gather_infos()
        return self.state
class StateTracking(textworld.core.Wrapper):
    """
    Wrapper that enables state tracking for Inform7 games generated by TextWorld.

    Mirrors the game's logical state in a `GameProgression` by matching the
    Inform7 "[...]" event debug tags printed each turn against the currently
    valid actions, and derives facts, admissible commands, winning policy and
    intermediate rewards from it.
    """

    @property
    def tracking(self):
        # State tracking is only worth the overhead if at least one of the
        # requested infos actually needs it.
        return (self.infos.intermediate_reward
                or self.infos.policy_commands
                or self.infos.admissible_commands
                or self.infos.facts
                or self.infos.last_action)

    def load(self, gamefile: str) -> None:
        """Load `gamefile`, pulling the Game metadata from the inner env or the .json sidecar.

        Raises:
            MissingGameInfosError: if no metadata is available anywhere.
        """
        self._wrapped_env.load(gamefile)
        self._gamefile = os.path.splitext(gamefile)[0] + ".json"

        try:
            # Prefer metadata already loaded by an inner wrapper (e.g. GameData).
            self._game = self._wrapped_env._game
        except AttributeError:
            if not os.path.isfile(self._gamefile):
                raise MissingGameInfosError(self)

            self._game = Game.load(self._gamefile)

        self._game_progression = None
        self._inform7 = Inform7Game(self._game)

    def _gather_infos(self):
        """Populate `self.state` with tracking-derived infos for the current turn."""
        self.state["_game_progression"] = self._game_progression
        self.state["_facts"] = list(self._game_progression.state.facts)

        self.state["won"] = '*** The End ***' in self.state["feedback"]
        self.state["lost"] = '*** You lost! ***' in self.state["feedback"]

        self.state["_winning_policy"] = self._current_winning_policy
        if self.infos.policy_commands:
            self.state["policy_commands"] = []

            if self._game_progression.winning_policy is not None:
                self.state["policy_commands"] = self._inform7.gen_commands_from_actions(self._current_winning_policy)

        if self.infos.intermediate_reward:
            self.state["intermediate_reward"] = 0
            if self.state["won"]:
                # The last action led to winning the game.
                self.state["intermediate_reward"] = 1

            elif self.state["lost"]:
                # The last action led to losing the game.
                self.state["intermediate_reward"] = -1

            elif self._previous_winning_policy is None:
                self.state["intermediate_reward"] = 0

            else:
                # Reward +1/-1 according to whether the winning policy got
                # shorter or longer after the last action.
                diff = len(self._previous_winning_policy) - len(self._current_winning_policy)
                self.state["intermediate_reward"] = int(diff > 0) - int(diff < 0)  # Sign function.

        if self.infos.facts:
            self.state["facts"] = list(map(self._inform7.get_human_readable_fact, self.state["_facts"]))

        self.state["_last_action"] = self._last_action
        if self.infos.last_action and self._last_action is not None:
            self.state["last_action"] = self._inform7.get_human_readable_action(self._last_action)

        self.state["_valid_actions"] = self._game_progression.valid_actions
        if self.infos.admissible_commands:
            all_valid_commands = self._inform7.gen_commands_from_actions(self._game_progression.valid_actions)
            # To guarantee the order from one execution to another, we sort the commands.
            # Remove any potential duplicate commands (they would lead to the same result anyway).
            self.state["admissible_commands"] = sorted(set(all_valid_commands))

        if self.infos.moves:
            self.state["moves"] = self._moves

    def _send(self, command: str) -> str:
        """ Send a command to the game without affecting the Environment's state. """
        return self.unwrapped._send(command)

    def reset(self):
        """Reset the game and (re)initialize the progression tracking state."""
        self.state = self._wrapped_env.reset()
        if not self.tracking:
            return self.state  # State tracking not needed.

        self._send('tw-trace-actions')  # Turn on print for Inform7 action events.
        track_quests = (self.infos.intermediate_reward or self.infos.policy_commands)
        self._game_progression = GameProgression(self._game, track_quests=track_quests)
        self._last_action = None
        self._previous_winning_policy = None
        self._current_winning_policy = self._game_progression.winning_policy
        self._moves = 0
        self._gather_infos()
        return self.state

    def step(self, command: str):
        """Forward `command`, then update the progression from the emitted Inform7 events."""
        self.state, score, done = self._wrapped_env.step(command)
        if not self.tracking:
            return self.state, score, done  # State tracking not needed.

        # Detect what events just happened in the game.
        i7_events, self.state["feedback"] = _detect_i7_events_debug_tags(self.state["feedback"])

        self._previous_winning_policy = self._current_winning_policy
        for i7_event in i7_events:
            valid_actions = self._game_progression.valid_actions
            self._last_action = self._inform7.detect_action(i7_event, valid_actions)
            if self._last_action is not None:
                # An action that affects the state of the game.
                self._game_progression.update(self._last_action)
                self._current_winning_policy = self._game_progression.winning_policy
                self._moves += 1

        self._gather_infos()
        self.state["done"] = self.state["won"] or self.state["lost"]
        return self.state, score, self.state["done"]
class GameData(textworld.core.Wrapper):
    """
    Wrapper that exposes information contained in the game .json file.
    """

    def load(self, gamefile: str) -> None:
        """Load `gamefile` and its .json metadata sidecar.

        Raises:
            MissingGameInfosError: if the .json file does not exist.
        """
        self._gamefile = os.path.splitext(gamefile)[0] + ".json"
        if not os.path.isfile(self._gamefile):
            raise MissingGameInfosError(self)

        try:
            # Reuse metadata already loaded by the inner env, if any.
            self._game = self._wrapped_env._game
        except AttributeError:
            self._game = Game.load(self._gamefile)

        self._wrapped_env.load(gamefile)

    def _gather_infos(self):
        """Copy static game metadata (templates, verbs, entities, extras) into `self.state`."""
        self.state["game"] = self._game
        self.state["command_templates"] = self._game.command_templates
        self.state["verbs"] = self._game.verbs
        self.state["entities"] = self._game.entity_names
        self.state["objective"] = self._game.objective
        self.state["max_score"] = self._game.max_score
        # Expose every game extra under an "extra."-prefixed key.
        for k, v in self._game.extras.items():
            self.state["extra.{}".format(k)] = v

    def reset(self):
        self.state = self._wrapped_env.reset()
        self._gather_infos()
        return self.state

    def step(self, command: str):
        self.state, score, done = self._wrapped_env.step(command)
        self._gather_infos()
        return self.state, score, done
| [
"[email protected]"
] | |
a7d3c422b559f44ac3e3715060cf05f7dd47073a | f879a7bc37da3fa998fc6925b8cede788cde6a70 | /lunch/migrations/0001_initial.py | 81c83ebf338d11f881cf9a075d340bfbfeee507b | [
"MIT"
] | permissive | pythondev0101/eats_easy_ordering_system | 65672c90d6937cdf3173b0e3445dc2967b296fe4 | f65a88b4a46e056be35909799a01784f741cdfac | refs/heads/master | 2021-08-22T22:32:39.358470 | 2019-05-08T15:50:56 | 2019-05-08T15:50:56 | 168,003,924 | 0 | 0 | MIT | 2021-06-10T21:18:36 | 2019-01-28T17:22:58 | Python | UTF-8 | Python | false | false | 1,703 | py | # Generated by Django 2.1.5 on 2019-03-28 22:30
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the `lunch` app (auto-generated by Django 2.1.5, see
    # file header): creates the Order and OrderLine tables.

    initial = True

    dependencies = [
        ('core', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Order: one lunch order per user per date, with a lifecycle status.
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=255)),
                ('total', models.DecimalField(decimal_places=2, max_digits=9, verbose_name='Total')),
                ('status', models.CharField(blank=True, choices=[('new', 'New'), ('received', 'Received'), ('ordered', 'Ordered'), ('cancelled', 'Cancelled')], default='new', max_length=10, verbose_name='Status')),
                ('date', models.DateField()),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # OrderLine: one product entry belonging to an Order.
        migrations.CreateModel(
            name='OrderLine',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(null=True, verbose_name='Date')),
                ('order', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='lunch.Order')),
                ('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.Product')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
74008430a1be80fcb8c52208585fda0fb87e88c3 | 9716798c8ede92793d5dece271f7d2d2cb8f718e | /django-backend/grants/migrations/0003_auto_20160622_2128.py | c562bd57d5e9e1ab867f5406efa6b9c716039607 | [] | no_license | experiment/experiment-grant-scapie-mcscrapeface | 020066f42be1503dcaecb43a9e90b1091bf67d87 | 43388d3a621df1fcaf5ae57656b7537a384b4ed0 | refs/heads/master | 2021-01-18T04:51:31.871971 | 2016-06-26T20:41:54 | 2016-06-26T20:41:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-22 21:28
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9.7 (see file header): introduces the Funder
    # model and replaces Grant.organization with a Grant.funder foreign key.

    dependencies = [
        ('grants', '0002_auto_20160615_1928'),
    ]

    operations = [
        migrations.CreateModel(
            name='Funder',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
            ],
        ),
        migrations.RemoveField(
            model_name='grant',
            name='organization',
        ),
        migrations.AddField(
            model_name='grant',
            name='funder',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='grants.Funder'),
        ),
    ]
| [
"[email protected]"
] | |
b963ffcb149ec78ba7a71444eab00afebe6f07f9 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_21397.py | 5962a774fc28320f71f33418de6cc69378b3b646 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | # PyQt/Pyside - also return the previous value with valueChanged
QTableWidget
| [
"[email protected]"
] | |
8980dbace4b9b52b7f3d2df6afa42b4cb87ba0e1 | de4da7c45581f72adaf8e328a89cb3d57fe3613f | /fundamentos/slides/marcoandre/pyMordida0-fontes/py33.py | 12654acc12581c53d9458751e59ef50bb3a132ab | [] | no_license | ramalho/propython | 2469be7492554762d05f9b0ce5c0dc3a51bd3a18 | 76c2b52755e08d49929cdc2a523db72735240e72 | refs/heads/master | 2022-06-01T22:51:07.659074 | 2022-05-22T18:22:21 | 2022-05-22T18:22:21 | 140,458 | 39 | 13 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
from random import shuffle
# Demo (Python 2 — note the print statements): shuffle the letters of a word
# to build a scrambled version of it.
palavra = 'python'
print palavra
l = list(palavra)  # Strings are immutable; shuffle needs a mutable list.
print l
shuffle(l)  # In-place random permutation of the letters.
print l
palavraEmbaralhada = ''.join(l)  # "palavraEmbaralhada" = scrambled word.
print palavraEmbaralhada
| [
"[email protected]"
] | |
6a183eba351a084f4cfb4b32477ae15de49b2df7 | b2a3328ec0caeb4231528094ec374f8168b08e91 | /Scence/Scence/authrouter.py | 5fb3b54537fa43dd3338f9df122cb9becc294916 | [] | no_license | sugerStill/ScenceWeb | 1185d10b300d57af22cc72cbc6b50e1840bdc127 | b189ea27d9ca383528d095ab3d81c79c87fbaea2 | refs/heads/master | 2020-06-05T19:37:50.446509 | 2019-06-18T11:47:12 | 2019-06-18T11:47:12 | 192,527,209 | 1 | 0 | null | 2019-06-18T11:31:28 | 2019-06-18T11:31:28 | null | UTF-8 | Python | false | false | 1,642 | py | from django.conf import settings
DATABASE_MAPPING = settings.DATABASE_APPS_MAPPING
class AuthRouter:
def db_for_read(self, model, **hints):
if model._meta.app_label == 'TrafficView':
return 'trafficdatabase'
if model._meta.app_label == "ScenceView":
return "webdata"
if model._meta.app_label == "weather":
return "weather"
if model._meta.app_label == "internetdata":
return "internetdata"
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'TrafficView':
return 'trafficdatabase'
if model._meta.app_label == "ScenceView":
return "webdata"
if model._meta.app_label == "weather":
return "weather"
if model._meta.app_label == "internetdata":
return "internetdata"
return None
def allow_relation(self, obj1, obj2, **hints):
db_list = ['trafficdatabase', 'webdata', 'weather', 'internetdata']
if obj1._state.db in db_list and obj2._state.db in db_list:
return True
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
if app_label == 'TrafficView':
return 'trafficdatabase ' if db == "trafficdatabase" else False
elif app_label == 'ScenceView':
return 'webdata' if db == "webdata" else False
elif app_label == 'weather':
return 'weather' if db == "weather" else False
elif app_label == "internet":
return "internetdata" if db == "internetdata" else False
return None
| [
"[email protected]"
] | |
607b91307ba4c6a0fa4d05b36bfd7a002c462ad5 | 564d6a4d305a8ac6a7e01c761831fb2081c02d0f | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/aio/operations/_load_balancer_network_interfaces_operations.py | 433e1097609eab6b6c10cee1962289bb757c81d4 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | paultaiton/azure-sdk-for-python | 69af4d889bac8012b38f5b7e8108707be679b472 | d435a1a25fd6097454b7fdfbbdefd53e05029160 | refs/heads/master | 2023-01-30T16:15:10.647335 | 2020-11-14T01:09:50 | 2020-11-14T01:09:50 | 283,343,691 | 0 | 0 | MIT | 2020-07-28T22:43:43 | 2020-07-28T22:43:43 | null | UTF-8 | Python | false | false | 5,623 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
# Generic result type for this operation group, and the signature of the
# optional custom-response callback ("cls") each operation accepts via kwargs.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations:
    """LoadBalancerNetworkInterfacesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_11_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE: auto-generated by the AutoRest code generator (see the file
    # header); prefer regenerating over hand-editing.
    models = models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs
    ) -> AsyncIterable["models.NetworkInterfaceListResult"]:
        """Gets associated load balancer network interfaces.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_11_01.models.NetworkInterfaceListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.NetworkInterfaceListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the HTTP request: the first page hits the templated ARM
            # URL; follow-up pages reuse the service-provided next_link as-is.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserializes one page and returns (next page link, page items),
            # applying the caller-supplied `cls` transform when given.
            deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetches one page through the client pipeline, mapping HTTP
            # error statuses to the configured azure-core exceptions.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'}  # type: ignore
| [
"[email protected]"
] | |
9766fca774c3ffe621b37970eadc241e084e6d66 | 7a4da5ec2196bf975a9e6115846244788b36b952 | /3.7.0/lldb-3.7.0.src/test/expression_command/call-function/TestCallStdStringFunction.py | c36577a54133afe6418d68e03c4acf03aee2a13d | [
"NCSA",
"MIT"
] | permissive | androm3da/clang_sles | ca4ada2ec85d625c65818ca9b60dcf1bc27f0756 | 2ba6d0711546ad681883c42dfb8661b842806695 | refs/heads/master | 2021-01-10T13:50:25.353394 | 2016-03-31T21:38:29 | 2016-03-31T21:38:29 | 44,787,977 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,117 | py | """
Test calling std::String member functions.
"""
import unittest2
import lldb
import lldbutil
from lldbtest import *
class ExprCommandCallFunctionTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break for main.c.
self.line = line_number('main.cpp',
'// Please test these expressions while stopped at this line:')
@skipUnlessDarwin
@dsym_test
@expectedFailureDarwin(16361880) # <rdar://problem/16361880>, we get the result correctly, but fail to invoke the Summary formatter.
def test_with_dsym(self):
"""Test calling std::String member function."""
self.buildDsym()
self.call_function()
@dwarf_test
@expectedFailureFreeBSD('llvm.org/pr17807') # Fails on FreeBSD buildbot
@expectedFailureIcc # llvm.org/pr14437, fails with ICC 13.1
@expectedFailureDarwin(16361880) # <rdar://problem/16361880>, we get the result correctly, but fail to invoke the Summary formatter.
def test_with_dwarf(self):
"""Test calling std::String member function."""
self.buildDwarf()
self.call_function()
def call_function(self):
"""Test calling std::String member function."""
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
# Some versions of GCC encode two locations for the 'return' statement in main.cpp
lldbutil.run_break_set_by_file_and_line (self, "main.cpp", self.line, num_expected_locations=-1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
self.expect("print str",
substrs = ['Hello world'])
# Calling this function now succeeds, but we follow the typedef return type through to
# const char *, and thus don't invoke the Summary formatter.
self.expect("print str.c_str()",
substrs = ['Hello world'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
| [
"[email protected]"
] | |
e0921d86405956f11e9c99f975f6602dfe3062d4 | 498e99bae2b0a107a4f1c8563a74470e8516f465 | /apps/project/forms.py | 4a69db81801e8540a92517735406212b5511a46e | [
"MIT"
] | permissive | xiaoxiaolulu/MagicTestPlatform | 91bcf9125c4c7f254bf8aaf425b7c72ca40b7a49 | dc9b4c55f0b3ace180c30b7f080eb5d88bb38fdb | refs/heads/master | 2022-05-29T00:05:48.030392 | 2020-01-20T09:16:44 | 2020-01-20T09:16:44 | 219,256,372 | 5 | 1 | MIT | 2022-05-25T02:24:22 | 2019-11-03T05:31:53 | Python | UTF-8 | Python | false | false | 2,153 | py | """
项目管理模块表单验证器
~~~~~~~~~~~~~~~~~~~~~~~~~~~
DESCRIPTION
:copyright: (c) 2019 by Null.
"""
from wtforms import (
StringField,
TextAreaField,
IntegerField
)
from wtforms.validators import DataRequired
from wtforms_tornado import Form
class ProjectForm(Form):
    """Validates the payload for creating or updating a test project."""
    name = StringField("项目名称", validators=[DataRequired("请输入项目名称")])  # project name (required)
    env = IntegerField("测试环境", validators=[DataRequired("请选择环境")])  # id of the selected test environment (required)
    desc = TextAreaField("项目描述", validators=[DataRequired(message="请输入项目描述")])  # project description (required)
class TestEnvironmentForm(Form):
    """Validates the payload for creating or updating a test environment."""
    name = StringField("测试环境名称", validators=[DataRequired("请输入测试环境名称")])  # environment name (required)
    host_address = StringField("测试环境地址", validators=[DataRequired("请输入测试环境地址")])  # environment host address (required)
    desc = TextAreaField("测试环境描述", validators=[DataRequired(message="请输入测试环境描述")])  # environment description (required)
class DBSettingForm(Form):
    """Validates the payload for a database connection configuration."""
    name = StringField("数据库名称", validators=[DataRequired("请输入数据库名称")])  # database name (required)
    db_type = StringField("数据库类型", validators=[DataRequired("请输入数据库类型")])  # database type (required)
    db_user = StringField("数据库账号", validators=[DataRequired("请输入数据库账号")])  # database user (required)
    db_password = StringField("数据库密码", validators=[DataRequired("请输入数据库密码")])  # database password (required)
    db_host = StringField("数据库境地址", validators=[DataRequired("请输入数据库地址")])  # database host address (required)
    db_port = IntegerField("数据库端口号", validators=[DataRequired("请输入数据库端口号")])  # database port (required)
    desc = TextAreaField("数据库描述", validators=[DataRequired(message="请输入数据库描述")])  # database description (required)
class FunctionGeneratorForm(Form):
    """Validates the payload for registering a helper function."""
    name = StringField("函数名称", validators=[DataRequired("请输入函数名称")])  # function name (required)
    function = StringField("函数方法", validators=[DataRequired("请输入方法名称")])  # function body/expression (required)
    desc = TextAreaField("方法描述", validators=[DataRequired(message="请输入方法描述")])  # function description (required)
class FunctionDebugForm(Form):
    """Validates the payload for debugging (test-running) a helper function."""
    function = StringField("函数方法", validators=[DataRequired("请输入方法名称")])  # function body/expression to debug (required)
| [
"[email protected]"
] | |
51762a5493480f20d4c0ec1971b890da62221661 | 10e94d77e56d9cbb979174795c465b679d03d6b3 | /tensorflow/contrib/session_bundle/example/export_half_plus_two.py | a17617db12e50c46eebfbd7fd97418342833e856 | [
"Apache-2.0"
] | permissive | pint1022/tf-coriander | 68939732c1ec0f052929c13ef6d8f49e44d423e4 | 197a685accca4a3f38285d6ac3ccf3998a200090 | refs/heads/master | 2020-04-14T18:56:40.334257 | 2019-01-11T00:40:11 | 2019-01-11T00:40:11 | 164,038,861 | 1 | 0 | Apache-2.0 | 2019-01-04T00:53:40 | 2019-01-04T00:53:40 | null | UTF-8 | Python | false | false | 5,405 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exports a toy linear regression inference graph.
Exports a TensorFlow graph to /tmp/half_plus_two/ based on the Exporter
format.
This graph calculates,
y = a*x + b
where a and b are variables with a=0.5 and b=2.
Output from this program is typically used to exercise Session
loading and execution code.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.session_bundle import exporter
# Command-line flag selecting where the exported inference model is written.
tf.app.flags.DEFINE_string("export_dir", "/tmp/half_plus_two",
                           "Directory where to export inference model.")
FLAGS = tf.app.flags.FLAGS
def Export():
with tf.Session() as sess:
# Make model parameters a&b variables instead of constants to
# exercise the variable reloading mechanisms.
a = tf.Variable(0.5, name="a")
b = tf.Variable(2.0, name="b")
# Create a placeholder for serialized tensorflow.Example messages to be fed.
serialized_tf_example = tf.placeholder(tf.string, name="tf_example")
# Parse the tensorflow.Example looking for a feature named "x" with a single
# floating point value.
feature_configs = {"x": tf.FixedLenFeature([1], dtype=tf.float32),}
tf_example = tf.parse_example(serialized_tf_example, feature_configs)
# Use tf.identity() to assign name
x = tf.identity(tf_example["x"], name="x")
# Calculate, y = a*x + b
y = tf.add(tf.mul(a, x), b, name="y")
# Setup a standard Saver for our variables.
save = tf.train.Saver({"a": a, "b": b}, sharded=True)
# asset_path contains the base directory of assets used in training (e.g.
# vocabulary files).
original_asset_path = tf.constant("/tmp/original/export/assets")
# Ops reading asset files should reference the asset_path tensor
# which stores the original asset path at training time and the
# overridden assets directory at restore time.
asset_path = tf.Variable(original_asset_path,
name="asset_path",
trainable=False,
collections=[])
assign_asset_path = asset_path.assign(original_asset_path)
# Use a fixed global step number.
global_step_tensor = tf.Variable(123, name="global_step")
# Create a RegressionSignature for our input and output.
regression_signature = exporter.regression_signature(
input_tensor=serialized_tf_example,
# Use tf.identity here because we export two signatures here.
# Otherwise only graph for one of the signatures will be loaded
# (whichever is created first) during serving.
output_tensor=tf.identity(y))
named_graph_signature = {
"inputs": exporter.generic_signature({"x": x}),
"outputs": exporter.generic_signature({"y": y})
}
# Create two filename assets and corresponding tensors.
# TODO(b/26254158) Consider adding validation of file existance as well as
# hashes (e.g. sha1) for consistency.
original_filename1 = tf.constant("hello1.txt")
tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1)
filename1 = tf.Variable(original_filename1,
name="filename1",
trainable=False,
collections=[])
assign_filename1 = filename1.assign(original_filename1)
original_filename2 = tf.constant("hello2.txt")
tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2)
filename2 = tf.Variable(original_filename2,
name="filename2",
trainable=False,
collections=[])
assign_filename2 = filename2.assign(original_filename2)
# Init op contains a group of all variables that we assign.
init_op = tf.group(assign_asset_path, assign_filename1, assign_filename2)
# CopyAssets is used as a callback during export to copy files to the
# given export directory.
def CopyAssets(filepaths, export_path):
    """Export-time callback handed to exporter.Exporter as assets_callback.

    Reports the assets destination directory once, then one line per
    asset path.  (This demo callback only logs; it performs no copy.)
    """
    print("copying asset files to: %s" % export_path)
    for asset_file in filepaths:
        print("copying asset file: %s" % asset_file)
# Run an export.
tf.initialize_all_variables().run()
export = exporter.Exporter(save)
export.init(
sess.graph.as_graph_def(),
init_op=init_op,
default_graph_signature=regression_signature,
named_graph_signatures=named_graph_signature,
assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS),
assets_callback=CopyAssets)
export.export(FLAGS.export_dir, global_step_tensor, sess)
def main(_):
    """Entry point handed to tf.app.run(); the parsed argv is unused."""
    Export()
# Standard TensorFlow-app bootstrap: tf.app.run() parses command-line
# flags and then invokes main().
# Fix: in this copy the guard body had lost its indentation, which is a
# SyntaxError; restored so the script can run.
if __name__ == "__main__":
    tf.app.run()
| [
"[email protected]"
] | |
10d7946badfb7b23a22dc35a49596ac686505e5d | 9b0c7f1ad4c314d2873663effb976e69504e1d99 | /snippets/api/delete-resource.py | 37dfbe0131e647acdd7fcb76cea34dc8db2b7bda | [
"MIT"
] | permissive | etalab/doc.data.gouv.fr | 4251b4b3bf69bec2247cf31f36e549a3a11bd8e9 | 04da5b3937b424475a2de0dca772514c5e6ce36c | refs/heads/master | 2023-08-31T07:42:09.571391 | 2023-08-21T11:37:39 | 2023-08-21T11:37:39 | 112,334,095 | 7 | 61 | MIT | 2023-08-21T11:37:40 | 2017-11-28T12:43:25 | SCSS | UTF-8 | Python | false | false | 119 | py | url = api_url('/datasets/{}/resources/{}/'.format(DATASET, RESOURCE))
# Issue the HTTP DELETE against the resource URL built above.  HEADERS is
# expected to carry the API key -- presumably defined in the shared doc
# snippet preamble; confirm when embedding.  Inspect response.status_code
# for the outcome.
response = requests.delete(url, headers=HEADERS)
| [
"[email protected]"
] | |
fd163de569441d3f1a78fed668627ac3739d01cf | bc441bb06b8948288f110af63feda4e798f30225 | /resource_manage_sdk/model/container/ingress_rule_pb2.pyi | 90c7370882a3274a625add18341209e9ed58abc9 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,236 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from resource_manage_sdk.model.container.http_ingress_path_pb2 import (
HTTPIngressPath as resource_manage_sdk___model___container___http_ingress_path_pb2___HTTPIngressPath,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class IngressRule(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Http(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def paths(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_manage_sdk___model___container___http_ingress_path_pb2___HTTPIngressPath]: ...
def __init__(self,
*,
paths : typing___Optional[typing___Iterable[resource_manage_sdk___model___container___http_ingress_path_pb2___HTTPIngressPath]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> IngressRule.Http: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> IngressRule.Http: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"paths",b"paths"]) -> None: ...
host = ... # type: typing___Text
@property
def http(self) -> IngressRule.Http: ...
def __init__(self,
*,
host : typing___Optional[typing___Text] = None,
http : typing___Optional[IngressRule.Http] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> IngressRule: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> IngressRule: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"http",b"http"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"host",b"host",u"http",b"http"]) -> None: ...
| [
"[email protected]"
] | |
3102ccd9e183c24e8daa5cbd7e8868a6f317f940 | ccdeae68e468ad399a89181c37bba4490bcdc259 | /scripts/bestExpressions_L_TOP19_RELATIONAL_LASSO_1.py | 0c2d4fa5873998320cbf31a5be86b22b638b1471 | [] | no_license | jameshughes89/NonlinearModelsFMRI-2 | 19262d4494aa6adc0e9bd9592069ad6b757dda6b | a507a41d0a0a728d02616023aea0e66fafc1c387 | refs/heads/master | 2021-09-06T17:05:38.086733 | 2018-02-07T15:19:23 | 2018-02-07T15:19:23 | 109,417,040 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,853 | py | from math import *
def funcL_RELATIONAL_100307(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.59757039936e-13 * 1 + 0.0 * v0 + 0.131469391865 * v1 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v5 + 0.0 * v7 + 0.216628234038 * v8 + 0.199118824101 * v15 + 0.0 * v16 + 0.324393935032 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0105781263223 * v27 + 0.0427132574975 * v28
def funcL_RELATIONAL_100408(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.24865433276e-13 * 1 + 0.0 * v0 + 0.0366818847642 * v1 + -0.0 * v2 + 0.0 * v4 + 0.0 * v5 + 0.0 * v7 + 0.0 * v8 + 0.0 * v12 + 0.206491698112 * v13 + 0.390134268809 * v15 + 0.147684911183 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + -0.0 * v22 + 0.0 * v23 + 0.179659196885 * v27 + 0.0 * v28
def funcL_RELATIONAL_101006(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 4.62040678336e-14 * 1 + 0.0 * v0 + 0.227135183715 * v1 + 0.0 * v4 + 0.0 * v7 + 0.138719418469 * v8 + 0.0 * v10 + 0.0 * v11 + 0.0 * v12 + 0.0510847878851 * v13 + 0.0 * v15 + 0.0 * v16 + 0.25841765741 * v17 + 0.0763153951604 * v18 + -0.256689942011 * v19 + 0.100466912013 * v21 + 0.0 * v24 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_101107(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -2.61050117827e-14 * 1 + 0.0470101554014 * v1 + 0.0 * v3 + 0.0 * v4 + 0.0 * v7 + 0.0 * v11 + 0.0 * v12 + 0.32169649217 * v13 + 0.0 * v14 + 0.0 * v15 + 0.203230388343 * v17 + 0.0 * v19 + 0.0 * v20 + 0.0992045630406 * v21 + 0.0 * v22 + 0.0 * v24 + 0.0 * v26 + 0.22975843361 * v27 + 0.0 * v28
def funcL_RELATIONAL_101309(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.23138223906e-13 * 1 + 0.0821864296459 * v1 + 0.0 * v2 + -0.0 * v3 + 0.0 * v4 + 0.103349955246 * v6 + -0.0 * v7 + 0.348682991531 * v8 + -0.0 * v9 + 0.160230108487 * v11 + 0.159919173568 * v12 + 0.0 * v15 + 0.0 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v25 + 0.13592547708 * v26 + 0.0 * v27
def funcL_RELATIONAL_101410(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.61140065408e-13 * 1 + 0.0 * v1 + 0.0 * v2 + 0.0249241223424 * v3 + 0.112546522605 * v4 + 0.0 * v5 + 0.0 * v8 + 0.0 * v9 + 0.0 * v12 + 0.0875629837651 * v13 + 0.304317682247 * v15 + -0.0 * v16 + 0.356808374842 * v17 + -0.0 * v20 + -0.0 * v21 + 0.0 * v22 + -0.0 * v23 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_101915(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.81851650371e-13 * 1 + 0.0 * v0 + 0.0 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0479171753972 * v8 + 0.0 * v9 + 0.0 * v10 + 0.0 * v12 + 0.358069034977 * v13 + 0.283353282393 * v15 + 0.296996087175 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v23 + 0.0 * v24 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_102008(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -6.12370438252e-14 * 1 + 0.0 * v0 + 0.0 * v2 + 0.0 * v3 + 0.0 * v5 + 0.133984285391 * v6 + 0.035054661463 * v7 + 0.0 * v8 + 0.0 * v11 + 0.0 * v12 + 0.210615417381 * v13 + 0.0 * v15 + 0.0 * v16 + 0.486642755439 * v17 + 0.0 * v18 + 0.0 * v19 + 0.0 * v21 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_102311(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 3.02605229024e-14 * 1 + 0.0203012940868 * v1 + 0.0 * v2 + 0.058416040896 * v4 + 0.0 * v5 + 0.267007861641 * v6 + 0.0 * v7 + 0.336485946171 * v8 + 0.0 * v9 + 0.0 * v12 + 0.0564887489232 * v13 + 0.0 * v16 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v23 + 0.0 * v25 + 0.0201703933473 * v26 + 0.188866319641 * v27
def funcL_RELATIONAL_102816(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -3.00938255377e-14 * 1 + 0.634038143101 * v1 + 0.0 * v2 + 0.0467561130527 * v4 + 0.0448094628104 * v6 + 0.0 * v7 + 0.0 * v8 + 0.0 * v9 + 0.0179717455849 * v11 + 0.0 * v12 + 0.0 * v14 + 0.0 * v15 + 0.0 * v18 + -0.0 * v19 + 0.0 * v20 + 0.0 * v23 + 0.0 * v25 + 0.0 * v26 + 0.138578473761 * v27
def funcL_RELATIONAL_103111(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -6.94097399941e-14 * 1 + 0.0725475252654 * v1 + 0.0 * v4 + 0.0 * v5 + 0.0 * v7 + 0.0 * v8 + -0.0 * v9 + 0.0 * v11 + -0.0 * v12 + 0.0 * v13 + 0.380429902793 * v15 + 0.242330133186 * v17 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v25 + 0.235455952601 * v26 + 0.0 * v27 + -0.0 * v28
def funcL_RELATIONAL_103414(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 3.0250325552e-14 * 1 + 0.0 * v0 + 0.163878500162 * v1 + 0.1248757458 * v4 + 0.0597467782214 * v6 + 0.275827056543 * v8 + 0.0 * v9 + 0.0 * v10 + 0.0 * v11 + 0.0 * v12 + 0.0 * v13 + 0.110942298349 * v15 + 0.0 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0106753898604 * v21 + 0.0 * v22 + 0.0941914926123 * v26 + 0.113020342351 * v27
def funcL_RELATIONAL_103515(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -5.45671859935e-14 * 1 + 0.108478137791 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0 * v5 + 0.0 * v6 + 0.0 * v7 + 0.106936613624 * v8 + 0.0 * v9 + 0.126177528518 * v11 + 0.0455173074336 * v13 + 0.228063357297 * v15 + 0.178697125472 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0264495034164 * v26 + 0.153607710317 * v27 + 0.0 * v28
def funcL_RELATIONAL_103818(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -8.74444564344e-15 * 1 + 0.0 * v0 + 0.0400069288228 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0150181470345 * v6 + 0.0 * v7 + 0.0 * v8 + 0.0 * v11 + 0.444541835591 * v15 + 0.0187430180341 * v17 + 0.0600022355159 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0 * v25 + 0.342894885198 * v27 + 0.0 * v28
def funcL_RELATIONAL_104012(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.69963069385e-13 * 1 + 0.0 * v0 + 0.0336095359674 * v4 + 0.0 * v5 + 0.212106374613 * v8 + 0.0901163017963 * v9 + 0.0 * v11 + 0.0 * v12 + 0.143582819148 * v13 + 0.038928025802 * v15 + 0.0426312792853 * v16 + 0.0 * v18 + 0.13644498423 * v20 + 0.0 * v21 + 0.06961871848 * v22 + 0.0 * v23 + 0.0 * v24 + 0.165253198144 * v27 + 0.0232688445404 * v28
def funcL_RELATIONAL_104820(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.52738917588e-13 * 1 + 0.219499114724 * v1 + 0.0 * v2 + 0.0 * v4 + 0.154433007031 * v5 + 0.0 * v8 + 0.0 * v9 + 0.0 * v10 + -0.0 * v12 + 0.0 * v13 + 0.0 * v15 + 0.398809565947 * v17 + 0.0 * v20 + 0.13565170217 * v21 + 0.0 * v22 + -0.0 * v23 + -0.0 * v24 + 0.0 * v27 + -0.0 * v28
def funcL_RELATIONAL_105014(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 4.75255189956e-14 * 1 + 0.0 * v1 + -0.0 * v2 + 0.0 * v4 + -0.0 * v5 + 0.0 * v8 + -0.0 * v11 + 0.0 * v12 + 0.0824514387478 * v13 + 0.161040433237 * v15 + -0.0 * v16 + 0.413881260909 * v17 + -0.0 * v18 + -0.0 * v20 + -0.0 * v21 + 0.0 * v22 + 0.0 * v25 + 0.243224460239 * v27 + -0.0 * v28
def funcL_RELATIONAL_105115(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -7.17813697443e-14 * 1 + 0.0 * v0 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v5 + 0.0666430927462 * v7 + 0.0 * v8 + 0.112061807545 * v12 + 0.028654416203 * v13 + 0.201801067926 * v15 + 0.138293481992 * v16 + 0.351546526922 * v17 + 0.0 * v18 + 0.00274939220369 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0 * v28
def funcL_RELATIONAL_105216(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -3.48783151867e-14 * 1 + 0.0 * v1 + 0.0 * v2 + 0.0448609207105 * v4 + 0.0 * v5 + 0.308531366539 * v6 + 0.237154192515 * v8 + 0.0 * v12 + 0.100410800904 * v13 + 0.0731817868132 * v15 + 0.0 * v16 + 0.106912983551 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0646192006845 * v27 + 0.0 * v28
def funcL_RELATIONAL_105923(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 7.89985404341e-14 * 1 + 0.0 * v0 + 0.0 * v1 + 0.0 * v4 + 0.0 * v5 + 0.0 * v8 + 0.0 * v9 + 0.0 * v11 + 0.244156277061 * v12 + 0.101541082745 * v13 + 0.183055292179 * v15 + 0.0 * v16 + 0.0796060673588 * v17 + 0.122287783058 * v18 + 0.141985344725 * v20 + 0.0 * v21 + 0.0 * v24 + 0.0605703152148 * v27 + 0.0 * v28
def funcL_RELATIONAL_106016(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 4.07526228952e-14 * 1 + 0.0 * v2 + 0.0 * v3 + 0.162660864926 * v4 + 0.0 * v5 + 0.0 * v7 + 0.0 * v8 + 0.0 * v11 + 0.0 * v13 + 0.178093923067 * v15 + 0.0 * v16 + 0.168665879605 * v17 + 0.0403580952076 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.274160945893 * v25 + 0.0566751673002 * v27 + 0.0 * v28
def funcL_RELATIONAL_106319(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.06641075258e-13 * 1 + 0.00306788637273 * v0 + 0.321802383368 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0 * v6 + 0.0 * v7 + 0.0 * v8 + 0.0 * v9 + 0.0 * v13 + 0.348512046476 * v15 + 0.0738909348902 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v23 + 0.0 * v24 + 0.171737097117 * v27 + 0.0 * v28
def funcL_RELATIONAL_106521(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -3.10183670602e-14 * 1 + 0.0 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0 * v5 + 0.00276633012053 * v6 + 0.0 * v7 + 0.335870089462 * v8 + 0.0 * v11 + 0.0 * v12 + 0.0114771914023 * v13 + 0.119929924266 * v15 + 0.0 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v25 + 0.32683734758 * v26 + 0.133834139845 * v27
def funcL_RELATIONAL_107321(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.84906196542e-13 * 1 + 0.0 * v0 + 0.0 * v1 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v8 + 0.0 * v10 + 0.0 * v12 + 0.0616599665581 * v13 + 0.190515160754 * v15 + 0.0 * v16 + 0.617885569294 * v17 + 0.0 * v18 + 0.0 * v20 + -0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0 * v28
def funcL_RELATIONAL_107422(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -4.94614444765e-14 * 1 + 0.0 * v0 + 0.0 * v1 + -0.0 * v3 + 0.0 * v4 + -0.0 * v7 + 0.258759425061 * v8 + 0.0 * v9 + 0.0 * v12 + 0.197208726687 * v13 + 0.376812933549 * v15 + 0.0 * v16 + 0.0 * v17 + -0.0 * v18 + -0.0 * v20 + -0.0 * v21 + -0.0 * v23 + 0.0 * v24 + 0.0 * v28
def funcL_RELATIONAL_108121(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -9.87137034248e-15 * 1 + 0.0 * v0 + 0.0800070120342 * v1 + 0.0 * v4 + 0.0 * v5 + 0.0093095457049 * v6 + 0.0 * v7 + 0.143313554508 * v8 + 0.0 * v9 + 0.0269982185226 * v13 + 0.0 * v15 + 0.0 * v16 + 0.46397464401 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v23 + 0.225093115859 * v27 + 0.0 * v28
def funcL_RELATIONAL_108323(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -1.52044457484e-13 * 1 + 0.194646410821 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0630158736228 * v5 + 0.0381774442451 * v7 + 0.0 * v8 + 0.0 * v9 + 0.0 * v11 + 0.177617747701 * v13 + 0.0 * v15 + 0.145646094776 * v17 + 0.120998316595 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0494211475689 * v23 + 0.0 * v25 + 0.0711067443725 * v26 + 0.0826310297074 * v27
def funcL_RELATIONAL_108525(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 7.58955211116e-14 * 1 + 0.260816362004 * v1 + 0.0 * v3 + 0.0 * v4 + 0.0285998887767 * v5 + 0.0 * v6 + 0.0 * v8 + 0.0 * v9 + 0.116566838575 * v12 + 0.327608308422 * v13 + 0.0 * v15 + 0.0 * v17 + 0.0 * v20 + 0.0381247024001 * v21 + 0.0 * v22 + 0.0 * v23 + 0.106769453049 * v26 + 0.0619513197044 * v27 + 0.0 * v28
def funcL_RELATIONAL_108828(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -6.58209802896e-14 * 1 + 0.317330944855 * v1 + 0.0063176496756 * v4 + 0.0 * v5 + 0.0 * v6 + 0.0 * v8 + 0.0995247483433 * v9 + 0.0 * v11 + 0.0 * v12 + 0.229982225409 * v13 + -0.0 * v14 + 0.195294772007 * v15 + 0.0986686447042 * v17 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.0 * v25 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_109123(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 5.20405286404e-14 * 1 + 0.0 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0321123635654 * v6 + 0.0 * v8 + 0.0 * v9 + 0.0 * v11 + 0.0 * v13 + 0.0 * v14 + 0.0 * v15 + 0.0 * v16 + 0.555045660988 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.156776092654 * v26 + 0.167995049451 * v27 + 0.0 * v28
def funcL_RELATIONAL_109325(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -5.14270091578e-14 * 1 + 0.0 * v1 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v5 + 0.139466656325 * v6 + 0.0898394340122 * v8 + 0.0 * v9 + 0.0 * v12 + 0.380339869151 * v13 + 0.197100897731 * v15 + 0.0168739209927 * v17 + -0.0 * v20 + 0.0 * v21 + 0.0 * v25 + 0.229829231885 * v26 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_110411(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.1837853238e-13 * 1 + 0.0559270440262 * v0 + 0.159317549094 * v1 + 0.0 * v2 + 0.0 * v4 + 0.0526013349178 * v5 + 0.00293055054279 * v6 + 0.000427410686812 * v8 + 0.0 * v11 + 0.0 * v12 + 0.0 * v13 + 0.112266096647 * v15 + 0.0638231235565 * v16 + 0.450347126833 * v17 + 0.0 * v18 + 0.0 * v20 + 0.0 * v21 + 0.0 * v23 + 0.0 * v27
def funcL_RELATIONAL_111312(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.46002307131e-13 * 1 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v5 + 0.0 * v8 + 0.0 * v9 + 0.0 * v11 + 0.0940815103494 * v12 + 0.0241706421537 * v13 + 0.343009754152 * v15 + 0.0 * v16 + 0.26286068814 * v17 + 0.0 * v19 + 0.0 * v20 + 0.0 * v21 + 0.0 * v22 + 0.158203532889 * v27 + 0.0 * v28
def funcL_RELATIONAL_111413(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 6.25345574577e-14 * 1 + 0.138286450103 * v1 + 0.0 * v4 + 0.043852350829 * v5 + 0.210894383711 * v6 + 0.0 * v7 + 0.0 * v8 + -0.0 * v9 + -0.0 * v10 + 0.0 * v11 + 0.0197332102288 * v13 + 0.108048464179 * v14 + 0.0 * v15 + 0.207422563833 * v17 + 0.0 * v18 + 0.0 * v21 + 0.0377154147011 * v25 + 0.0 * v26 + 0.229585065518 * v27
def funcL_RELATIONAL_111514(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -2.37661568884e-13 * 1 + 0.0468825966739 * v1 + 0.0 * v3 + 0.0 * v5 + 0.0 * v6 + 0.157691677364 * v7 + 0.112838159686 * v8 + 0.0 * v9 + 0.0 * v10 + 0.0 * v12 + 0.110294651842 * v15 + 0.0 * v16 + 0.101958660049 * v17 + 0.408763079826 * v18 + 0.0 * v19 + 0.0 * v20 + 0.0 * v21 + 0.0 * v27 + 0.0 * v28
def funcL_RELATIONAL_111716(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 2.33921035709e-13 * 1 + 0.0 * v0 + 0.25320621321 * v1 + 0.0 * v4 + 0.0 * v5 + 0.111688707274 * v7 + 0.0 * v8 + 0.0 * v9 + -0.0 * v12 + 0.0917494587524 * v13 + 0.440385007511 * v15 + -0.0 * v16 + 0.0 * v17 + 0.0 * v18 + 0.0 * v20 + 0.00572738297748 * v21 + 0.0 * v23 + -0.0 * v24 + 0.00325846210259 * v27
def funcL_RELATIONAL_113215(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -5.97644046432e-14 * 1 + 0.0 * v1 + -0.0 * v4 + 0.0 * v5 + 0.188452728141 * v7 + 0.0 * v8 + 0.0 * v9 + 0.222953863134 * v13 + 0.0 * v15 + 0.224342851861 * v17 + 0.0 * v18 + 0.0 * v19 + 0.0 * v20 + 0.0 * v21 + -0.0 * v22 + 0.0 * v23 + 0.0 * v25 + 0.226573206318 * v27 + 0.0 * v28
def funcL_RELATIONAL_113619(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return 1.6306079447e-13 * 1 + 0.0 * v0 + 0.137066363189 * v1 + -0.0 * v5 + 0.221242866389 * v6 + 0.0 * v7 + 0.0 * v9 + -0.0403386979051 * v11 + 0.193798116724 * v13 + 0.249071142556 * v15 + 0.0 * v17 + 0.0 * v18 + -0.0 * v20 + 0.0 * v21 + -0.0 * v22 + -0.0 * v23 + 0.0 * v24 + 0.0 * v27 + -0.0 * v28
def funcL_RELATIONAL_113922(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -2.35246942534e-14 * 1 + 0.0 * v0 + 0.0572718604415 * v1 + 0.0 * v2 + 0.0 * v3 + 0.202165577907 * v6 + 0.0 * v7 + 0.0 * v8 + 0.0230693819433 * v9 + 0.0 * v10 + 0.0939734946701 * v12 + 0.498474326565 * v15 + 0.0 * v16 + 0.0422191926579 * v17 + 0.0 * v18 + 0.0 * v19 + 0.0 * v21 + 0.0 * v23 + 0.0 * v24
def funcL_RELATIONAL_114419(v0,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29): return -2.20202310172e-14 * 1 + 0.0 * v0 + 0.0 * v1 + 0.0 * v2 + 0.0 * v3 + 0.0 * v4 + 0.0 * v5 + 0.0 * v8 + 0.0122612670698 * v12 + 0.0 * v13 + 0.152720694671 * v15 + 0.391827668685 * v17 + 0.0 * v18 + 0.0 * v21 + 0.0 * v22 + 0.0 * v23 + 0.0 * v24 + 0.300807970014 * v27 + 0.0 * v28
funcs = [funcL_RELATIONAL_100307,funcL_RELATIONAL_100408,funcL_RELATIONAL_101006,funcL_RELATIONAL_101107,funcL_RELATIONAL_101309,funcL_RELATIONAL_101410,funcL_RELATIONAL_101915,funcL_RELATIONAL_102008,funcL_RELATIONAL_102311,funcL_RELATIONAL_102816,funcL_RELATIONAL_103111,funcL_RELATIONAL_103414,funcL_RELATIONAL_103515,funcL_RELATIONAL_103818,funcL_RELATIONAL_104012,funcL_RELATIONAL_104820,funcL_RELATIONAL_105014,funcL_RELATIONAL_105115,funcL_RELATIONAL_105216,funcL_RELATIONAL_105923,funcL_RELATIONAL_106016,funcL_RELATIONAL_106319,funcL_RELATIONAL_106521,funcL_RELATIONAL_107321,funcL_RELATIONAL_107422,funcL_RELATIONAL_108121,funcL_RELATIONAL_108323,funcL_RELATIONAL_108525,funcL_RELATIONAL_108828,funcL_RELATIONAL_109123,funcL_RELATIONAL_109325,funcL_RELATIONAL_110411,funcL_RELATIONAL_111312,funcL_RELATIONAL_111413,funcL_RELATIONAL_111514,funcL_RELATIONAL_111716,funcL_RELATIONAL_113215,funcL_RELATIONAL_113619,funcL_RELATIONAL_113922,funcL_RELATIONAL_114419,]
# Accessor used by downstream evaluation code to fetch the 40 per-subject
# linear models defined above (presumably LASSO-fitted, per the script
# name -- confirm against the generator that produced this file).
def getFuncs(): return funcs
| [
"[email protected]"
] | |
084424cd5ba296c1622253af22231705a68e5b7b | 1a9a62b3feb53c7f87352587a774eb772948ebc9 | /service2/application/routes.py | 405564d539c122c26757d1b65970fc1ed9043f10 | [] | no_license | vuchenna/SFIAproject2 | ff1e643cec2947931176af2f77d7b24032d80aed | a5d82331636f49d3f1978989eb5428b4a20e57a8 | refs/heads/master | 2022-12-24T13:09:05.392030 | 2020-03-09T16:36:39 | 2020-03-09T16:36:39 | 244,886,380 | 0 | 0 | null | 2022-12-08T03:46:07 | 2020-03-04T11:45:54 | Python | UTF-8 | Python | false | false | 462 | py | from application import app, db
from application.models import Gender
#from application.forms import GenerateForm
import random,requests
@app.route('/')
@app.route('/home')
def home():
    """Render the landing page template."""
    # Bug fix: render_template is never imported at module level (only
    # app/db, the Gender model, random and requests are), so this view
    # raised NameError on every request.  Imported locally to keep the
    # fix self-contained within the view.
    from flask import render_template
    return render_template('home.html', title='Home')
@app.route('/gender', methods=['GET', 'POST'])
def gender():
    """Return a random gender id in [0, 2] as the plain-text response body."""
    # Renamed from `id`, which shadowed the `id()` builtin.
    # NOTE(review): randint(0, 2) yields three possible values (0, 1, 2) --
    # confirm that id 0 exists in the Gender table before re-enabling the
    # commented-out lookup.
    gender_id = random.randint(0, 2)
    # Original DB lookup, left disabled as in the source:
    # gender = Gender.query.filter_by(id=gender_id).first()
    # g = str(gender.id)
    return str(gender_id)
| [
"[email protected]"
] | |
79ecbaa165b4ab251b36e00d45242e63bfd51e85 | 0487c30d3d2a26ee62eb9e82c1b1e6edb7cb8b36 | /tests/sai_qualify/community_cases.py | b21af337103cbad72b54347c09e156056917d5e7 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | gord1306/sonic-mgmt | e4047cbcdb600591816215e765c7f30664cc4543 | 05094321ed58270ac06d1a0ef575a4ab9ea3ddd6 | refs/heads/master | 2022-12-17T08:05:58.944208 | 2022-06-06T02:34:48 | 2022-06-06T02:34:48 | 195,778,851 | 1 | 0 | NOASSERTION | 2019-07-08T09:21:07 | 2019-07-08T09:21:07 | null | UTF-8 | Python | false | false | 5,025 | py | COMMUN_TEST_CASE = [
# Full community test list for SAI qualification, grouped by PTF test
# module.  Each entry is a "<module>.<TestClass>" name handed to the PTF
# runner, so it must match the test class name exactly -- stray
# whitespace would make the runner fail to resolve the class.
COMMUN_TEST_CASE = [
    # saiacl: ACL table / table-group tests
    "saiacl.IPAclTest",
    "saiacl.MACSrcAclTest",
    "saiacl.L3AclTest",
    "saiacl.SeqAclTableGroupTest",
    "saiacl.MultBindAclTableGroupTest",
    "saiacl.BindAclTableInGroupTest",
    "saiacl.L3AclTableTestI",
    "saiacl.L3AclTableGroupTestI",
    "saiacl.L3AclTableGroupTestII",
    "saiacl.L3AclTableTestII",
    # saidebugcounters: drop-reason debug counter tests
    "saidebugcounters.DropMCSMAC",
    "saidebugcounters.DropSMACequalsDMAC",
    "saidebugcounters.DropDMACReserved",
    "saidebugcounters.DropIngressVLANFilter",
    "saidebugcounters.DropL2LoopbackFilter",
    "saidebugcounters.DropL3LoopbackFilter",
    "saidebugcounters.DropNonRoutable",
    "saidebugcounters.DropNoL3Header",
    "saidebugcounters.DropIPHeaderError",
    "saidebugcounters.DropUCDIPMCDMAC",
    "saidebugcounters.DropDIPLoopback",
    "saidebugcounters.DropSIPLoopback",
    "saidebugcounters.DropMulticastSIP",
    "saidebugcounters.DropSIPClassE",
    "saidebugcounters.DropSIPUnspecified",
    "saidebugcounters.DropMCDMACMismatch",
    "saidebugcounters.DropSIPEqualsDIP",
    "saidebugcounters.DropSIPBC",
    "saidebugcounters.DropDIPLocal",
    "saidebugcounters.DropDIPLinkLocal",
    "saidebugcounters.DropSIPLinkLocal",
    "saidebugcounters.DropIPv6MCScope0",
    "saidebugcounters.DropIPv6MCScope1",
    "saidebugcounters.DropIRIFDisabled",
    "saidebugcounters.DropERIFDisabled",
    "saidebugcounters.DropLPM4Miss",
    "saidebugcounters.DropLPM6Miss",
    "saidebugcounters.DropBlackholeRoute",
    "saidebugcounters.DropACLAny",
    "saidebugcounters.NoDropIngressVLANFilter",
    "saidebugcounters.DropMultipleReasons",
    "saidebugcounters.EditingDropReasons",
    # saifdb: FDB miss tests
    "saifdb.L2FDBMissUnicastTest",
    "saifdb.L2FDBMissBroadcastTest",
    # saihostif: host interface / trap tests
    "saihostif.NoPolicyTest",
    "saihostif.PolicyTest",
    "saihostif.ARPTest",
    "saihostif.DHCPTest",
    "saihostif.LLDPTest",
    "saihostif.LACPTest",
    "saihostif.SNMPTest",
    "saihostif.SSHTest",
    "saihostif.IP2METest",
    "saihostif.TTLErrorTest",
    "saihostif.BGPTest",
    # sail2: L2 switching tests
    "sail2.L2AccessToAccessVlanTest",
    "sail2.L2TrunkToTrunkVlanTest",
    "sail2.L2AccessToTrunkVlanTest",
    "sail2.L2TrunkToAccessVlanTest",
    "sail2.L2FloodTest",
    "sail2.L2LagTest",
    "sail2.LagHashseedTest",
    "sail2.L2VlanBcastUcastTest",
    "sail2.L2FdbAgingTest",
    "sail2.L2ARPRequestReplyFDBLearningTest",
    "sail2.L2BridgeSubPortFloodTest",
    "sail2.L2BridgePortTestI",
    "sail2.L2BridgeSubPortFDBTest",
    "sail2.L2MtuTest",
    # Bug fix: these three names carried trailing spaces in the original
    # list, which would break exact test-name matching.
    "sail2.L2MacMoveTestI",
    "sail2.L2MacMoveTestII",
    "sail2.L2MacMoveTestIII",
    # sail3: L3 routing tests
    "sail3.L3IPv4HostTest",
    "sail3.L3IPv4LpmTest",
    "sail3.L3IPv6HostTest",
    "sail3.L3IPv6PrefixTest",
    "sail3.L3IPv6LpmTest",
    "sail3.L3IPv4EcmpHostTest",
    "sail3.L3IPv6EcmpHostTest",
    "sail3.L3IPv4EcmpLpmTest",
    "sail3.L3IPv6EcmpLpmTest",
    "sail3.L3IPv4EcmpHashSeedTest",
    "sail3.L3IPv4LagTest",
    "sail3.L3IPv6LagTest",
    "sail3.L3EcmpLagTest",
    "sail3.L3EcmpLagTestMini",
    "sail3.L3VIIPv4HostTest",
    "sail3.L3IPv4MacRewriteTest",
    "sail3.L3VlanNeighborMacUpdateTest",
    "sail3.L3MultipleLagTest",
    "sail3.L3MultipleEcmpLagTest",
    "sail3.L3BridgeAndSubPortRifTest",
    "sail3.L3SubPortAndVLANRifTest",
    "sail3.L3MtuTest",
    "sail3.L3IPv4NeighborMacTest",
    "sail3.L3IPv6NeighborMacTest",
    "sail3.L3IPv4NeighborFdbAgeoutTest",
    "sail3.L3IPv6NeighborFdbAgeoutTest",
    "sail3.L3IPv4EcmpGroupMemberTest",
    "sail3.L3IPv6EcmpGroupMemberTest",
    # Bug fix: trailing space removed from this name as well.
    "sail3.L3IPv4_32Test",
    "sail3.L3LpbkSubnetTest",
    # saimirror: port mirroring tests
    "saimirror.IngressLocalMirrorTest",
    "saimirror.IngressRSpanMirrorTest",
    "saimirror.IngressERSpanMirrorTest",
    "saimirror.EgressLocalMirrorTest",
    "saimirror.EgressERSpanMirrorTest",
    # saitunnel: IP-in-IP tunnel encap/decap tests
    "saitunnel.IpIpEncapTest",
    "saitunnel.IpIpP2PTunnelDecapTest",
    "saitunnel.IpIpP2PTunnelDecapOnlyTestBase",
    "saitunnel.IpIpP2PTunnelDecapTestIpv4inIpv4",
    "saitunnel.IpIpP2PTunnelDecapTestIpv6inIpv4",
    "saitunnel.IpIpP2PTunnelDecapTestIpv4inIpv6",
    "saitunnel.IpIpP2PTunnelDecapTestIpv6inIpv6",
    "saitunnel.IpIpP2PTunnelDecapTestIpv4inIpv4GRE",
    "saitunnel.IpIpP2PTunnelDecapTestIpv6inIpv4GRE",
    "saitunnel.IpIpP2PTunnelDecapTestIpv4inIpv6GRE",
    "saitunnel.IpIpP2PTunnelDecapTestIpv6inIpv6GRE",
]
# Sanity cases run through the PTF-SAI saisanity test module.
PTF_SAI_TEST_CASE = [
"saisanity.L2TrunkToTrunkVlanTest",
"saisanity.L2TrunkToAccessVlanTest",
"saisanity.L2SanityTest"
]
# Warm-reboot variant of the L2 sanity test.
WARM_BOOT_TEST_CASE = [
"warm_saisanity.WarmL2SanityTest"
]
# Single case -- presumably used as a quick environment probe before the
# full suites (per its name); confirm exact usage in the harness.
PROBE_TEST_CASE = "sail3.L3IPv4HostTest"
| [
"[email protected]"
] | |
e00073bc15e99ad9f8df5f66533a616d9a50b004 | 4edbeb3e2d3263897810a358d8c95854a468c3ca | /python3/re/sub1.py | a6845b67a8b3cf82fa3820856df278a7eb085aa4 | [
"MIT"
] | permissive | jtraver/dev | f505d15d45b67a59d11306cc7252114c265f388b | 2197e3443c7619b856470558b737d85fe1f77a5a | refs/heads/master | 2023-08-06T02:17:58.601861 | 2023-08-01T16:58:44 | 2023-08-01T16:58:44 | 14,509,952 | 0 | 1 | MIT | 2020-10-14T18:32:48 | 2013-11-19T00:51:19 | Python | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python3
#!/usr/bin/python
import re
# Sample text mixing letters, digits, punctuation and whitespace so both
# substitutions below have something to rewrite.
str1 = "An arbitrary string. Literal containing chars like: []{}!#$!@#!%ls813"
print(" ")
print(str1)
# taken from
# citrusleaf/monitoring-console/server/site-packages/pkg_resources.py
print(" ")
# With '+', each RUN of consecutive non-alphanumerics collapses to one '_'.
print("re.sub('[^A-Za-z0-9]+', '_', str1)")
print(re.sub('[^A-Za-z0-9]+', '_', str1))
print(" ")
# Without '+', every single non-alphanumeric char becomes its own '_'.
print("re.sub('[^A-Za-z0-9]', '_', str1)")
print(re.sub('[^A-Za-z0-9]', '_', str1))
def replaceIt(str1):
    """Collapse every run of non-alphanumeric characters in str1 to '_'."""
    # Demo helper: first echo the expression being evaluated, then
    # compute and return the substituted string.
    print(" ")
    print("return re.sub('[^A-Za-z0-9]+', '_', str1)")
    cleaned = re.sub('[^A-Za-z0-9]+', '_', str1)
    return cleaned
# Same substitution via the helper; replaceIt() also echoes the
# expression it evaluates before its result is printed here.
print(" ")
print("replaceIt(str1)")
print(replaceIt(str1))
| [
"[email protected]"
] | |
9fadbf5ceb9f56ff1c29d611095486237a3d374a | 6fd893482e664664b7c341964f591dae6b6feddc | /smallestindex.py | 094bde10d98e68ba481cf3a8b370d4064dbd960f | [] | no_license | ramuklihtnes/guviguvi | 066c27488dc282aa17f2de5847cf214d8fbb29b5 | d5369ebe5a3e3fbcb2951ce6e268c6101768dc13 | refs/heads/master | 2022-02-21T15:00:12.942339 | 2019-09-03T10:22:55 | 2019-09-03T10:22:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | n=int(input())
def min_max_positions(values):
    """Return the 1-based positions of the first minimum and the first
    maximum element of a non-empty list of numbers.

    Raises ValueError on an empty list (clearer than min()'s error).
    """
    if not values:
        raise ValueError("values must be non-empty")
    return values.index(min(values)) + 1, values.index(max(values)) + 1

if __name__ == "__main__":
    # n (how many values to keep) is read from the first input line above.
    # Fix over the original: b/c were reused first for the min/max values
    # and then for their positions, which obscured the logic; the
    # computation is now in a testable helper behind a main guard.
    nums = list(map(int, input().split()))[:n]
    min_pos, max_pos = min_max_positions(nums)
    print(min_pos, max_pos)
| [
"[email protected]"
] | |
916f487636f1c022f25759cae3478469254fc569 | fdd050eef1c075965b7717f014ae2eeb51c1483f | /gen_dataset/assemble_rnet_imglist.py | 498b49b60c38c168aa4060135adb76b325bb2eaa | [] | no_license | digital-nomad-cheng/MTCNN_PyTorch_Zero_To_One | a8b33b4b39c6f325280d04f22f0e72c532b33cd3 | 30c3fd34c29aa81c4353029c55721b54cc961534 | refs/heads/master | 2022-11-06T18:30:35.344107 | 2019-10-09T06:30:17 | 2019-10-09T06:30:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | import os
import config
import gen_dataset.assemble as assemble
if __name__ == '__main__':
    # Assemble the RNet training image list from the four per-type
    # annotation files.  List order mirrors the original appends:
    # positives, part faces, negatives, landmarks.
    anno_list = [
        os.path.join(config.ANNO_STORE_DIR, config.RNET_POSTIVE_ANNO_FILENAME),
        os.path.join(config.ANNO_STORE_DIR, config.RNET_PART_ANNO_FILENAME),
        os.path.join(config.ANNO_STORE_DIR, config.RNET_NEGATIVE_ANNO_FILENAME),
        os.path.join(config.ANNO_STORE_DIR, config.RNET_LANDMARK_ANNO_FILENAME),
    ]
    imglist_file = os.path.join(config.ANNO_STORE_DIR, config.RNET_TRAIN_IMGLIST_FILENAME)
    chose_count = assemble.assemble_data(imglist_file, anno_list)
    # Bug fix: the summary said "PNet" although this script builds the
    # RNet image list (copy/paste from the PNet variant).
    print("RNet train annotation result file path:%s, total num of imgs: %d" % (imglist_file, chose_count))
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.