# Repository: nlpming/tensorflow-DSMM
#!/usr/bin/env python
#encoding=utf-8
'''
@Time : 2020/10/25 22:28:30
@Author : zhiyang.zzy
@Contact : <EMAIL>
@Desc    : Train text-similarity models.
1. Siamese network, scored with either cosine or Manhattan distance.
2. Triplet loss.
'''
# imports
from model.bert_classifier import BertClassifier
import os
import time
from numpy.lib.arraypad import pad
import nni
from tensorflow.python.ops.gen_io_ops import write_file
import yaml
import logging
import argparse
logging.basicConfig(level=logging.INFO)
import data_input
from config import Config
from model.siamese_network import SiamenseRNN, SiamenseBert
from data_input import Vocabulary, get_test
from util import write_file
def train_siamese():
    # Load the configuration
# conf = Config()
cfg_path = "./configs/config.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
    # Load the data
data_train, data_val, data_test = data_input.get_lcqmc()
# data_train = data_train[:100]
print("train size:{},val size:{}, test size:{}".format(
len(data_train), len(data_val), len(data_test)))
model = SiamenseRNN(cfg)
model.fit(data_train, data_val, data_test)
pass
def predict_siamese(file_='./results/'):
    # Load the configuration
cfg_path = "./configs/config.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
    # Convert token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
test_arr, query_arr = get_test(file_, vocab)
    # Load the model
model = SiamenseRNN(cfg)
model.restore_session(cfg["checkpoint_dir"])
test_label, test_prob = model.predict(test_arr)
out_arr = [x + [test_label[i]] + [test_prob[i]] for i, x in enumerate(query_arr)]
write_file(out_arr, file_ + '.siamese.predict', )
pass
def train_siamese_bert():
    # Load the configuration
# conf = Config()
cfg_path = "./configs/config_bert.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
    # NNI auto-tuning: each trial fetches one set of parameters from the search space
tuner_params= nni.get_next_parameter()
cfg.update(tuner_params)
    # vocab: converts token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
    # Load the data
data_train, data_val, data_test = data_input.get_lcqmc_bert(vocab)
# data_train = data_train[:100]
print("train size:{},val size:{}, test size:{}".format(
len(data_train), len(data_val), len(data_test)))
model = SiamenseBert(cfg)
model.fit(data_train, data_val, data_test)
pass
def predict_siamese_bert(file_="./results/input/test"):
    # Load the configuration
# conf = Config()
cfg_path = "./configs/config_bert.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
os.environ["CUDA_VISIBLE_DEVICES"] = "4"
    # vocab: converts token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
    # Load the data
test_arr, query_arr = data_input.get_test_bert(file_, vocab)
print("test size:{}".format(len(test_arr)))
model = SiamenseBert(cfg)
model.restore_session(cfg["checkpoint_dir"])
test_label, test_prob = model.predict(test_arr)
out_arr = [x + [test_label[i]] + [test_prob[i]] for i, x in enumerate(query_arr)]
write_file(out_arr, file_ + '.siamese.bert.predict', )
pass
def train_bert():
    # Load the configuration
# conf = Config()
cfg_path = "./configs/bert_classify.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
    # NNI auto-tuning: each trial fetches one set of parameters from the search space
tuner_params= nni.get_next_parameter()
cfg.update(tuner_params)
    # vocab: converts token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
    # Load the data
data_train, data_val, data_test = data_input.get_lcqmc_bert(vocab, is_merge=1)
# data_train = data_train[:100]
print("train size:{},val size:{}, test size:{}".format(
len(data_train), len(data_val), len(data_test)))
model = BertClassifier(cfg)
model.fit(data_train, data_val, data_test)
pass
def predict_bert(file_="./results/input/test"):
    # Load the configuration
# conf = Config()
cfg_path = "./configs/bert_classify.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
    # vocab: converts token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
    # Load the data
test_arr, query_arr = data_input.get_test_bert(file_, vocab, is_merge=1)
print("test size:{}".format(len(test_arr)))
model = BertClassifier(cfg)
model.restore_session(cfg["checkpoint_dir"])
test_label, test_prob = model.predict(test_arr)
out_arr = [x + [test_label[i]] + [test_prob[i]] for i, x in enumerate(query_arr)]
write_file(out_arr, file_ + '.bert.predict', )
pass
def siamese_bert_sentence_embedding(file_="./results/input/test.single"):
    # Input: one query per line; output: the embedding vector for that query
    # Load the configuration
cfg_path = "./configs/config_bert.yml"
cfg = yaml.load(open(cfg_path, encoding='utf-8'), Loader=yaml.FullLoader)
cfg['batch_size'] = 64
os.environ["CUDA_VISIBLE_DEVICES"] = "7"
    # vocab: converts token sequences to ids
vocab = Vocabulary(meta_file='./data/vocab.txt', max_len=cfg['max_seq_len'], allow_unk=1, unk='[UNK]', pad='[PAD]')
    # Load the data
test_arr, query_arr = data_input.get_test_bert_single(file_, vocab)
print("test size:{}".format(len(test_arr)))
model = SiamenseBert(cfg)
model.restore_session(cfg["checkpoint_dir"])
test_label = model.predict_embedding(test_arr)
test_label = [",".join([str(y) for y in x]) for x in test_label]
out_arr = [[x, test_label[i]] for i, x in enumerate(query_arr)]
print("write to file...")
write_file(out_arr, file_ + '.siamese.bert.embedding', )
pass
if __name__ == "__main__":
os.environ["CUDA_VISIBLE_DEVICES"] = "4"
ap = argparse.ArgumentParser()
ap.add_argument("--method", default="bert", type=str, help="train/predict")
ap.add_argument("--mode", default="train", type=str, help="train/predict")
ap.add_argument("--file", default="./results/input/test", type=str, help="train/predict")
args = ap.parse_args()
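    # Example invocations (the script name is an assumption; adjust to the repo layout):
    #   python main.py --method bert_siamese --mode train
    #   python main.py --method bert --mode predict --file ./results/input/test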
if args.mode == 'train' and args.method == 'rnn':
train_siamese()
elif args.mode == 'predict' and args.method == 'rnn':
predict_siamese(args.file)
elif args.mode == 'train' and args.method == 'bert_siamese':
train_siamese_bert()
elif args.mode == 'predict' and args.method == 'bert_siamese':
predict_siamese_bert(args.file)
elif args.mode == 'train' and args.method == 'bert':
train_bert()
elif args.mode == 'predict' and args.method == 'bert':
predict_bert(args.file)
elif args.mode == 'predict' and args.method == 'bert_siamese_embedding':
        # Output sentence embeddings. If they will be used for vector recall,
        # train the model with a loss whose distance metric matches the faiss index:
        # e.g. an L2 loss for an L2 index, or a cosine loss (or a cosine-similarity
        # term in the loss) for a cosine index.
siamese_bert_sentence_embedding(args.file)
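
# The note above recommends matching the training-loss metric to the faiss index
# metric when these embeddings are used for vector recall. A minimal, hypothetical
# sketch of the cosine case (assumes faiss and numpy are installed and that the
# embedding file produced above has already been parsed into a float32 matrix;
# not part of the original script):
#
#     import numpy as np
#     import faiss
#
#     emb = np.random.rand(1000, 768).astype("float32")  # placeholder embedding matrix
#     faiss.normalize_L2(emb)                            # L2-normalize so inner product == cosine
#     index = faiss.IndexFlatIP(emb.shape[1])            # exact inner-product (cosine) index
#     index.add(emb)
#     scores, ids = index.search(emb[:5], 10)            # top-10 neighbours for 5 query vectors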
# -*- coding: utf-8 -*-
from .blockchainobject import BlockchainObject
from ..block import Block as SyncBlock, BlockHeader as SyncBlockHeader
from ..exceptions import BlockDoesNotExistsException
class Block(BlockchainObject, SyncBlock):
""" Read a single block from the chain
:param int block: block number
        :param instance blockchain_instance: instance to use when accessing an RPC
:param bool lazy: Use lazy loading
:param loop: asyncio event loop
Instances of this class are dictionaries that come with additional
        methods (see below) that allow dealing with a block and its
corresponding functions.
.. code-block:: python
from aio.block import Block
block = await Block(1)
print(block)
"""
async def __init__(self, *args, use_cache=False, **kwargs):
        # ``use_cache`` may still be passed in explicitly, but here we change the
        # default to *False* so we don't cache every block for eternity
kwargs["use_cache"] = use_cache
await BlockchainObject.__init__(self, *args, **kwargs)
async def refresh(self):
""" Even though blocks never change, you freshly obtain its contents
from an API with this method
"""
block = await self.blockchain.rpc.get_block(self.identifier)
if not block:
raise BlockDoesNotExistsException
await super(Block, self).__init__(
block, blockchain_instance=self.blockchain, use_cache=self._use_cache
)
class BlockHeader(BlockchainObject, SyncBlockHeader):
async def __init__(self, *args, use_cache=False, **kwargs):
        # ``use_cache`` may still be passed in explicitly, but here we change the
        # default to *False* so we don't cache every block for eternity
kwargs["use_cache"] = use_cache
await BlockchainObject.__init__(self, *args, **kwargs)
async def refresh(self):
""" Even though blocks never change, you freshly obtain its contents
from an API with this method
"""
block = await self.blockchain.rpc.get_block_header(self.identifier)
if not block:
raise BlockDoesNotExistsException
await super(BlockHeader, self).__init__(
block, blockchain_instance=self.blockchain, use_cache=self._use_cache
)
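
# A minimal, hypothetical usage sketch (assumes a running asyncio event loop and a
# configured blockchain instance; not part of the original module):
#
#     import asyncio
#
#     async def show_block():
#         block = await Block(1)
#         header = await BlockHeader(1)
#         print(block, header)
#
#     # asyncio.run(show_block())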
# Repository: badouralix/adventofcode-2018 (file: day-07/part-1/david.py)
from tool.runners.python import SubmissionPy
from collections import defaultdict
from string import ascii_uppercase
import bisect
class DavidSubmission(SubmissionPy):
def run(self, s):
# :param s: input in string format
# :return: solution flag
# Your code goes here
relationships = []
for line in s.split("\n"):
x = line.split(" ")
relationships.append((x[1], x[7]))
dependencies = defaultdict(set)
reversed_dependencies = defaultdict(set)
for a,b in relationships:
dependencies[b].add(a)
reversed_dependencies[a].add(b)
activated = set()
to_visit = sorted(list(set(reversed_dependencies.keys()) - set(dependencies.keys())))
result = ""
while len(to_visit)>0:
item = to_visit.pop(0)
result += item
activated.add(item)
for c in reversed_dependencies[item]:
if all(dep in activated for dep in dependencies[c]):
bisect.insort(to_visit, c)
return result
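
# Worked example: with the seven-line sample from the Advent of Code 2018 day 7
# puzzle statement, the ordering produced above is "CABDFE". A hypothetical
# standalone check (bypasses the SubmissionPy runner):
#
#     sample = "\n".join([
#         "Step C must be finished before step A can begin.",
#         "Step A must be finished before step B can begin.",
#         "Step A must be finished before step D can begin.",
#         "Step A must be finished before step F can begin.",
#         "Step B must be finished before step E can begin.",
#         "Step D must be finished before step E can begin.",
#         "Step F must be finished before step E can begin.",
#     ])
#     assert DavidSubmission().run(sample) == "CABDFE"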
import json
from json import JSONDecodeError
from typing import List, Dict, Tuple
import requests
from config import Config
from flask_app.match_score import MatchPlayer, ScoreData
from flask_app.player import Player
from flask_app.schedule import schedule, Match
from flask_app.team import UserTeam
from flask_app.user import User
def get_mock_score(match: Match) -> Dict:
try:
with open(f"source/{match.file_name}") as score_file:
score_data = json.load(score_file)
except FileNotFoundError:
score_data = cache_request(match)
return score_data
def cache_request(match: Match) -> Dict:
score_data = get_score(match)
if score_data:
with open(f"source/{match.file_name}", "w") as score_file:
json.dump(score_data, score_file, indent=2)
return score_data
def get_score(match: Match) -> Dict:
response = requests.get("https://cricapi.com/api/fantasySummary",
params={"apikey": Config.API_KEY, "unique_id": match.unique_id})
try:
score_data = response.json()
except JSONDecodeError:
print("Error in server")
return dict()
if "creditsLeft" in score_data:
print(f"Credits Left: {score_data['creditsLeft']}")
if "data" not in score_data:
print(score_data)
return dict()
return score_data
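
# Illustrative response shape, inferred only from the fields read above (not an
# official cricapi schema): {"creditsLeft": <int>, "data": {<per-player fantasy summary>}}.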
def update_match_points():
matches: List[Match] = schedule.get_matches_being_played()
if not matches:
print("Score: No match currently in progress")
return
teams = [team for match in matches for team in match.teams]
players: List[Player] = Player.objects.filter("team", Player.objects.IN, teams).get()
updated_players = list()
user_teams: List[UserTeam] = UserTeam.objects.filter("game_week", ">=", schedule.get_game_week() - 1).get()
updated_teams = list()
match_ids = [match.unique_id for match in matches]
match_players: List[MatchPlayer] = MatchPlayer.objects.filter("match_id", MatchPlayer.objects.IN, match_ids).get()
updated_match_players = list()
created_match_players = list()
for match in matches:
score_data = get_mock_score(match) if Config.USE_MOCK_SCORE else get_score(match)
if not score_data:
continue
score_data = ScoreData(score_data["data"])
match_id = str(match.unique_id)
playing_xi: List[Tuple[str, str]] = score_data.get_playing_xi()
for player_data in playing_xi:
player = next((player for player in players if player.pid == player_data[0]), None)
if not player:
print(f"Player {player_data[1]} ({player_data[0]}) not found")
continue
game_week = schedule.get_game_week_last_match_played(player.team)
if not game_week:
continue
user_team: UserTeam = next(team for team in user_teams if team.player_name == player.name and
team.game_week == game_week)
match_player = next((mp for mp in match_players if mp.player_id == player_data[0] and
mp.match_id == match_id), None)
if not match_player:
match_player = MatchPlayer()
match_player.player_id = player_data[0]
match_player.player_name = player.name
match_player.team = player.team
match_player.match_id = match_id
match_player.owner = player.owner
match_player.gameweek = game_week
match_player.type = user_team.type
match_player.update_scores(score_data)
created_match_players.append(match_player)
else:
match_player.update_scores(score_data)
updated_match_players.append(match_player)
score = match_player.total_points
if match_id in player.scores and player.scores[match_id] == score:
continue
if match_id not in player.scores and score == 0:
continue
player.scores[match_id] = score
player.score = sum(score for _, score in player.scores.items())
updated_players.append(player)
user_team.update_match_score(match_id, score)
updated_teams.append(user_team)
user_team_next_gw: UserTeam = next((team for team in user_teams if team.player_name == player.name and
team.game_week == user_team.game_week + 1), None)
if user_team_next_gw:
user_team_next_gw.final_score = user_team_next_gw.previous_week_score = user_team.final_score
updated_teams.append(user_team_next_gw)
print(f"{player.name}: points update to {score}")
MatchPlayer.objects.create_all(MatchPlayer.objects.to_dicts(created_match_players))
print(f"MatchPlayer: {len(created_match_players)} players created")
MatchPlayer.objects.save_all(updated_match_players)
print(f"MatchPlayer: {len(updated_match_players)} players updated")
Player.objects.save_all(updated_players)
print(f"Player: {len(updated_players)} players updated")
UserTeam.objects.save_all(updated_teams)
print(f"UserTeam: {len(updated_teams)} user teams updated")
# Update points for all users
updated_users = list()
current_gw = schedule.get_game_week()
for user in User.objects.get():
players_owned = [user_team for user_team in user_teams if user_team.owner == user.username and
user_team.game_week == current_gw]
points = sum(user_team.final_score for user_team in players_owned)
if points != user.points:
user.points = points
updated_users.append(user)
User.objects.save_all(updated_users)
print(f"User: {len(updated_users)} users's points updated")
# This script handles the creation of the PEP 376 .dist-info directory for a
# package.
#
# Copyright (c) 2018 Riverbank Computing Limited <<EMAIL>>
#
# This script is distributed under the terms of the GNU General Public License
# v3 as published by the Free Software Foundation.
#
# This script is supplied WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
import base64
import hashlib
import os
import shutil
import sys
def error(message):
""" Display an error message and terminate. """
sys.stderr.write(message + '\n')
sys.exit(1)
# Parse the command line.
if len(sys.argv) != 4:
error("usage: {0} prefix dist-info installed".format(sys.argv[0]))
prefix_dir = sys.argv[1]
distinfo_dir = sys.argv[2]
installed_fn = sys.argv[3]
# Read the list of installed files.
installed_f = open(installed_fn)
installed = installed_f.read().strip().split('\n')
installed_f.close()
# The prefix directory corresponds to DESTDIR or INSTALL_ROOT.
real_distinfo_dir = prefix_dir + distinfo_dir
# Remove any existing dist-info directory and create an empty one.
if os.path.exists(real_distinfo_dir):
try:
shutil.rmtree(real_distinfo_dir)
except:
error("unable to delete existing {0}".format(real_distinfo_dir))
try:
os.mkdir(real_distinfo_dir)
except:
error("unable to create {0}".format(real_distinfo_dir))
# Create the INSTALLER file. We pretend that pip was the installer.
installer_fn = os.path.join(distinfo_dir, 'INSTALLER')
installer_f = open(prefix_dir + installer_fn, 'w')
installer_f.write('pip\n')
installer_f.close()
installed.append(installer_fn)
# Create the METADATA file.
METADATA = '''Metadata-Version: 1.1
Name: {0}
Version: {1}
'''
distinfo_path, distinfo_base = os.path.split(distinfo_dir)
pkg_name, version = os.path.splitext(distinfo_base)[0].split('-')
metadata_fn = os.path.join(distinfo_dir, 'METADATA')
metadata_f = open(prefix_dir + metadata_fn, 'w')
metadata_f.write(METADATA.format(pkg_name, version))
metadata_f.close()
installed.append(metadata_fn)
# Create the RECORD file.
record_fn = os.path.join(distinfo_dir, 'RECORD')
record_f = open(prefix_dir + record_fn, 'w')
for name in installed:
native_name = prefix_dir + name.replace('/', os.sep)
if os.path.isdir(native_name):
all_fns = []
for root, dirs, files in os.walk(native_name):
            # Reproducible builds.
dirs.sort()
files.sort()
for f in files:
all_fns.append(os.path.join(root, f).replace(os.sep, '/'))
if '__pycache__' in dirs:
dirs.remove('__pycache__')
else:
all_fns = [prefix_dir + name]
for fn in all_fns:
real_distinfo_path = prefix_dir + distinfo_path
if fn.startswith(real_distinfo_path):
fn_name = fn[len(real_distinfo_path) + 1:].replace('\\', '/')
elif fn.startswith(prefix_dir + sys.prefix):
fn_name = os.path.relpath(
fn, real_distinfo_path).replace('\\', '/')
else:
fn_name = fn[len(prefix_dir):]
fn_f = open(fn, 'rb')
data = fn_f.read()
fn_f.close()
digest = base64.urlsafe_b64encode(
hashlib.sha256(data).digest()).rstrip(b'=').decode('ascii')
record_f.write(
'{0},sha256={1},{2}\n'.format(fn_name, digest, len(data)))
record_f.write('{0}/RECORD,,\n'.format(distinfo_base))
record_f.close()
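
# Hypothetical invocation (the script and path names are illustrative only):
#
#     python mk_distinfo.py /tmp/destdir /usr/lib/python3/dist-packages/foo-1.0.dist-info installed.txt
#
# This would create INSTALLER, METADATA and RECORD inside
# /tmp/destdir/usr/lib/python3/dist-packages/foo-1.0.dist-info.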
r"""
Group algebras of root lattice realizations
"""
# ****************************************************************************
# Copyright (C) 2013 <NAME> <nthiery at users.sf.net>
# <NAME> <anne at math.ucdavis.edu>
# <NAME> <mshimo at vt.edu>
# <NAME>
#
# Distributed under the terms of the GNU General Public License (GPL)
# https://www.gnu.org/licenses/
# ****************************************************************************
import functools
import operator
from sage.misc.cachefunc import cached_method
from sage.misc.lazy_import import lazy_import
from sage.misc.misc_c import prod
from sage.categories.algebra_functor import AlgebrasCategory
lazy_import('sage.rings.integer_ring', 'ZZ')
from sage.modules.free_module_element import vector
from sage.combinat.root_system.hecke_algebra_representation import HeckeAlgebraRepresentation
class Algebras(AlgebrasCategory):
"""
The category of group algebras of root lattice realizations.
This includes typically weight rings (group algebras of weight lattices).
TESTS::
sage: for ct in CartanType.samples(crystallographic=True): # long time
....: TestSuite(RootSystem(ct).root_lattice().algebra(QQ)).run()
"""
class ParentMethods:
def _repr_(self):
r"""
EXAMPLES::
sage: RootSystem(["A",2,1]).ambient_space().algebra(QQ) # indirect doctest
Algebra of the Ambient space of the Root system of type ['A', 2, 1] over Rational Field
"""
return "Algebra of the %s over %s" % (self.basis().keys(), self.base_ring())
def some_elements(self):
r"""
Return some elements of the algebra ``self``.
EXAMPLES::
sage: A = RootSystem(["A",2,1]).ambient_space().algebra(QQ)
sage: A.some_elements()
[B[2*e[0] + 2*e[1] + 3*e[2]],
B[-e[0] + e[2] + e['delta']],
B[e[0] - e[1]],
B[e[1] - e[2]],
B[e['deltacheck']],
B[e[0] + e['deltacheck']],
B[e[0] + e[1] + e['deltacheck']]]
sage: A = RootSystem(["B",2]).weight_space().algebra(QQ)
sage: A.some_elements()
[B[2*Lambda[1] + 2*Lambda[2]],
B[2*Lambda[1] - 2*Lambda[2]],
B[-Lambda[1] + 2*Lambda[2]],
B[Lambda[1]],
B[Lambda[2]]]
"""
return [self.monomial(weight) for weight in self.basis().keys().some_elements()]
@cached_method
def cartan_type(self):
r"""
Return the Cartan type of ``self``.
EXAMPLES::
sage: A = RootSystem(["A",2,1]).ambient_space().algebra(QQ)
sage: A.cartan_type()
['A', 2, 1]
sage: A = RootSystem(["B",2]).weight_space().algebra(QQ)
sage: A.cartan_type()
['B', 2]
"""
return self.basis().keys().cartan_type()
def from_polynomial(self, p):
"""
Construct an element of ``self`` from a polynomial `p`.
INPUT:
- ``p`` -- a polynomial
EXAMPLES::
sage: L = RootSystem(["A",2]).ambient_lattice()
sage: KL = L.algebra(QQ)
sage: x,y,z = QQ['x,y,z'].gens()
sage: KL.from_polynomial(x)
B[(1, 0, 0)]
sage: KL.from_polynomial(x^2*y + 2*y - z)
B[(2, 1, 0)] + 2*B[(0, 1, 0)] - B[(0, 0, 1)]
TESTS::
sage: KL.from_polynomial(x).leading_support().parent() is L
True
sage: KL.from_polynomial(x-x)
0
sage: KL.from_polynomial(x-x).parent() is KL
True
.. TODO:: make this work for Laurent polynomials too
"""
L = self.basis().keys()
return self.sum_of_terms((L.from_vector(vector(t)), c)
for (t,c) in p.dict().items())
@cached_method
def divided_difference_on_basis(self, weight, i):
r"""
Return the result of applying the `i`-th divided difference on ``weight``.
INPUT:
- ``weight`` -- a weight
- ``i`` -- an element of the index set
.. TODO:: type free definition (Viviane's definition uses that we are in the ambient space)
EXAMPLES::
sage: L = RootSystem(["A",1]).ambient_space()
sage: KL = L.algebra(QQ)
sage: KL.divided_difference_on_basis(L((2,2)), 1) # todo: not implemented
0
sage: KL.divided_difference_on_basis(L((3,0)), 1) # todo: not implemented
B[(2, 0)] + B[(1, 1)] + B[(0, 2)]
sage: KL.divided_difference_on_basis(L((0,3)), 1) # todo: not implemented
-B[(2, 0)] - B[(1, 1)] - B[(0, 2)]
In type `A` and in the ambient lattice, we recover the
            usual action of divided differences on polynomials::
sage: x,y = QQ['x,y'].gens()
sage: d = lambda p: (p - p(y,x)) / (x-y)
sage: d(x^2*y^2)
0
sage: d(x^3)
x^2 + x*y + y^2
sage: d(y^3)
-x^2 - x*y - y^2
"""
raise NotImplementedError()
@cached_method
def isobaric_divided_difference_on_basis(self, weight, i):
r"""
Return the result of applying the `i`-th isobaric divided difference on ``weight``.
INPUT:
- ``weight`` -- a weight
- ``i`` -- an element of the index set
.. SEEALSO:: :meth:`demazure_operators`
EXAMPLES::
sage: L = RootSystem(["A",1]).ambient_space()
sage: KL = L.algebra(QQ)
sage: KL.isobaric_divided_difference_on_basis(L((2,2)), 1)
B[(2, 2)]
sage: KL.isobaric_divided_difference_on_basis(L((3,0)), 1)
B[(1, 2)] + B[(2, 1)] + B[(3, 0)] + B[(0, 3)]
sage: KL.isobaric_divided_difference_on_basis(L((0,3)), 1)
-B[(1, 2)] - B[(2, 1)]
In type `A` and in the ambient lattice, we recover the
usual action of divided differences on polynomials::
sage: x,y = QQ['x,y'].gens()
sage: d = lambda p: (x*p - (x*p)(y,x)) / (x-y)
sage: d(x^2*y^2)
x^2*y^2
sage: d(x^3)
x^3 + x^2*y + x*y^2 + y^3
sage: d(y^3)
-x^2*y - x*y^2
REFERENCES:
.. [Lascoux2003] <NAME>, Symmetric functions and combinatorial operators on polynomials,
CBMS Regional Conference Series in Mathematics, 99, 2003.
"""
P = self.basis().keys() # the root lattice realization
n = weight.scalar(P.simple_coroot(i))
if n not in ZZ:
raise ValueError("the weight does not have an integral scalar product with the coroot")
alphai = P.simple_root(i)
if n >= 0:
return self.sum_of_monomials(weight-j*alphai for j in range(n + 1))
else:
return -self.sum_of_monomials(weight-j*alphai for j in range(n+1,0))
def demazure_operators(self):
r"""
Return the Demazure operators acting on ``self``.
The `i`-th Demazure operator is defined by:
.. MATH::
\pi_i = \frac{ 1 - e^{-\alpha_i}s_i }{ 1-e^{-\alpha_i} }
It acts on `e^\lambda`, for `\lambda` a weight, by:
.. MATH::
\pi_i e^\lambda = \frac{e^\lambda - e^{-\alpha_i+s_i\lambda}}{1-e^{-\alpha_i}}
This matches with Lascoux' definition [Lascoux2003]_ of `\pi_i`, and
with the `i`-th Demazure operator of [Kumar1987]_, which also works for
general Kac-Moody types.
REFERENCES:
.. [Kumar1987] \<NAME>, Demazure character formula in arbitrary Kac-Moody setting,
Invent. Math. 89 (1987), no. 2, 395-423.
EXAMPLES:
We compute some Schur functions, as images of dominant
monomials under the action of the maximal isobaric divided
difference `\Delta_{w_0}`::
sage: L = RootSystem(["A",2]).ambient_lattice()
sage: KL = L.algebra(QQ)
sage: w0 = tuple(L.weyl_group().long_element().reduced_word())
sage: pi = KL.demazure_operators()
sage: pi0 = pi[w0]
sage: pi0(KL.monomial(L((2,1))))
2*B[(1, 1, 1)] + B[(1, 2, 0)] + B[(1, 0, 2)] + B[(2, 1, 0)] + B[(2, 0, 1)] + B[(0, 1, 2)] + B[(0, 2, 1)]
Let us make the result into an actual polynomial::
sage: P = QQ['x,y,z']
sage: pi0(KL.monomial(L((2,1)))).expand(P.gens())
x^2*y + x*y^2 + x^2*z + 2*x*y*z + y^2*z + x*z^2 + y*z^2
This is indeed a Schur function::
sage: s = SymmetricFunctions(QQ).s()
sage: s[2,1].expand(3, P.variable_names())
x^2*y + x*y^2 + x^2*z + 2*x*y*z + y^2*z + x*z^2 + y*z^2
Let us check this systematically on Schur functions of degree 6::
sage: for p in Partitions(6, max_length=3).list():
....: assert s.monomial(p).expand(3, P.variable_names()) == pi0(KL.monomial(L(tuple(p)))).expand(P.gens())
We check systematically that these operators satisfy the Iwahori-Hecke algebra relations::
sage: for cartan_type in CartanType.samples(crystallographic=True): # long time 12s
....: L = RootSystem(cartan_type).weight_lattice()
....: KL = L.algebra(QQ)
....: T = KL.demazure_operators()
....: T._test_relations()
sage: L = RootSystem(['A',1,1]).weight_lattice()
sage: KL = L.algebra(QQ)
sage: T = KL.demazure_operators()
sage: T._test_relations()
.. WARNING::
The Demazure operators are only defined if all the
elements in the support have integral scalar products
with the coroots (basically, they are in the weight
lattice). Otherwise an error is raised::
sage: L = RootSystem(CartanType(["G",2]).dual()).ambient_space()
sage: KL = L.algebra(QQ)
sage: pi = KL.demazure_operators()
sage: pi[1](KL.monomial(L([0,0,1])))
Traceback (most recent call last):
...
ValueError: the weight does not have an integral scalar product with the coroot
"""
return HeckeAlgebraRepresentation(self, self.isobaric_divided_difference_on_basis, self.cartan_type(), 0, 1, side="left")
def _test_demazure_operators(self, **options):
"""
Test that the Demazure operators satisfy their defining formulas.
EXAMPLES::
sage: RootSystem(["A",2]).root_lattice().algebra(QQ)._test_demazure_operators()
"""
tester = self._tester(**options)
pi = self.demazure_operators()
L = self.basis().keys()
alpha = L.simple_roots()
alphacheck = L.simple_coroots()
s = L.simple_reflections()
for i in self.cartan_type().index_set():
emalphai = self.monomial(-alpha[i]) # X^{-\alpha_i}
for weight in L.some_elements():
if not weight.scalar(alphacheck[i]) in ZZ:
# Demazure operators are not defined in this case
continue
x = self.monomial(weight)
result = pi[i](x)
tester.assertEqual(result * (self.one()-emalphai),
x - emalphai * x.map_support(s[i]))
def demazure_lusztig_operator_on_basis(self, weight, i, q1, q2, convention="antidominant"):
r"""
Return the result of applying the `i`-th Demazure-Lusztig operator on ``weight``.
INPUT:
- ``weight`` -- an element `\lambda` of the weight lattice
- ``i`` -- an element of the index set
- ``q1,q2`` -- two elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
See :meth:`demazure_lusztig_operators` for the details.
EXAMPLES::
sage: L = RootSystem(["A",1]).ambient_space()
sage: K = QQ['q1,q2']
sage: q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: KL.demazure_lusztig_operator_on_basis(L((2,2)), 1, q1, q2)
q1*B[(2, 2)]
sage: KL.demazure_lusztig_operator_on_basis(L((3,0)), 1, q1, q2)
(q1+q2)*B[(1, 2)] + (q1+q2)*B[(2, 1)] + (q1+q2)*B[(3, 0)] + q1*B[(0, 3)]
sage: KL.demazure_lusztig_operator_on_basis(L((0,3)), 1, q1, q2)
(-q1-q2)*B[(1, 2)] + (-q1-q2)*B[(2, 1)] + (-q2)*B[(3, 0)]
At `q_1=1` and `q_2=0` we recover the action of the isobaric divided differences `\pi_i`::
sage: KL.demazure_lusztig_operator_on_basis(L((2,2)), 1, 1, 0)
B[(2, 2)]
sage: KL.demazure_lusztig_operator_on_basis(L((3,0)), 1, 1, 0)
B[(1, 2)] + B[(2, 1)] + B[(3, 0)] + B[(0, 3)]
sage: KL.demazure_lusztig_operator_on_basis(L((0,3)), 1, 1, 0)
-B[(1, 2)] - B[(2, 1)]
Or `1-\pi_i` for ``bar=True``::
sage: KL.demazure_lusztig_operator_on_basis(L((2,2)), 1, 1, 0, convention="bar")
0
sage: KL.demazure_lusztig_operator_on_basis(L((3,0)), 1, 1, 0, convention="bar")
-B[(1, 2)] - B[(2, 1)] - B[(0, 3)]
sage: KL.demazure_lusztig_operator_on_basis(L((0,3)), 1, 1, 0, convention="bar")
B[(1, 2)] + B[(2, 1)] + B[(0, 3)]
At `q_1=1` and `q_2=-1` we recover the action of the simple reflection `s_i`::
sage: KL.demazure_lusztig_operator_on_basis(L((2,2)), 1, 1, -1)
B[(2, 2)]
sage: KL.demazure_lusztig_operator_on_basis(L((3,0)), 1, 1, -1)
B[(0, 3)]
sage: KL.demazure_lusztig_operator_on_basis(L((0,3)), 1, 1, -1)
B[(3, 0)]
"""
if convention == "dominant":
weight = -weight
pi_on_weight = self.isobaric_divided_difference_on_basis(weight, i)
if convention == "bar":
pi_on_weight = self.monomial(weight) - pi_on_weight
result = (q1+q2) * pi_on_weight - self.term(weight.simple_reflection(i), q2)
if convention == "dominant":
return result.map_support(operator.neg)
else:
return result
def demazure_lusztig_operators(self, q1, q2, convention="antidominant"):
r"""
Return the Demazure-Lusztig operators acting on ``self``.
INPUT:
- ``q1,q2`` -- two elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
If `R` is the parent weight ring, the Demazure-Lusztig
operator `T_i` is the linear map `R\rightarrow R` obtained
by interpolating between the isobaric divided difference
operator `\pi_i` (see :meth:`.isobaric_divided_difference_on_basis`)
and the simple reflection `s_i`.
.. MATH::
(q_1+q_2) \pi_i - q_2 s_i
The Demazure-Lusztig operators give the usual
representation of the operator `T_i` of the (affine) Hecke
algebra with eigenvalues `q_1` and `q_2` associated to the
Weyl group.
Several variants are available to match with various
conventions used in the literature:
- "bar" replaces `\pi_i` in the formula above by
`\overline{\pi}_i = (1-\pi_i)`.
- "dominant" conjugates the operator by
              `x^\lambda \mapsto x^{-\lambda}`.
            The names dominant and antidominant for the conventions were chosen with regard to
the nonsymmetric Macdonald polynomials. The `Y` operators for the Macdonald polynomials
in the "dominant" convention satisfy `Y_\lambda = T_{t_{\lambda}}` for `\lambda` dominant.
This is also the convention used in [Haiman06]_. For the "antidominant" convention,
`Y_\lambda = T_{t_{\lambda}}` with `\lambda` antidominant.
.. SEEALSO::
- :meth:`demazure_lusztig_operator_on_basis`.
- :class:`~.non_symmetric_macdonald_polynomials.NonSymmetricMacdonaldPolynomials`.
REFERENCES:
.. [Lusztig1985] <NAME>,
*Equivariant K-theory and representations of Hecke algebras*,
Proc. Amer. Math. Soc. 94 (1985), no. 2, 337-342.
.. [Cherednik1995] \<NAME>,
*Nonsymmetric Macdonald polynomials*. IMRN 10, 483-515 (1995).
EXAMPLES::
sage: L = RootSystem(["A",1]).ambient_space()
sage: K = QQ['q1,q2'].fraction_field()
sage: q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: T = KL.demazure_lusztig_operators(q1, q2)
sage: Tbar = KL.demazure_lusztig_operators(q1, q2, convention="bar")
sage: Tdominant = KL.demazure_lusztig_operators(q1, q2, convention="dominant")
sage: x = KL.monomial(L((3,0)))
sage: T[1](x)
(q1+q2)*B[(1, 2)] + (q1+q2)*B[(2, 1)] + (q1+q2)*B[(3, 0)] + q1*B[(0, 3)]
sage: Tbar[1](x)
(-q1-q2)*B[(1, 2)] + (-q1-q2)*B[(2, 1)] + (-q1-2*q2)*B[(0, 3)]
sage: Tbar[1](x) + T[1](x)
(q1+q2)*B[(3, 0)] + (-2*q2)*B[(0, 3)]
sage: Tdominant[1](x)
(-q1-q2)*B[(1, 2)] + (-q1-q2)*B[(2, 1)] + (-q2)*B[(0, 3)]
sage: Tdominant.Tw_inverse(1)(KL.monomial(-L.simple_root(1)))
((-q1-q2)/(q1*q2))*B[(0, 0)] - 1/q2*B[(1, -1)]
We repeat similar computation in the affine setting::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: K = QQ['q1,q2'].fraction_field()
sage: q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: T = KL.demazure_lusztig_operators(q1, q2)
sage: Tbar = KL.demazure_lusztig_operators(q1, q2, convention="bar")
sage: Tdominant = KL.demazure_lusztig_operators(q1, q2, convention="dominant")
sage: e = L.basis()
sage: x = KL.monomial(3*e[0])
sage: T[1](x)
(q1+q2)*B[e[0] + 2*e[1]] + (q1+q2)*B[2*e[0] + e[1]] + (q1+q2)*B[3*e[0]] + q1*B[3*e[1]]
sage: Tbar[1](x)
(-q1-q2)*B[e[0] + 2*e[1]] + (-q1-q2)*B[2*e[0] + e[1]] + (-q1-2*q2)*B[3*e[1]]
sage: Tbar[1](x) + T[1](x)
(q1+q2)*B[3*e[0]] + (-2*q2)*B[3*e[1]]
sage: Tdominant[1](x)
(-q1-q2)*B[e[0] + 2*e[1]] + (-q1-q2)*B[2*e[0] + e[1]] + (-q2)*B[3*e[1]]
sage: Tdominant.Tw_inverse(1)(KL.monomial(-L.simple_root(1)))
((-q1-q2)/(q1*q2))*B[0] - 1/q2*B[e[0] - e[1]]
One can obtain iterated operators by passing a reduced
word or an element of the Weyl group::
sage: T[1,2](x)
(q1^2+2*q1*q2+q2^2)*B[e[0] + e[1] + e[2]] +
(q1^2+2*q1*q2+q2^2)*B[e[0] + 2*e[1]] +
(q1^2+q1*q2)*B[e[0] + 2*e[2]] + (q1^2+2*q1*q2+q2^2)*B[2*e[0] + e[1]] +
(q1^2+q1*q2)*B[2*e[0] + e[2]] + (q1^2+q1*q2)*B[3*e[0]] +
(q1^2+q1*q2)*B[e[1] + 2*e[2]] + (q1^2+q1*q2)*B[2*e[1] + e[2]] +
(q1^2+q1*q2)*B[3*e[1]] + q1^2*B[3*e[2]]
and use that to check, for example, the braid relations::
sage: T[1,2,1](x) - T[2,1,2](x)
0
The operators satisfy the relations of the affine Hecke
algebra with parameters `q_1`, `q_2`::
sage: T._test_relations()
sage: Tdominant._test_relations()
sage: Tbar._test_relations() #-q2,q1+2*q2 # todo: not implemented: set the appropriate eigenvalues!
And the `\bar{T}` are basically the inverses of the `T` s::
sage: Tinv = KL.demazure_lusztig_operators(2/q1+1/q2,-1/q1,convention="bar")
sage: [Tinv[1](T[1](x))-x for x in KL.some_elements()]
[0, 0, 0, 0, 0, 0, 0]
We check that `\Lambda_1-\Lambda_0` is an eigenvector for
the `Y` s in affine type::
sage: K = QQ['q,q1,q2'].fraction_field()
sage: q,q1,q2=K.gens()
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: L0 = L.classical()
sage: Lambda = L.fundamental_weights()
sage: alphacheck = L0.simple_coroots()
sage: KL = L.algebra(K)
sage: T = KL.demazure_lusztig_operators(q1, q2, convention="dominant")
sage: Y = T.Y()
sage: alphacheck = Y.keys().alpha() # alpha of coroot lattice is alphacheck
sage: alphacheck
Finite family {0: alphacheck[0], 1: alphacheck[1], 2: alphacheck[2]}
sage: x = KL.monomial(Lambda[1]-Lambda[0]); x
B[e[0]]
In fact it is not exactly an eigenvector, but the extra
            `\delta` term is to be interpreted as a `q` parameter::
sage: Y[alphacheck[0]](KL.one())
q2^2/q1^2*B[0]
sage: Y[alphacheck[1]](x)
((-q2^2)/(-q1^2))*B[e[0] - e['delta']]
sage: Y[alphacheck[2]](x)
(q1/(-q2))*B[e[0]]
sage: KL.q_project(Y[alphacheck[1]](x),q)
((-q2^2)/(-q*q1^2))*B[(1, 0, 0)]
sage: KL.q_project(x, q)
B[(1, 0, 0)]
sage: KL.q_project(Y[alphacheck[0]](x),q)
((-q*q1)/q2)*B[(1, 0, 0)]
sage: KL.q_project(Y[alphacheck[1]](x),q)
((-q2^2)/(-q*q1^2))*B[(1, 0, 0)]
sage: KL.q_project(Y[alphacheck[2]](x),q)
(q1/(-q2))*B[(1, 0, 0)]
We now check systematically that the Demazure-Lusztig
operators satisfy the relations of the Iwahori-Hecke
algebra::
sage: K = QQ['q1,q2']
sage: q1, q2 = K.gens()
sage: for cartan_type in CartanType.samples(crystallographic=True): # long time 12s
....: L = RootSystem(cartan_type).root_lattice()
....: KL = L.algebra(K)
....: T = KL.demazure_lusztig_operators(q1,q2)
....: T._test_relations()
sage: for cartan_type in CartanType.samples(crystallographic=True): # long time 12s
....: L = RootSystem(cartan_type).weight_lattice()
....: KL = L.algebra(K)
....: T = KL.demazure_lusztig_operators(q1,q2)
....: T._test_relations()
Recall that the Demazure-Lusztig operators are only
defined when all monomials belong to the weight lattice.
Thus, in the group algebra of the ambient space, we need
to specify explicitly the elements on which to run the
tests::
sage: for cartan_type in CartanType.samples(crystallographic=True): # long time 12s
....: L = RootSystem(cartan_type).ambient_space()
....: KL = L.algebra(K)
....: weight_lattice = RootSystem(cartan_type).weight_lattice(extended=L.is_extended())
....: elements = [ KL.monomial(L(weight)) for weight in weight_lattice.some_elements() ]
....: T = KL.demazure_lusztig_operators(q1,q2)
....: T._test_relations(elements=elements)
"""
T_on_basis = functools.partial(self.demazure_lusztig_operator_on_basis,
q1 = q1, q2 = q2, convention = convention)
return HeckeAlgebraRepresentation(self, T_on_basis, self.cartan_type(), q1, q2, side="left")
def demazure_lusztig_operator_on_classical_on_basis(self, weight, i, q, q1, q2, convention="antidominant"):
r"""
Return the result of applying the `i`-th Demazure-Lusztig operator on the classical weight ``weight`` embedded at level 0.
INPUT:
- ``weight`` -- a classical weight `\lambda`
- ``i`` -- an element of the index set
- ``q1,q2`` -- two elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
See :meth:`demazure_lusztig_operators` for the details.
.. TODO::
- Optimize the code to only do the embedding/projection for T_0
- Add an option to specify at which level one wants to
work. Currently this is level 0.
EXAMPLES::
sage: L = RootSystem(["A",1,1]).ambient_space()
sage: L0 = L.classical()
sage: K = QQ['q,q1,q2']
sage: q, q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: KL0 = L0.algebra(K)
These operators coincide with the usual Demazure-Lusztig
operators::
sage: KL.demazure_lusztig_operator_on_classical_on_basis(L0((2,2)), 1, q, q1, q2)
q1*B[(2, 2)]
sage: KL0.demazure_lusztig_operator_on_basis(L0((2,2)), 1, q1, q2)
q1*B[(2, 2)]
sage: KL.demazure_lusztig_operator_on_classical_on_basis(L0((3,0)), 1, q, q1, q2)
(q1+q2)*B[(1, 2)] + (q1+q2)*B[(2, 1)] + (q1+q2)*B[(3, 0)] + q1*B[(0, 3)]
sage: KL0.demazure_lusztig_operator_on_basis(L0((3,0)), 1, q1, q2)
(q1+q2)*B[(1, 2)] + (q1+q2)*B[(2, 1)] + (q1+q2)*B[(3, 0)] + q1*B[(0, 3)]
except that we now have an action of `T_0`, which introduces some `q` s::
sage: KL.demazure_lusztig_operator_on_classical_on_basis(L0((2,2)), 0, q, q1, q2)
q1*B[(2, 2)]
sage: KL.demazure_lusztig_operator_on_classical_on_basis(L0((3,0)), 0, q, q1, q2)
(-q^2*q1-q^2*q2)*B[(1, 2)] + (-q*q1-q*q2)*B[(2, 1)] + (-q^3*q2)*B[(0, 3)]
"""
L = self.basis().keys()
weight = L.embed_at_level(weight, 0)
return self.q_project(self.demazure_lusztig_operator_on_basis(weight, i, q1, q2, convention=convention), q)
def demazure_lusztig_operators_on_classical(self, q, q1, q2, convention="antidominant"):
r"""
Return the Demazure-Lusztig operators acting at level 1 on ``self.classical()``.
INPUT:
- ``q,q1,q2`` -- three elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
Let `KL` be the group algebra of an affine weight lattice
realization `L`. The Demazure-Lusztig operators for `KL`
act on the group algebra of the corresponding classical
weight lattice by embedding it at level 1, and projecting
back.
.. SEEALSO::
- :meth:`demazure_lusztig_operators`.
- :meth:`demazure_lusztig_operator_on_classical_on_basis`.
- :meth:`q_project`
EXAMPLES::
sage: L = RootSystem(["A",1,1]).ambient_space()
sage: K = QQ['q,q1,q2'].fraction_field()
sage: q, q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: KL0 = KL.classical()
sage: L0 = KL0.basis().keys()
sage: T = KL.demazure_lusztig_operators_on_classical(q, q1, q2)
sage: x = KL0.monomial(L0((3,0))); x
B[(3, 0)]
For `T_1,\dots` we recover the usual Demazure-Lusztig operators::
sage: T[1](x)
(q1+q2)*B[(1, 2)] + (q1+q2)*B[(2, 1)] + (q1+q2)*B[(3, 0)] + q1*B[(0, 3)]
For `T_0`, we can note that, in the projection, `\delta`
is mapped to `q`::
sage: T[0](x)
(-q^2*q1-q^2*q2)*B[(1, 2)] + (-q*q1-q*q2)*B[(2, 1)] + (-q^3*q2)*B[(0, 3)]
Note that there is no translation part, and in particular
1 is an eigenvector for all `T_i`'s::
sage: T[0](KL0.one())
q1*B[(0, 0)]
sage: T[1](KL0.one())
q1*B[(0, 0)]
sage: Y = T.Y()
sage: alphacheck=Y.keys().simple_roots()
sage: Y[alphacheck[0]](KL0.one())
((-q2)/(q*q1))*B[(0, 0)]
Matching with Ion Bogdan's hand calculations from 3/15/2013::
sage: L = RootSystem(["A",1,1]).weight_space(extended=True)
sage: K = QQ['q,u'].fraction_field()
sage: q, u = K.gens()
sage: KL = L.algebra(K)
sage: KL0 = KL.classical()
sage: L0 = KL0.basis().keys()
sage: omega = L0.fundamental_weights()
sage: T = KL.demazure_lusztig_operators_on_classical(q, u, -1/u, convention="dominant")
sage: Y = T.Y()
sage: alphacheck = Y.keys().simple_roots()
sage: Ydelta = Y[Y.keys().null_root()]
sage: Ydelta.word, Ydelta.signs, Ydelta.scalar
((), (), 1/q)
sage: Y1 = Y[alphacheck[1]]
sage: Y1.word, Y1.signs, Y1.scalar # This is T_0 T_1 (T_1 acts first, then T_0); Ion gets T_1 T_0
((1, 0), (1, 1), 1)
sage: Y0 = Y[alphacheck[0]]
sage: Y0.word, Y0.signs, Y0.scalar # This is 1/q T_1^-1 T_0^-1
((0, 1), (-1, -1), 1/q)
Note that the following computations use the "dominant" convention::
sage: T0 = T.Tw(0)
sage: T0(KL0.monomial(omega[1]))
q*u*B[-Lambda[1]] + ((u^2-1)/u)*B[Lambda[1]]
sage: T0(KL0.monomial(2*omega[1]))
((q*u^2-q)/u)*B[0] + q^2*u*B[-2*Lambda[1]] + ((u^2-1)/u)*B[2*Lambda[1]]
sage: T0(KL0.monomial(-omega[1]))
1/(q*u)*B[Lambda[1]]
sage: T0(KL0.monomial(-2*omega[1]))
((-u^2+1)/(q*u))*B[0] + 1/(q^2*u)*B[2*Lambda[1]]
"""
# In type BC dual we used q^2 and q elsewhere
# Not sure this is the right thing to do or just a workaround ...
# This probably makes up for the fact that, in type BC
# dual, the null coroot is twice Sage's deltacheck
# whereas the null root is delta. So we need to map delta
# to q^2 in the q_projection.
# Should this go in q_project instead?
ct = self.cartan_type()
a0check = ct.acheck()[ct.special_node()]
T_on_basis = functools.partial(self.demazure_lusztig_operator_on_classical_on_basis,
q1=q1, q2=q2, q=q**a0check, convention=convention)
return HeckeAlgebraRepresentation(self.classical(), T_on_basis, self.cartan_type(), q1=q1, q2=q2, q=q, side="left")
@cached_method
def T0_check_on_basis(self, q1, q2, convention="antidominant"):
r"""
Return the `T_0^\vee` operator acting on the basis.
This implements the formula for `T_{0'}` in Section 6.12 of [Haiman06]_.
REFERENCES:
.. [Haiman06] \M. Haiman, Cherednik algebras, Macdonald polynomials and combinatorics, ICM 2006.
.. WARNING::
                The current implementation probably returns nonsense
                if the convention is not "dominant".
EXAMPLES::
sage: K = QQ['q1,q2'].fraction_field()
sage: q1,q2 = K.gens()
sage: L = RootSystem(["A",1,1]).ambient_space()
sage: L0 = L.classical()
sage: KL = L.algebra(K)
sage: some_weights = L.fundamental_weights()
sage: f = KL.T0_check_on_basis(q1,q2, convention="dominant")
sage: f(L0.zero())
(q1+q2)*B[(0, 0)] + q1*B[(1, -1)]
sage: L = RootSystem(["A",3,1]).ambient_space()
sage: L0 = L.classical()
sage: KL = L.algebra(K)
sage: some_weights = L0.fundamental_weights()
sage: f = KL.T0_check_on_basis(q1,q2, convention="dominant")
sage: f(L0.zero()) # not checked
(q1+q2)*B[(0, 0, 0, 0)] + q1^3/q2^2*B[(1, 0, 0, -1)]
The following results have not been checked::
sage: for x in some_weights:
....: print("{} : {}".format(x, f(x)))
(1, 0, 0, 0) : q1*B[(1, 0, 0, 0)]
(1, 1, 0, 0) : q1*B[(1, 1, 0, 0)]
(1, 1, 1, 0) : q1*B[(1, 1, 1, 0)]
Some examples for type `B_2^{(1)}` dual::
sage: L = RootSystem("B2~*").ambient_space()
sage: L0 = L.classical()
sage: e = L.basis()
sage: K = QQ['q,u'].fraction_field()
sage: q,u = K.gens()
sage: q1 = u
sage: q2 = -1/u
sage: KL = L.algebra(K)
sage: KL0 = KL.classical()
sage: f = KL.T0_check_on_basis(q1,q2, convention="dominant")
sage: T = KL.twisted_demazure_lusztig_operators(q1,q2, convention="dominant")
Direct calculation::
sage: T.Tw(0)(KL0.monomial(L0([0,0])))
((u^2-1)/u)*B[(0, 0)] + u^3*B[(1, 1)]
sage: KL.T0_check_on_basis(q1,q2, convention="dominant")(L0([0,0]))
((u^2-1)/u)*B[(0, 0)] + u^3*B[(1, 1)]
Step by step calculation, comparing by hand with <NAME>::
sage: res = T.Tw(2)(KL0.monomial(L0([0,0]))); res
u*B[(0, 0)]
sage: res = res * KL0.monomial(L0([-1,1])); res
u*B[(-1, 1)]
sage: res = T.Tw_inverse(1)(res); res
(u^2-1)*B[(0, 0)] + u^2*B[(1, -1)]
sage: res = T.Tw_inverse(2)(res); res
((u^2-1)/u)*B[(0, 0)] + u^3*B[(1, 1)]
"""
L = self.basis().keys()
ct = L.cartan_type()
special_node = ct.special_node()
a0 = ct.a()[special_node]
A0 = self.classical()
T = A0.demazure_lusztig_operators(q1, q2, convention=convention)
# TODO: use the formula expressing the inverse of T as a Demazure Lusztig operator? Or go through the affine action of T_0 for the dual
L0 = A0.basis().keys()
# The dominant short root of the classical system
if ct.type() == 'BC':
# CHECKME: this is not exactly phi, but phi rescaled
# appropriately so that it's in the orbit of the
# simple classical roots
phi = -a0*L0(L.simple_roots()[0])
else:
phi = L0(L0.root_system.coroot_lattice().highest_root().associated_coroot())
# Variant: try to fetch it from the other affinization; something like:
# The a0 only has an influence in type BC; it handles the fact that alpha_0
# is not in the orbit of the classical roots
#phi1 = - L0(L'.other_affinization().simple_roots()[special_node]) * a0
#assert phi == phi1
j, v = phi.to_simple_root(reduced_word=True)
translation = A0.monomial(-L0.simple_root(j)/a0)
Tv = T[v]
Tinv = T.Tw_inverse(v+(j,))
def T0_check(weight):
return -q1*q2*Tinv( translation * Tv(A0.monomial(weight)))
# For debugging purposes
T0_check.phi = phi
T0_check.j = j
T0_check.v = v
return T0_check
@cached_method
def classical(self):
"""
Return the group algebra of the corresponding classical lattice.
EXAMPLES::
sage: KL = RootSystem(["A",2,1]).ambient_space().algebra(QQ)
sage: KL.classical()
Algebra of the Ambient space of the Root system of type ['A', 2] over Rational Field
"""
return self.basis().keys().classical().algebra(self.base_ring())
def q_project_on_basis(self, l, q):
r"""
Return the monomial `c * cl(l)` in the group algebra of the classical lattice.
INPUT:
- ``l`` -- an element of the root lattice realization
- ``q`` -- an element of the ground ring
Here, `cl(l)` is the projection of `l` in the classical
lattice, and `c` is the coefficient of `l` in `\delta`.
.. SEEALSO:: :meth:`q_project_on_basis`
EXAMPLES::
sage: K = QQ['q'].fraction_field()
sage: q = K.gen()
sage: KL = RootSystem(["A",2,1]).ambient_space().algebra(K)
sage: L = KL.basis().keys()
sage: e = L.basis()
sage: KL.q_project_on_basis( 4*e[1] + 3*e[2] + e['deltacheck'] - 2*e['delta'], q)
1/q^2*B[(0, 4, 3)]
"""
KL0 = self.classical()
L0 = KL0.basis().keys()
return KL0.term(L0(l), q**l["delta"])
def q_project(self, x, q):
r"""
Implement the `q`-projection morphism from ``self`` to the group algebra of the classical space.
INPUT:
- ``x`` -- an element of the group algebra of ``self``
- ``q`` -- an element of the ground ring
This is an algebra morphism mapping `\delta` to `q` and
`X^b` to its classical counterpart for the other elements
`b` of the basis of the realization.
EXAMPLES::
sage: K = QQ['q'].fraction_field()
sage: q = K.gen()
sage: KL = RootSystem(["A",2,1]).ambient_space().algebra(K)
sage: L = KL.basis().keys()
sage: e = L.basis()
sage: x = KL.an_element() + KL.monomial(4*e[1] + 3*e[2] + e['deltacheck'] - 2*e['delta']); x
B[2*e[0] + 2*e[1] + 3*e[2]] + B[4*e[1] + 3*e[2] - 2*e['delta'] + e['deltacheck']]
sage: KL.q_project(x, q)
B[(2, 2, 3)] + 1/q^2*B[(0, 4, 3)]
sage: KL = RootSystem(["BC",3,2]).ambient_space().algebra(K)
sage: L = KL.basis().keys()
sage: e = L.basis()
sage: x = KL.an_element() + KL.monomial(4*e[1] + 3*e[2] + e['deltacheck'] - 2*e['delta']); x
B[2*e[0] + 2*e[1] + 3*e[2]] + B[4*e[1] + 3*e[2] - 2*e['delta'] + e['deltacheck']]
sage: KL.q_project(x, q)
B[(2, 2, 3)] + 1/q^2*B[(0, 4, 3)]
.. WARNING::
Recall that the null root, usually denoted `\delta`,
is in fact ``a[0]\delta`` in Sage's notation, in order
to avoid half integer coefficients (this only makes a
difference in type BC). Similarly, what's usually
denoted `q` is in fact ``q^a[0]`` in Sage's notations,
to avoid manipulating square roots::
sage: KL.q_project(KL.monomial(L.null_root()),q)
q^2*B[(0, 0, 0)]
"""
L0 = self.classical()
return L0.linear_combination( (self.q_project_on_basis(l, q), c) for l,c in x )
def twisted_demazure_lusztig_operator_on_basis(self, weight, i, q1, q2, convention="antidominant"):
r"""
Return the twisted Demazure-Lusztig operator acting on the basis.
INPUT:
- ``weight`` -- an element `\lambda` of the weight lattice
- ``i`` -- an element of the index set
- ``q1,q2`` -- two elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
.. SEEALSO:: :meth:`twisted_demazure_lusztig_operators`
EXAMPLES::
sage: L = RootSystem(["A",3,1]).ambient_space()
sage: e = L.basis()
sage: K = QQ['q1,q2'].fraction_field()
sage: q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: Lambda = L.classical().fundamental_weights()
sage: KL.twisted_demazure_lusztig_operator_on_basis(Lambda[1]+2*Lambda[2], 1, q1, q2, convention="dominant")
(-q2)*B[(2, 3, 0, 0)]
sage: KL.twisted_demazure_lusztig_operator_on_basis(Lambda[1]+2*Lambda[2], 2, q1, q2, convention="dominant")
(-q1-q2)*B[(3, 1, 1, 0)] + (-q2)*B[(3, 0, 2, 0)]
sage: KL.twisted_demazure_lusztig_operator_on_basis(Lambda[1]+2*Lambda[2], 3, q1, q2, convention="dominant")
q1*B[(3, 2, 0, 0)]
sage: KL.twisted_demazure_lusztig_operator_on_basis(Lambda[1]+2*Lambda[2], 0, q1, q2, convention="dominant")
((q1*q2+q2^2)/q1)*B[(1, 2, 1, 1)] + ((q1*q2+q2^2)/q1)*B[(1, 2, 2, 0)] + q2^2/q1*B[(1, 2, 0, 2)]
+ ((q1^2+2*q1*q2+q2^2)/q1)*B[(2, 1, 1, 1)] + ((q1^2+2*q1*q2+q2^2)/q1)*B[(2, 1, 2, 0)]
+ ((q1*q2+q2^2)/q1)*B[(2, 1, 0, 2)] + ((q1^2+2*q1*q2+q2^2)/q1)*B[(2, 2, 1, 0)] + ((q1*q2+q2^2)/q1)*B[(2, 2, 0, 1)]
"""
if i == 0: # should use the special node
if convention != "dominant":
raise NotImplementedError("The twisted Demazure-Lusztig operator T_0 is only implemented in the dominant convention")
return self.T0_check_on_basis(q1, q2, convention=convention)(weight)
else:
L = self.classical()
return L.demazure_lusztig_operators(q1, q2, convention=convention)[i](L.monomial(weight))
def twisted_demazure_lusztig_operators(self, q1, q2, convention="antidominant"):
r"""
Return the twisted Demazure-Lusztig operators acting on ``self``.
INPUT:
- ``q1,q2`` -- two elements of the ground ring
- ``convention`` -- "antidominant", "bar", or "dominant" (default: "antidominant")
.. WARNING::
- the code is currently only tested for `q_1q_2=-1`
- only the "dominant" convention is functional for `i=0`
For `T_1,\ldots,T_n`, these operators are the usual
Demazure-Lusztig operators. On the other hand, the
operator `T_0` is twisted::
sage: L = RootSystem(["A",3,1]).ambient_space()
sage: e = L.basis()
sage: K = QQ['q1,q2'].fraction_field()
sage: q1, q2 = K.gens()
sage: KL = L.algebra(K)
sage: T = KL.twisted_demazure_lusztig_operators(q1, q2, convention="dominant")
sage: T._test_relations()
TESTS:
The following computations were checked with Mark Shimozono for type `A_1^{(1)}`::
sage: L = RootSystem(["A",1,1]).ambient_space()
sage: e = L.basis()
sage: K = QQ['q1,q2'].fraction_field()
sage: q1,q2 = K.gens()
sage: KL = L.algebra(K)
sage: T = KL.twisted_demazure_lusztig_operators(q1, q2, convention="dominant")
sage: T._test_relations()
sage: L0 = L.classical()
sage: alpha = L0.simple_roots()
sage: T.Ti_on_basis(L0.zero(), 1)
q1*B[(0, 0)]
sage: T.Ti_inverse_on_basis(L0.zero(), 1)
1/q1*B[(0, 0)]
sage: T.Ti_on_basis(alpha[1], 1)
(-q1-q2)*B[(0, 0)] + (-q2)*B[(-1, 1)]
sage: T.Ti_inverse_on_basis(alpha[1], 1)
((q1+q2)/(q1*q2))*B[(0, 0)] + 1/q1*B[(-1, 1)] + ((q1+q2)/(q1*q2))*B[(1, -1)]
sage: T.Ti_on_basis(L0.zero(), 0)
(q1+q2)*B[(0, 0)] + q1*B[(1, -1)]
The next computations were checked with <NAME> for type `A_2^{(1)}`::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: e = L.basis()
sage: K = QQ['u'].fraction_field()
sage: u = K.gen()
sage: KL = L.algebra(K)
sage: T = KL.twisted_demazure_lusztig_operators(u, -~u, convention="dominant")
sage: T._test_relations()
sage: L0 = L.classical()
sage: KL0 = L0.algebra(K)
sage: alpha = L0.simple_roots()
sage: phi = L0.highest_root(); phi
(1, 0, -1)
sage: phi.to_simple_root(reduced_word=True)
(2, (1,))
sage: res = T.Ti_on_basis(L0([1,0,1]), 1); res
1/u*B[(0, 1, 1)]
sage: res = res * KL0.monomial(-alpha[2]); res
1/u*B[(0, 0, 2)]
sage: res = T.Tw_inverse(2)(res); res
((u^2-1)/u^2)*B[(0, 1, 1)] + B[(0, 2, 0)]
sage: res = T.Tw_inverse(1)(res); res
((u^2-1)/u)*B[(1, 1, 0)] + ((u^2-1)/u)*B[(1, 0, 1)] + u*B[(2, 0, 0)]
.. TODO::
                Choose a good set of Cartan types to run on. Rank > 4 is
too big. But `C_1` and `B_1` are boring.
We now check systematically that those operators satisfy
the relations of the Iwahori-Hecke algebra::
sage: K = QQ['q1,q2'].fraction_field()
sage: q1, q2 = K.gens()
sage: for cartan_type in CartanType.samples(affine=True, crystallographic=True): # long time 12s
....: if cartan_type.rank() > 4: continue
....: if cartan_type.type() == 'BC': continue
....: KL = RootSystem(cartan_type).weight_lattice().algebra(K)
....: T = KL.twisted_demazure_lusztig_operators(q1, q2, convention="dominant")
....: T._test_relations()
.. TODO::
Investigate why `T_0^\vee` currently does not satisfy
the quadratic relation in type `BC`. This should
hopefully be fixed when `T_0^\vee` will have a more
uniform implementation::
sage: cartan_type = CartanType(["BC",1,2])
sage: KL = RootSystem(cartan_type).weight_lattice().algebra(K)
sage: T = KL.twisted_demazure_lusztig_operators(q1,q2, convention="dominant")
sage: T._test_relations()
Traceback (most recent call last):
... tester.assertTrue(Ti(Ti(x,i,-q2),i,-q1).is_zero()) ...
AssertionError: False is not true
Comparison with T0::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: e = L.basis()
sage: K = QQ['t,q'].fraction_field()
sage: t,q = K.gens()
sage: q1 = t
sage: q2 = -1
sage: KL = L.algebra(K)
sage: L0 = L.classical()
sage: T = KL.demazure_lusztig_operators(q1,q2, convention="dominant")
sage: def T0(*l0): return KL.q_project(T[0].on_basis()(L.embed_at_level(L0(l0), 1)), q)
sage: T0_check_on_basis = KL.T0_check_on_basis(q1, q2, convention="dominant")
sage: def T0c(*l0): return T0_check_on_basis(L0(l0))
sage: T0(0,0,1) # not double checked
((-t+1)/q)*B[(1, 0, 0)] + 1/q^2*B[(2, 0, -1)]
sage: T0c(0,0,1)
(t^2-t)*B[(1, 0, 0)] + (t^2-t)*B[(1, 1, -1)] + t^2*B[(2, 0, -1)] + (t-1)*B[(0, 0, 1)]
"""
T_on_basis = functools.partial(self.twisted_demazure_lusztig_operator_on_basis,
q1=q1, q2=q2, convention=convention)
return HeckeAlgebraRepresentation(self.classical(),
T_on_basis,
self.cartan_type().classical().dual().affine().dual(),
q1, q2,
side = "left")
class ElementMethods:
def acted_upon(self, w):
"""
Implements the action of ``w`` on ``self``.
INPUT:
- ``w`` -- an element of the Weyl group acting on the underlying weight lattice realization
EXAMPLES::
sage: L = RootSystem(["A",3]).ambient_space()
sage: W = L.weyl_group()
sage: M = L.algebra(QQ['q','t'])
sage: m = M.an_element(); m # TODO: investigate why we don't get something more interesting
B[(2, 2, 3, 0)]
sage: m = (m+1)^2; m
B[(0, 0, 0, 0)] + 2*B[(2, 2, 3, 0)] + B[(4, 4, 6, 0)]
sage: w = W.an_element(); w.reduced_word()
[1, 2, 3]
sage: m.acted_upon(w)
B[(0, 0, 0, 0)] + 2*B[(0, 2, 2, 3)] + B[(0, 4, 4, 6)]
"""
return self.map_support(w.action)
def expand(self, alphabet):
"""
Expand ``self`` into variables in the ``alphabet``.
INPUT:
- ``alphabet`` -- a non empty list/tuple of (invertible) variables in a ring to expand in
EXAMPLES::
sage: L = RootSystem(["A",2]).ambient_lattice()
sage: KL = L.algebra(QQ)
sage: p = KL.an_element() + KL.sum_of_monomials(L.some_elements()); p
B[(1, 0, 0)] + B[(1, -1, 0)] + B[(1, 1, 0)] + 2*B[(2, 2, 3)] + B[(0, 1, -1)]
sage: F = LaurentPolynomialRing(QQ, 'x,y,z')
sage: p.expand(F.gens())
2*x^2*y^2*z^3 + x*y + x + y*z^-1 + x*y^-1
TESTS::
sage: type(p.expand(F.gens()))
<class 'sage.rings.polynomial.laurent_polynomial.LaurentPolynomial_mpair'>
sage: p = KL.zero()
sage: p.expand(F.gens())
0
sage: type(p.expand(F.gens()))
<class 'sage.rings.polynomial.laurent_polynomial.LaurentPolynomial_mpair'>
"""
codomain = alphabet[0].parent()
return codomain.sum(c * prod(X**int(n)
for X, n in zip(alphabet, vector(m)))
for m, c in self)
# File: Python_Core/ArithmeticExamApplication/arithmetic.py
from random import choice, randint
class MiniCalculator:
def __init__(self):
self.operations = {
"+": MiniCalculator.addition,
"-": MiniCalculator.subtraction,
"*": MiniCalculator.multiplication,
}
@staticmethod
def addition(*numbers):
return sum(numbers)
@staticmethod
def subtraction(*numbers):
result = numbers[0]
for num in numbers[1:]:
result = result - num
return result
@staticmethod
def multiplication(*numbers):
result = numbers[0]
for num in numbers[1:]:
result = result * num
return result
@staticmethod
def square(num):
return num * num
def calculate(self, line, level: int = 1):
if level == 2:
return MiniCalculator.square(int(line))
num1, operation, num2 = line.split()
num1, num2 = map(int, (num1, num2))
if operation in self.operations:
return self.operations[operation](num1, num2)
def generate_math_task(self, level: int = 1):
levels = {1: (2, 9), 2: (11, 29)}
random_operation = choice(tuple(self.operations))
random_nums = (randint(*levels[level]), randint(*levels[level]))
if level == 1:
math_task = f"{random_nums[0]} {random_operation} {random_nums[1]}"
else:
math_task = f"{random_nums[0]}"
return math_task
@staticmethod
def get_int_input():
while True:
user_input = input()
try:
int(user_input)
return user_input
except ValueError:
print("Incorrect format.")
def user_check(self, tasks=5):
levels = {
1: "simple operations with numbers 2-9",
2: "integral squares of 11-29",
}
print(
"\n".join(
(
"Which level do you want? Enter a number:",
f"1 - {levels[1]}",
f"2 - {levels[2]}",
)
)
)
level = MiniCalculator.get_int_input()
result = 0
for _ in range(tasks):
math_task = self.generate_math_task(level=int(level))
print(math_task)
user_input = MiniCalculator.get_int_input()
if user_input == str(self.calculate(line=math_task, level=int(level))):
print("Right!")
result += 1
else:
print("Wrong!")
print(
f"Your mark is {result}/{tasks}.Would you like to save the result? Enter yes or no."
)
user_answer = input()
if user_answer.lower() in ("yes", "y"):
user_name = input("What is your name?\n")
with open("results.txt", mode="a+", encoding="utf-8") as file:
file.write(
f"{user_name}: {result}/{tasks} in level {level} ({levels[int(level)]})\n"
)
print('The results are saved in "results.txt".')
if __name__ == "__main__":
mini_calc = MiniCalculator()
mini_calc.user_check()
|
StarcoderdataPython
|
257743
|
# from zhihu_user_info.util.SpiderUtil import SpiderUtil
from zhihu_user_info.util.SaveUtil import SaveUtil
# from zhihu_user_info.util.Utils import Util
from zhihu_user_info.threadpool.ThreadPool import ThreadPool
import threading
# spider_util = SpiderUtil()
# save_util = SaveUtil()
#
# result_list = [1, 2, 3]
# save_util.middle_save(save_util.question_list_model, result_list)
#
# # with open(r"D:\pycharm\PyCharm 2020.1.1\workplace\taobao\zhihu_user_info\result\hot_list-2021-10-05.txt",
# # mode="w", encoding="utf-8") as f_w:
# # f_w.write("111")
# # f_w.close()
#
# list1 = [1, 2, 3]
# for i in range(0, len(list1)):
# print(list1[i])
# str='https://www.zhihu.com/special/1421924027711156224'
# print(str.__contains__("question"))
save_util = SaveUtil()
thread_pool = ThreadPool(20)
class test():
def __init__(self):
self.lock = threading.RLock()
def get(self, name, i):
print(name + ":" + str(i))
return 1
# def get3(name, i):
# print("new get3:"+name + ":" + str(i))
# return 1
def get1(self):
for i in range(0, 100):
thread_pool.run(func=self.get, args=("get1", i,), callback=self.get1_callback)
# thread_pool.close()
#
# def lock_test(self):
# self.lock.acquire()
# try:
#
def get1_callback(self, status, result):
if status:
# get2(num=result)
# print(11111111111111111111111111111111111111111111)
print("call_back:" + str(status) + "\n")
print("call_back_result:" + str(result) + "\n")
# return result
# def get2(num):
# for i in range(0, 100):
# thread_pool.run(func=get3, args=("get2", i,))
# print("new get2"+str(num+1))
# close()
# thread_pool.close()
def close(self):
thread_pool.close()
def save(self, i):
test_dict = {"thread": i, "thread": i, "thread": i, "thread": i, "thread": i, "thread": i,
"thread": i, "thread": i, "thread": i}
save_util.save(test_dict)
print(i)
def debug_callback(self, status, result):
print(status)
print(result)
if __name__ == '__main__':
t = test()
for i in range(0, 1000):
pass
# t.save(str(i))
# thread_pool.run(func=t.save, args=(i,))
# thread_pool.close()
# t.get1()
# # get2()
# t.close()
|
StarcoderdataPython
|
4972355
|
import jax
import jax.numpy as jnp
# takes in a logit distribution, applies a temperature-scaled softmax, and samples a token
def softmax_sample(key, logits, _, temp=1):
return jax.random.categorical(key, logits/temp, -1).astype(jnp.uint32), None
def nucleaus_filter(logits, top_p=0.9, top_k=None):
sorted_logits = jnp.sort(logits)[:, ::-1] # sort descending
sorted_indices = jnp.argsort(logits)[:, ::-1]
cumulative_probs = jnp.cumsum(jax.nn.softmax(sorted_logits), axis=-1)
if top_k is not None:
# Keep only top_k tokens
indices_range = jnp.arange(len(sorted_indices[0]))
indices_range = jnp.stack([indices_range] * len(sorted_indices), axis=0)
sorted_indices_to_remove = jnp.where(indices_range > top_k, sorted_indices, 0)
_, indices_to_remove = jax.lax.sort_key_val(sorted_indices, sorted_indices_to_remove)
logit_mask = 1e10 * indices_to_remove
logits -= logit_mask
# Remove tokens with cumulative probability above a threshold
sorted_indices_to_remove = cumulative_probs > top_p
sorted_indices_to_remove = jnp.concatenate((jnp.zeros_like(sorted_indices_to_remove[:, :1]), sorted_indices_to_remove), axis=-1)[:, :-1]
_, indices_to_remove = jax.lax.sort_key_val(sorted_indices, sorted_indices_to_remove)
logit_mask = 1e10 * indices_to_remove
logits -= logit_mask
return logits
def nucleaus_sample(key, logits, _, options):
top_p = options.get('top_p', 0.9)
temp = options.get('temp', 1)
top_k = options.get('top_k', None)
print("top_p:", top_p)
print("temp:", temp)
print("top_k:", top_k)
logits = nucleaus_filter(logits, top_p, top_k=top_k)
return softmax_sample(key, logits, None, temp=temp)
if __name__ == "__main__":
import numpy as np
logits = np.array([[-2, -1, 0, 0.8, 0, 0.1, 0.3, 0.4, 0.5, 0.6, 0.7, -3]])
print(nucleaus_filter(logits))
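# A hedged usage sketch (not part of the original file): nucleaus_sample also needs a JAX
# PRNG key and an options dict, so sampling a token from the same demo logits could look like:
key = jax.random.PRNGKey(0)
sampled_token, _ = nucleaus_sample(key, jnp.asarray(logits), None, {"top_p": 0.9, "temp": 1.0, "top_k": None})
print("sampled token id:", sampled_token)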
|
StarcoderdataPython
|
4940329
|
import spacy
nlp = spacy.load("en_core_web_sm")
def extract_entities(text):
for entity in nlp(text).ents:
print("Entity: ", entity.text)
print("Entity Type: %s | %s" % (entity.label_, spacy.explain(entity.label_)))
print("Start Offset of the Entity: ", entity.start_char)
print("End Offset of the Entity: ", entity.end_char)
print("--")
if __name__ == "__main__":
text = "<NAME> was the 44th president of the United States of America."
extract_entities(text)
|
StarcoderdataPython
|
8050439
|
"""
evoke definition class for Data objects
schema syntax:
class Widget:
table='widgets' #optional table name - will default to the class name (lowercased). Can provide a database override eg 'mydb.widgets'
name=TAG #first attribute / column name....
number=INT,100,KEY #optional default. KEY will generate an index on this field
date=DATE,KEY,'20000101' #default and KEY can be swapped, but TYPE must come first
comment=STR,KEY #KEY on a STR or TEXT field will generate a text index (ie FULLTEXT)
... etc #as many as you like
insert=[dict(name='whatever',number=123),dict(name="something",number=456)] #seed data (columns) for the table
The above definition implies (and requires) that there is a class called Widget in a module called Widget.py in the app code folder, or in the base folder.
A schema class can be subclassed, eg to give a different class which uses the same database table (or indeed a different table with the same schema, or a modified version of it)
Note that column names can even be mysql keywords, as they are always `quoted`.
IHM April 2007
TEXTKEY: for defining multi-column fulltext indices - CURRENTLY MODIFICATIONS ARE NOT SUPPORTED (should use mysql definitions in "show keys from ...")
"""
if __name__=='__main__':
import sys,os
sys.path.append(os.path.abspath('..'))
from lib import TAG,STR,CHAR,TEXT,INT,SMALLINT,TINYINT,FLOAT,DATE,FLAG,MONEY,TIME,REL,BLOB,sql_list,Error
else:
from base.lib import TAG,STR,CHAR,TEXT,INT,SMALLINT,TINYINT,FLOAT,DATE,FLAG,MONEY,TIME,REL,BLOB,sql_list,Error
from DB import execute
class SchemaMismatchError(Error):
"SCHEMA MISMATCH: '%s' column `%s` in table %s is defined in schema as '%s'"
KEY='KEY'
now=DATE().sql().strip("'") #convenience shorthand for initialising dates
class TEXTKEY(object):
"for defining multi-column fulltext indices"
def __init__(self,*columns):
self.columns=columns
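# A hedged usage sketch (not from the original file): a schema class could declare a
# multi-column fulltext index roughly like this, using a hypothetical Widget schema:
#
#     class Widget(Schema):
#         table='widgets'
#         name=TAG
#         comment=STR
#         search=TEXTKEY('name','comment')   # FULLTEXT index spanning both columns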
class Schema(object):
"""
each instance requires:
table='tablename'
`fieldname`=TYPE,default,KEY' # for each field, where TYPE in TYPES , KEY and default are optional, and can be swapped in order
"""
TYPES=(TAG,STR,CHAR,TEXT,INT,SMALLINT,TINYINT,FLOAT,DATE,FLAG,MONEY,TIME,REL,BLOB)
_v_built=[]
################################ database maintenance ##########################
@classmethod
def build_database(self,database):
" create or append defined table in MySQL db"
# self.table=getattr(self,'table',self.__name__.lower())
if self.table.find(".")>=0:
self.database,self.table=self.table.split(".",1) #allow for database override in table spec
else:
self.database=database
self.tablesql="`%s`.`%s`" % (self.database,self.table)
tables=[t["Tables_in_%s" % database] for t in execute("show tables from `%s`" % database)]
# print ">>>>>>>>>>>>>>>>TABLES>>>>>>>>>>>>>>>>>",tables
self._v_columns,self._v_keys,self._v_textkeys,self._v_multikeys=self.get_columns_and_indices()
self._v_schema=dict([('uid',INT)]+[(k,v[0]) for (k,v) in self._v_columns.items()])# this is for use by data.py
# create the table or update the table - unless there are no columns
if self._v_columns:
if self.table in tables:
self.update_table(database)
# if the table has not already been created (by a parent class), then create it
elif (("%s.%s") % (database,self.table)) not in self._v_built:
self.create_table(database)
self._v_built.append("%s.%s" % (database,self.table))
# print '>>>>>>>>>>>>v_built=',database,self._v_built
@classmethod
def get_columns_and_indices(self):
""
columns={}
indices=[]
textindices=[]
multikeys=[]
# print "NAME================",self.__name__
# print "items============",self.__dict__.items()
# for k,v in self.__dict__.items():
for k in dir(self):
v=getattr(self,k,None)
if v is not self.TYPES:
if isinstance(v,TEXTKEY):
multikeys.append((k,v.columns))
else:
if not isinstance(v,tuple):
v=[v]
else: #v is a tuple...
v=list(v)
if v[0] in self.TYPES:
if 'KEY' in v: #we need an index
if v[0]._v_mysql_type=="mediumtext":
textindices.append(k)
else:
indices.append(k)
v.remove('KEY')
if len(v)==1:#we have no default
v.append(v[0]._v_default)#put a dummy there
columns[k]=v
return columns,indices,textindices,multikeys
@classmethod
def update_table(self,database):
"""
FOR SAFETY REASONS WE WON'T DELETE OR MODIFY ANY DATA, except keys
- add any new columns and keys to the table
- generate a warning if there are any database columns no longer defined in the schema
- throw an error if there is a mismatch between any database column type and the schema definition
NOTE - self.insert changes are IGNORED
- changes to defaults are ignored (O/S - fix this)
- changes and aditions to TEXTKEY multikeys are IGNORED (O/S - fix this)
- dropping of TEXTKEYs doesn't work, as key names are not correct (should use keys from self._v_multikey) (O/S - fix this)
- ***** should use "select KEYS from <table>" to get the key data to fix the above. This assumes we have mysql v4.0.2 or better.
i.e. Key_name, Column_name, and Index_type (BTREE or FULLTEXT):
keys= Index_type!='FULLTEXT'
textkeys= Key_name==Column_name and Index_type=='FULLTEXT'
multikeys= Key_name!=Column_name and Index_type=='FULLTEXT'
"""
columns=self._v_columns
keys=self._v_keys
textkeys=self._v_textkeys
multikeys=[k[1][0] for k in self._v_multikeys]#mysql just shows the first of the multiple keys in the 'show columns' result
# print ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",multikeys
columnnames=columns.keys()
tabledata=execute("show columns from %s" % self.tablesql)
tablecols=[]
tablekeys=[]
tabletextkeys=[]
for i in tabledata:
name=i['Field']
if name!='uid':
tablecols.append(name)
if i['Key']=='MUL':
if i['Type']=='mediumtext':
tabletextkeys.append(name)
else:
tablekeys.append(name)
if name not in columnnames: #don't delete the column - safety first....
print "WARNING: column `%s` in table %s is not defined in schema" % (name,self.tablesql)
elif i['Type']!=columns[name][0]._v_mysql_type:
raise SchemaMismatchError, (i['Type'],name,self.tablesql,columns[name][0]._v_mysql_type)
sql=[]
for i in columnnames:
if i not in tablecols:
v=columns[i]
sql.append("add `%s` %s default %s" % (i,v[0]._v_mysql_type,v[1] is None and "NULL" or ('"%s"' % v[1])))
for i in keys:
if i not in tablekeys:
sql.append("add KEY (`%s`)" % i)
for i in textkeys:
if i not in tabletextkeys:
sql.append("add FULLTEXT (`%s`)" % i)
for i in tablekeys:
if i not in (keys+multikeys):
sql.append("drop KEY `%s`" % i)
for i in tabletextkeys:
if i not in (textkeys+multikeys):
sql.append("drop KEY `%s`" % i)
if sql:
sql="ALTER TABLE %s\n%s" % (self.tablesql,',\n'.join(sql))
print sql
for i in sql.split(';'):
if i:
execute(i)
@classmethod
def create_table(self,database):
"""
generate SQL code to create the table for this schema, and seed it with initial row inserts
will simply ignore any invalidly-specified attributes
"""
def quoted(keys):
ks=[('`%s`' % k) for k in keys]
# print ">>>>>>>>>>>>>>>>>>>>>>>>>>>>",ks
return len(ks)>1 and str(tuple(ks)).replace("'","").replace('"',"") or ('(%s)' % ks[0])
columns=self._v_columns
keys=self._v_keys
textkeys=self._v_textkeys
multikeys=self._v_multikeys
data={
'table':self.tablesql,
'columns':",\n".join(("`%s` %s default %s" % (k,v[0]._v_mysql_type,v[1] is None and "NULL" or ('"%s"' % v[1]))) for (k,v) in columns.items()),
'keys':",\n".join(['PRIMARY KEY (uid)']+[("KEY (`%s`)" % k) for k in keys]+[("FULLTEXT (`%s`)" % k) for k in textkeys]+[("FULLTEXT %s %s" % (k,quoted(v))) for (k,v) in multikeys])
}
sql="CREATE TABLE %(table)s(\nuid int(11) NOT NULL auto_increment,\n%(columns)s,\n%(keys)s\n) ENGINE=MyISAM CHARSET=utf8;" % data
if hasattr(self,'insert'):
if type(self.insert)==type({}):#allow for single item
self.insert=[self.insert]#convert to list
for row in self.insert:
sql+="\nINSERT INTO %s %s VALUES %s;" % (self.tablesql,str(sql_list(row.keys())).replace("'","`"),sql_list(row.values()))
if __name__=='__main__':
return sql
print sql
for i in sql.split(';'):
if i:
execute(i)
def create_database(database):
"called by app.py, for each app"
databases=[d['Database'] for d in execute("show databases")]
if database not in databases:
sql="CREATE DATABASE `%s` CHARSET=utf8" % database
print sql
execute(sql)
def test():
class Test(Schema):
table='goods'
amount=MONEY,0
ok=FLAG
name=TAG,""
code=TAG,"",KEY
ref=INT,KEY
year=INT,2000,KEY
when=DATE
took=TIME
stat=INT,1
txt=STR,KEY
insert=[
dict(uid=3,amount=500,ok='Y'),
dict(amount=500,when="20070707")
]
print Test.create()
if __name__=='__main__':
test()
|
StarcoderdataPython
|
6652875
|
<gh_stars>1-10
"""
Day 2 was also relatively straightforward, not much challenge up till now. The only stumbling block was that I first
did not read that the up and down in part 2 no longer changed the depth by themselves. Other than that, smooth sailing.
"""
from utils import Solution
from typing import Any
class DaySolution(Solution):
def __init__(self, day: int = 2, year: int = 2021) -> None:
super().__init__(day, year)
def _parse_data(self, input_data: str) -> Any:
"""
The input consists of two parts, so let's split on the newline and then on the space to have every
instruction separate and to be able to separate the direction and magnitude.
"""
return [x.split(" ") for x in input_data.split("\n") if x]
def _solve_part1(self, parsed_data: Any) -> Any:
"""
First identify the direction and then add or subtract the magnitude from the corresponding position.
Don't forget that the magnitude still needs to be converted to int.
"""
x = 0
d = 0
for direction in parsed_data:
if direction[0] == "forward":
x += int(direction[1])
elif direction[0] == "up":
d -= int(direction[1])
else:
d += int(direction[1])
return x * d
def _solve_part2(self, parsed_data: Any) -> Any:
"""
This took one more attempt because I didn't read carefully and kept the depth addition/subtraction for the
up and down direction in the code first. Easy fix afterwards.
"""
x = 0
d = 0
aim = 0
for direction in parsed_data:
if direction[0] == "forward":
x += int(direction[1])
d += aim * int(direction[1])
elif direction[0] == "up":
aim -= int(direction[1])
else:
aim += int(direction[1])
return x * d
|
StarcoderdataPython
|
4891914
|
<filename>main/rest/image_file.py<gh_stars>10-100
from django.db import transaction
from django.http import Http404
from ..models import Media
from ..models import Resource
from ..models import safe_delete
from ..models import drop_media_from_resource
from ..schema import ImageFileListSchema
from ..schema import ImageFileDetailSchema
from ..search import TatorSearch
from ._base_views import BaseListView
from ._base_views import BaseDetailView
from ._permissions import ProjectTransferPermission
class ImageFileListAPI(BaseListView):
schema = ImageFileListSchema()
permission_classes = [ProjectTransferPermission]
http_method_names = ['get', 'post']
def _get(self, params):
media = Media.objects.get(pk=params['id'])
role = params['role']
response_data = []
if media.media_files:
if role in media.media_files:
response_data = media.media_files[role]
return response_data
def _post(self, params):
with transaction.atomic():
qs = Media.objects.select_for_update().filter(pk=params['id'])
if qs.count() != 1:
raise Http404
media_files = qs[0].media_files
role = params['role']
body = params['body']
index = params.get('index')
if not media_files:
media_files = {}
if role not in media_files:
media_files[role] = []
if index is None:
media_files[role].append(body)
else:
if index >= len(media_files[role]):
raise ValueError(f"Supplied index {index} is larger than current array size "
f"{len(media_files[role])}")
media_files[role].insert(index, body)
qs.update(media_files=media_files)
media = Media.objects.get(pk=params['id'])
Resource.add_resource(body['path'], media)
TatorSearch().create_document(media)
return {'message': f"Media file in media object {media.id} created!"}
def get_queryset(self):
return Media.objects.all()
class ImageFileDetailAPI(BaseDetailView):
schema = ImageFileDetailSchema()
permission_classes = [ProjectTransferPermission]
lookup_field = 'id'
http_method_names = ['get', 'patch', 'delete']
def _get(self, params):
media = Media.objects.get(pk=params['id'])
role = params['role']
index = params['index']
response_data = []
if media.media_files:
if role in media.media_files:
response_data = media.media_files[role]
if index >= len(response_data):
raise ValueError(f"Supplied index {index} is larger than current array size "
f"{len(response_data)}")
return response_data[index]
def _patch(self, params):
with transaction.atomic():
qs = Media.objects.select_for_update().filter(pk=params['id'])
if qs.count() != 1:
raise Http404
media_files = qs[0].media_files
role = params['role']
body = params['body']
index = params['index']
if not media_files:
raise Http404
if role not in media_files:
raise Http404
if index >= len(media_files[role]):
raise ValueError(f"Supplied index {index} is larger than current array size "
f"{len(media_files[role])}")
old_path = media_files[role][index]['path']
new_path = body['path']
media_files[role][index] = body
qs.update(media_files=media_files)
media = Media.objects.get(pk=params['id'])
if old_path != new_path:
drop_media_from_resource(old_path, media)
safe_delete(old_path)
Resource.add_resource(new_path, media)
TatorSearch().create_document(media)
return {'message': f"Media file in media object {media.id} successfully updated!"}
def _delete(self, params):
with transaction.atomic():
qs = Media.objects.select_for_update().filter(pk=params['id'])
if qs.count() != 1:
raise Http404
media_files = qs[0].media_files
role = params['role']
index = params['index']
if not media_files:
raise Http404
if role not in media_files:
raise Http404
if index >= len(media_files[role]):
raise ValueError(f"Supplied index {index} is larger than current array size "
f"{len(media_files[role])}")
deleted = media_files[role].pop(index)
qs.update(media_files=media_files)
media = Media.objects.get(pk=params['id'])
drop_media_from_resource(deleted['path'], media)
safe_delete(deleted['path'])
TatorSearch().create_document(media)
return {'message': f'Media file in media object {params["id"]} successfully deleted!'}
def get_queryset(self):
return Media.objects.all()
|
StarcoderdataPython
|
6416853
|
from dwim.utils import for_app
@for_app('brew', at_least=2)
def match(command):
return (command.script_parts[1] in ['uninstall', 'rm', 'remove']
and "brew uninstall --force" in command.stdout)
def get_new_command(command):
command.script_parts[1] = 'uninstall'
command.script_parts.insert(2, '--force')
return ' '.join(command.script_parts)
|
StarcoderdataPython
|
3323089
|
<reponame>Optimist-Prime/QML-for-MNIST-classification
import sys
import pickle
import numpy as np
from ptrace import ptrace
from tqdm import tqdm
def generate_reduced(feature_data):
new_feature_data = []
for i in tqdm(range(len(feature_data))):
new_feature_data.append(
ptrace(
16,
[6, 7, 8, 9],
# [0, 4, 8, 12],
feature_data[i]
)
)
return np.array(new_feature_data)
if __name__ == '__main__':
print(
'usage: python tools/reduce_data.py in_fname.pickle out_fname.pickle'
)
in_filename = sys.argv[1]
out_filename = sys.argv[2]
print('loading data from %s' % in_filename)
with open(in_filename, 'rb') as infile:
data = pickle.load(infile)
print('reducing data...')
data[0] = generate_reduced(data[0])
print('writing data to %s' % out_filename)
with open(out_filename, 'wb') as outfile:
pickle.dump(data, outfile)
|
StarcoderdataPython
|
194583
|
# Copyright (c) 2011, <NAME> <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the authors nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ANDRES MOREIRA BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
import os
version = '0.6.1'
long_description = """
Python bindings for the snappy compression library from Google.
More details about Snappy library: http://google.github.io/snappy
"""
library_dirs, include_dirs = [], []
if os.environ.get("CIBUILDWHEEL", False) and sys.version_info[:2] == (3, 9) and sys.platform =="darwin":
library_dirs = ["/usr/local/lib/"]
include_dirs = ["/usr/local/include/"]
snappymodule = Extension('snappy._snappy',
libraries=['snappy'],
sources=['src/snappy/snappymodule.cc', 'src/snappy/crc32c.c'],
library_dirs=library_dirs,
include_dirs=include_dirs)
ext_modules = [snappymodule]
packages = ['snappy']
install_requires = []
setup_requires = []
cffi_modules = []
if 'PyPy' in sys.version:
from setuptools import setup
ext_modules = []
install_requires = ['cffi>=1.15.0']
setup_requires = ['cffi>=1.15.0']
cffi_modules = ['./src/snappy/snappy_cffi_builder.py:ffi']
setup(
name='python-snappy',
version=version,
author='<NAME>',
author_email='<EMAIL>',
url='http://github.com/andrix/python-snappy',
description='Python library for the snappy compression library from Google',
long_description=long_description,
keywords='snappy, compression, google',
license='BSD',
classifiers=['Development Status :: 4 - Beta',
'Topic :: Internet',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: BSD License',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
# 'Operating System :: Microsoft :: Windows', -- Not tested yet
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10'
],
ext_modules=ext_modules,
packages=packages,
install_requires=install_requires,
setup_requires=setup_requires,
cffi_modules=cffi_modules,
package_dir={'': 'src'},
)
|
StarcoderdataPython
|
396632
|
<reponame>giovp/SingleCellOpenProblems
from ....tools.decorators import method
from ....tools.utils import check_version
import numpy as np
@method(
method_name="NMF-reg",
paper_name="Slide-seq: A scalable technology for measuring genome-wide expression at high spatial resolution", # noqa: E501
paper_url="https://science.sciencemag.org/content/363/6434/1463", # noqa: E501
paper_year=2019,
code_url="https://github.com/tudaga/NMFreg_tutorial",
code_version=check_version("nmf-reg"),
)
def nmfreg(adata):
"""NMF-reg: NMF regression for array-based spatial transcriptomics data.
Re-implementation from https://github.com/tudaga/NMFreg_tutorial.
Originally developed for Slide-seq data.
Parameters
----------
adata : AnnData
Adata with true proportions and signature matrix.
Returns
-------
Adata with predicted proportions saved in `adata.obsm["proportions_pred"]`.
"""
from scipy.optimize import nnls
from scipy.sparse import issparse
from sklearn.decomposition import NMF
from sklearn.preprocessing import StandardScaler
adata_sc = adata.uns["sc_reference"].copy()
n_types = adata_sc.obs["label"].cat.categories.shape[0]
factors = 30 # TODO(handle hyper params)
projection_type = "l2" # TODO(handle hyper params)
# Learn from reference
if issparse(adata_sc.X):
X = adata_sc.X.toarray()
else:
X = adata_sc.X
X_norm = X / X.sum(1)[:, np.newaxis]
X_scaled = StandardScaler(with_mean=False).fit_transform(X_norm)
model = NMF(
n_components=factors,
init="random",
random_state=17, # TODO(handle random_state)
)
Ha = model.fit_transform(X_scaled)
Wa = model.components_
cluster_df = adata.obs[["label"]].copy()
cluster_df.loc[:, "factor"] = np.argmax(Ha, axis=1)
cluster_df.loc[:, "code"] = cluster_df.label.values.codes
factor_to_cluster_map = np.array(
[
np.histogram(
cluster_df.loc[cluster_df.factor == k, "code"],
bins=n_types,
range=(0, n_types),
)[0]
for k in range(factors)
]
).T
factor_to_best_celltype = np.argmax(factor_to_cluster_map, axis=0)
# celltype_to_best_factor = np.argmax(factor_to_cluster_map, axis=1) # TODO(remove?)
factor_to_best_celltype_matrix = np.zeros((factors, n_types))
for i, j in enumerate(factor_to_best_celltype):
factor_to_best_celltype_matrix[i, j] = 1
Ha_norm = StandardScaler(with_mean=False).fit_transform(Ha)
if projection_type == "l2":
sc_deconv = np.dot(Ha_norm ** 2, factor_to_best_celltype_matrix)
else:
sc_deconv = np.dot(Ha_norm, factor_to_best_celltype_matrix)
sc_deconv = sc_deconv / sc_deconv.sum(1)[:, np.newaxis]
# Evaluation on reference TODO(either move or delete)
cluster_df.loc[:, "predicted_code"] = np.argmax(sc_deconv, axis=1)
pos_neg_dict = {
i: [
sc_deconv[cluster_df.predicted_code == i, i],
sc_deconv[cluster_df.predicted_code != i, i],
]
for i in range(n_types)
}
thresh_certainty = [0] * n_types
for c in range(n_types):
thresh_certainty[c] = np.max(pos_neg_dict[c][1])
# Evaluation ends here
# Start run on actual spatial data
if issparse(adata.X):
X_sp = adata.X.toarray()
else:
X_sp = adata.X
X_sp_norm = X_sp / X_sp.sum(1)[:, np.newaxis]
X_sp_scaled = StandardScaler(with_mean=False).fit_transform(X_sp_norm)
bead_prop_soln = np.array(
[nnls(Wa.T, X_sp_scaled[b, :])[0] for b in range(X_sp_scaled.shape[0])]
)
bead_prop_soln = StandardScaler(with_mean=False).fit_transform(bead_prop_soln)
bead_prop = np.dot(bead_prop_soln, factor_to_best_celltype_matrix)
prop = bead_prop / bead_prop.sum(1)[:, np.newaxis]
adata.obsm["proportions_pred"] = prop
return adata
|
StarcoderdataPython
|
3309364
|
<reponame>SlimyMonkey/divePython<filename>roman/stage1/roman1.py
"""Convert to and from Roman numerals
This program is part of "Dive Into Python", a free Python book for
experienced programmers. Visit http://diveintopython.org/ for the
latest version.
"""
__author__ = "<NAME> (<EMAIL>)"
__version__ = "$Revision: 1.2 $"
__date__ = "$Date: 2004/05/05 21:57:20 $"
__copyright__ = "Copyright (c) 2001 <NAME>"
__license__ = "Python"
#Define exceptions
class RomanError(Exception): pass
class OutOfRangeError(RomanError): pass
class NotIntegerError(RomanError): pass
class InvalidRomanNumeralError(RomanError): pass
def toRoman(n):
"""convert integer to Roman numeral"""
pass
def fromRoman(s):
"""convert Roman numeral to integer"""
pass
|
StarcoderdataPython
|
5179297
|
<filename>build_graph/build_mdn.py<gh_stars>1-10
# Copyright 2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import argparse
import networkx as nx
import calendar
import json
from igraph import *
from dateutil import parser
from urlparse import urlparse
from tqdm import *
import subprocess
from IPy import IP
import pyprind
import cPickle as pickle
'''
This module builds a graph from download metadata with fields as defined below
'''
data_fields = [
'server_ts',
'machine_guid',
'enterprise_guid',
'country',
'file_sha2',
'file_md5',
'filename',
'filesize',
'url',
'parent_file_sha2',
'parent_url',
'download_ip',
'prevalence',
'reputation_score',
'referrer_url',
'file_signer_issuer',
'file_signer_subject',
'file_directory',
'file_type',
'event',
]
"""
Data pre-processing
"""
# IPv4/6 Bogon List - removes invalid IPs for public use
disqualified_IPs = []
with open('build_graph/ipv4-shortbogons.txt', 'r') as f:
disqualified_IPs.extend([x.split('\n')[0] for x in f.readlines()[1:]])
with open('build_graph/ipv6-shortbogons.txt', 'r') as f:
disqualified_IPs.extend([x.split('\n')[0] for x in f.readlines()[1:]])
def IP_in_subnet(test_IP, bogons_list):
try:
IP(test_IP)
for subnet in bogons_list:
if IP(test_IP) in IP(subnet):
return True
return False
except:
return False
def _qualified(values):
if values[8] != 'NULL' or \
(values[11] != 'NULL' and not IP_in_subnet(values[11], disqualified_IPs)) \
or values[14] != 'NULL':
return True
def _matchup(values, droppers):
if values[9] in droppers or values[8] != 'NULL' or \
(values[11] != 'NULL' and not IP_in_subnet(values[11], disqualified_IPs)) or \
values[14] != 'NULL':
return True
def qualified_dropper(inFile):
droppers = set()
with open(inFile, 'r') as f:
for line in tqdm(f):
line = line.strip()
values = line.split('\t')
if len(values) == len(data_fields):
if _qualified(values):
droppers.add(values[4])
return droppers
def filter_rawdata(inFile, outFile, droppers):
outHandler = open(outFile, 'w')
with open(inFile, 'r') as f:
for line in tqdm(f):
line = line.strip()
values = line.split('\t')
if len(values) == len(data_fields):
if _matchup(values, droppers):
outHandler.write('\t'.join(values) + '\n')
outHandler.close()
'''
Graph-building code
'''
nid = 0
node_attributes = ['type', 'data', 'score', 'name', 'prevalence', 'status', 'fileType']
name2id = {}
node2events = {}
event2timestamp = {}
def get_time(str_time):
return calendar.timegm((parser.parse(str_time)).timetuple())
def process_url(url):
try:
parsed = urlparse(url)
except:
return None, None, None, None
try:
return parsed.scheme, parsed.hostname, parsed.path, parsed.port
except:
return parsed.scheme, parsed.hostname, parsed.path, None
def add_node_event_pairing(nid, eid):
try:
node2events[nid].append(eid)
except:
node2events[nid] = [eid]
def get_parent_file_node(line):
if line['parent_file_sha2'] == 'NULL':
return None, None
global nid
d = {'type': 'file', 'name': line['parent_file_sha2'], 'score': str(0), 'data': 'parent', 'prevalence': -1,
'size': -1, 'status': 1, 'event': [line['event']]}
# parent file has always prevalence -1 unless we have its prevalence info, i.e. a parent file was a file
# status == 1 implies that score is artificial
# Note that insert_node will not double-insert, hence this early return of the existing id
if line['parent_file_sha2'] in name2id:
return name2id[line['parent_file_sha2']], d
nid += 1
name2id[line['parent_file_sha2']] = str(nid)
return str(nid), d
def get_file_node(line):
if line['file_sha2'] == 'NULL':
return None, None
global nid
d = {'type': 'file', 'name': line['file_sha2'], 'fileType': line['file_type']}
if line['reputation_score'] == 'NULL':
d['score'] = str(0)
else:
d['score'] = line['reputation_score']
if line['filename'] != 'NULL':
d['data'] = line['filename']
else:
d['data'] = line['file_sha2']
if line['prevalence'] != 'NULL':
d['prevalence'] = line['prevalence']
else:
d['prevalence'] = -1
if line['filesize'] != 'NULL':
d['size'] = line['filesize']
else:
d['size'] = -1
d['status'] = 0
d['event'] = [line['event']]
# Note that insert_node will not double-insert, hence this early return of the existing id
if line['file_sha2'] in name2id:
return name2id[line['file_sha2']], d
nid += 1
name2id[line['file_sha2']] = str(nid)
return str(nid), d
def get_url_node(line, k='url'):
if line[k] == 'NULL':
return None, None
global nid
d = {'type': 'url'}
# Uncomment below line if you want 'referrer_url' as a node type
# d['type'] = 'url' if k == 'parent_url' else k
scheme, hostname, path, port = process_url(line[k])
# Removes url parameters
if line[k].find('?') == -1:
data = line[k]
else:
data = line[k][:line[k].find('?')]
d['name'] = data
if hostname:
d['data'] = hostname
else:
d['data'] = data
d['score'] = str(0)
d['prevalence'] = -1
d['size'] = -1
d['status'] = 1
d['event'] = [line['event']]
if data in name2id:
return name2id[data], d
nid += 1
name2id[data] = str(nid)
return str(nid), d
def get_fqdn_node(line, k='url'):
global nid
if line[k] == 'NULL':
return None, None
scheme, hostname, path, port = process_url(line[k])
if hostname is None:
return None, None
d = {'data': hostname, 'score': str(0), 'name': hostname, 'prevalence': -1, 'size': -1, 'status': 1, 'event': []}
# Separate IPs from FQDNs (IPv4 and IPv6) that may be contained in url
if set(hostname).intersection(set('qwertyuiopasdfghjklzxcvbnm')) == set() \
or ':' in hostname:
d['type'] = 'ip'
else:
d['type'] = 'fqdn'
if hostname in name2id:
return name2id[hostname], d
nid += 1
name2id[hostname] = str(nid)
return str(nid), d
def get_ip_node(line):
if line['download_ip'] == 'NULL':
return None, None
global nid
d = {'type': 'ip', 'data': line['download_ip'], 'score': str(0), 'name': line['download_ip'], 'prevalence': -1,
'size': -1, 'status': 1, 'event': [line['event']]}
if line['download_ip'] in name2id:
return name2id[line['download_ip']], d
nid += 1
name2id[line['download_ip']] = str(nid)
return str(nid), d
def rewrite_node_values(g, node_id, d):
if g.node[node_id]['event']:
g.node[node_id]['event'].extend(d['event'])
if d['type'] == 'file' and g.node[node_id]['type'] == 'file' \
and (int(d['prevalence']) > int(g.node[node_id]['prevalence'])
or int(d['size']) > int(g.node[node_id]['size'])
or g.node[node_id]['name'] == 'parent'):
for attr in node_attributes:
g.node[node_id][attr] = d[attr]
def insert_node(g, node_id, d):
if node_id not in g:
g.add_node(node_id)
for key in d:
g.node[node_id][key] = d[key]
else:
# update
rewrite_node_values(g, node_id, d)
# if event_id != None:
# add_node_event_pairing(id, event_id)
'''
five types of edges:
1. f2f - file to file
2. l2f - link to file
3. l2l - link to link
4. ip2l - ip to link
5. d2l - FQ domain to link
'''
def insert_edge(g, src, trg, type='f2f', weighted=False):
if weighted:
# Weight edge by no. of download events for which node pairing exists
try:
g.edge[src][trg]['weight'] += 1
except KeyError:
g.add_edge(src, trg, type=type, weight=1)
update_node_drops(g, src, trg)
else:
try:
g.edge[src][trg]['weight']
except KeyError:
g.add_edge(src, trg, type=type, weight=0)
def update_node_drops(g, src, trg):
# Increment 'dropper' (parent) for src
try:
g.node[src]['dropper'] += 1
except KeyError:
g.node[src]['dropper'] = 1
# Increment 'droppee' (child) for trg
try:
g.node[trg]['droppee'] += 1
except KeyError:
g.node[trg]['droppee'] = 1
def build_mdn(g, line, include_fqdn=True):
# Add mapping for event ID to timestamp
event2timestamp[line['event']] = line['server_ts']
# Build graph with nodes and edges
f_id, f_d = get_file_node(line)
p_id, p_d = get_parent_file_node(line)
if f_id:
insert_node(g, f_id, f_d) # file node
if p_id:
insert_node(g, p_id, p_d) # parent file node
if f_id and p_id:
insert_edge(g, p_id, f_id, type='f2f', weighted=True)
if f_id:
rurl_id, rurl_d = get_url_node(line, k='referrer_url')
url_id, url_d = get_url_node(line, k='url')
ip_id, ip_d = get_ip_node(line)
# Remove IPs and hosts with disallowed IPs
if ip_id and IP_in_subnet(ip_d['name'], disqualified_IPs):
ip_id = None
ip_d = None
if url_id and IP_in_subnet(url_d['data'], disqualified_IPs):
url_id = None
url_d = None
if rurl_id and IP_in_subnet(rurl_d['data'], disqualified_IPs):
rurl_id = None
rurl_d = None
if rurl_id:
insert_node(g, rurl_id, rurl_d) # referrer url
if url_id:
insert_node(g, url_id, url_d) # url
if ip_id:
insert_node(g, ip_id, ip_d) # ip
if rurl_id:
insert_edge(g, rurl_id, f_id, type='l2f', weighted=True) # file ---- referrer
if rurl_id is None and url_id:
insert_edge(g, url_id, f_id, type='l2f', weighted=True) # file ---- url
if rurl_id is None and url_id is None and ip_id:
insert_edge(g, ip_id, f_id, type='l2f', weighted=True) # file ---- ip
if url_id and rurl_id:
insert_edge(g, url_id, rurl_id, type='l2l', weighted=True) # url ----- referrer
if ip_id and url_id:
insert_edge(g, ip_id, url_id, type='ip2l', weighted=True) # ip ----- url
# Assuming no host url, then link IP to landing page
if rurl_id and url_id is None and ip_id:
insert_edge(g, ip_id, rurl_id, type='ip2l', weighted=True) # ip ---- referrer
if include_fqdn:
rfqdn_id, rfqdn_d = get_fqdn_node(line, k='referrer_url')
fqdn_id, fqdn_d = get_fqdn_node(line, k='url')
if rfqdn_id:
insert_node(g, rfqdn_id, rfqdn_d) # referrer FQDN
if fqdn_id:
insert_node(g, fqdn_id, fqdn_d) # host FQDN
if rfqdn_id and rurl_id:
if rfqdn_d['type'] == 'ip':
insert_edge(g, rfqdn_id, rurl_id, type='ip2l', weighted=True)
else:
insert_edge(g, rfqdn_id, rurl_id, type='d2l', weighted=True) # referrer FQDN --- referrer
if fqdn_id and url_id:
if fqdn_d['type'] == 'ip':
insert_edge(g, fqdn_id, url_id, type='ip2l', weighted=True)
else:
insert_edge(g, fqdn_id, url_id, type='d2l', weighted=True) # FQDN --- url
if p_id:
url_id, url_d = get_url_node(line, k='parent_url')
# Remove IPs and hosts with disallowed IPs
if url_id and IP_in_subnet(url_d['data'], disqualified_IPs):
url_id = None
url_d = None
if url_id:
insert_node(g, url_id, url_d)
insert_edge(g, url_id, p_id, type='l2f')
if include_fqdn:
fqdn_id, fqdn_d = get_fqdn_node(line, k='parent_url')
if fqdn_id:
insert_node(g, fqdn_id, fqdn_d) # FQDN
if fqdn_id and url_id:
if fqdn_d['type'] == 'ip':
insert_edge(g, fqdn_id, url_id, type='ip2l')
else:
insert_edge(g, fqdn_id, url_id, type='d2l') # FQDN --- url
def benign_vs_malicious(g):
benign = 0
malicious = 0
gray = 0
for node in g.nodes():
if float(g.node[node]['score']) <= -50:
malicious += 1
elif float(g.node[node]['score']) > 50:
benign += 1
elif float(g.node[node]['score']) != 0:
gray += 1
return benign, gray, malicious
def serialize_event_attr(g):
"""
Serializes the per-node 'event' attribute so the NX graph can be written as GML
:param g: networkX graph object
:returns g
"""
# Serialize 'events' attribute for each graph node
for node_id in g.nodes():
g.node[node_id]['event'] = json.dumps(g.node[node_id]['event'])
return g
def deserialize_event_attr(G):
"""
Transforms 'events' and 'eventToTimestamp' attributes in serialized igraph data to a usable format
:param G: igraph graph object
:returns G
"""
# Deserialize 'events' attribute for each node
for v in G.vs:
v['event'] = json.loads(v['event'])
G['event2timestamp'] = event2timestamp
return G
def build(raw_data):
g = nx.DiGraph()
mypar = pyprind.ProgBar(int(subprocess.check_output(['wc', '-l', raw_data]).decode('utf8').split()[0]),
bar_char='=')
with open(raw_data, 'r') as f:
event = 0
for line in f:
line = line.rstrip()
values = line.split('\t')
values.append(event)
if len(values) == len(data_fields):
build_mdn(g, dict(zip(data_fields, values)))
event += 1
mypar.update()
g = serialize_event_attr(g)
return g
def build_graph_by_igraph(raw_data, loc, gml_filename):
if not os.path.exists(loc):
try:
os.makedirs(loc)
except Exception as e:
pass
g = build(raw_data)
nx.write_gml(g, os.path.join(loc, gml_filename))
G = Graph.Read_GML(os.path.join(loc, gml_filename))
# Add event information
G = deserialize_event_attr(G)
# Add event to node lookup table
G['event2nodes'] = {}
for v in G.vs:
for event in v['event']:
try:
G['event2nodes'][event].append(v.index)
except:
G['event2nodes'][event] = [v.index]
return G
def get_raw_downloads(in_file):
""" Gets raw download statistics for each node (file, URL, IP) """
raw_downloads = {}
with open(in_file, 'r') as f:
for line in tqdm(f):
line = line.rstrip()
values = line.split('\t')
if len(values) > 15:
# Downloaded SHA2
try:
raw_downloads[values[4]]['dropped'] += 1
except:
try:
raw_downloads[values[4]]['dropped'] = 1
except:
raw_downloads[values[4]] = {'dropped': 1}
# Parent SHA2
try:
raw_downloads[values[9]]['dropper'] += 1
except:
try:
raw_downloads[values[9]]['dropper'] = 1
except:
raw_downloads[values[9]] = {'dropper': 1}
if values[14] != 'NULL':
# Referrer URL
try:
raw_downloads[values[14]]['dropper'] += 1
except:
try:
raw_downloads[values[14]]['dropper'] = 1
except:
raw_downloads[values[14]] = {'dropper': 1}
elif values[14] == 'NULL' and values[8] != 'NULL':
# Host URL
try:
raw_downloads[values[8]]['dropper'] += 1
except:
try:
raw_downloads[values[8]]['dropper'] = 1
except:
raw_downloads[values[8]] = {'dropper': 1}
elif values[14] == 'NULL' and values[8] == 'NULL' and values[11] != 'NULL':
# Download IP
try:
raw_downloads[values[11]]['dropper'] += 1
except:
try:
raw_downloads[values[11]]['dropper'] = 1
except:
raw_downloads[values[11]] = {'dropper': 1}
if values[10] != 'NULL':
# Parent SHA2 Host URL
try:
raw_downloads[values[10]]['dropper'] += 1
except:
try:
raw_downloads[values[10]]['dropper'] = 1
except:
raw_downloads[values[10]] = {'dropper': 1}
return raw_downloads
def enrich_G_raw_downloads(G, raw_downloads):
for _v in tqdm(G.vs):
try:
G.vs[_v.index]['dropped'] = raw_downloads[_v['name']]['dropped']
except KeyError:
G.vs[_v.index]['dropped'] = 0.0
try:
G.vs[_v.index]['dropper'] = raw_downloads[_v['name']]['dropper']
except KeyError:
G.vs[_v.index]['dropper'] = 0.0
return G
def parse_avclass_data(in_file):
avclass_response = {}
with open(in_file) as f:
for line in f:
try:
response = line.replace('\n', '').split('\t')
except:
continue
avclass_response[response[0]] = {'label': response[1], 'is_pup': response[2]}
return avclass_response
def enrich_G_avclass_data(G, avclass_response):
for _v in tqdm(G.vs):
sha2 = G.vs[_v.index]['name'].lower()
try:
G.vs[_v.index]['avclasslabel'] = avclass_response[sha2]['label']
G.vs[_v.index]['avclassispup'] = avclass_response[sha2]['is_pup']
except:
G.vs[_v.index]['avclasslabel'] = None
G.vs[_v.index]['avclassispup'] = None
return G
def generate_gml_graph(args):
current_directory = os.getcwd()
in_file_path = os.path.join(current_directory, args.in_file)
out_file_dir = os.path.join(current_directory, args.out_dir)
# Pre-process Data
print("Pre-processing data...")
droppers = qualified_dropper(in_file_path)
filtered_file_path = os.path.join(out_file_dir, "filtered_logs.tsv")
filter_rawdata(in_file_path, filtered_file_path, droppers)
# Build Graph
print("Building graph...")
G = build(filtered_file_path)
gml_file_path = os.path.join(out_file_dir, "graph.gml")
nx.write_gml(G, gml_file_path)
G = Graph.Read_GML(gml_file_path)
# Filter aberrant nodes
print("Filtering aberrant nodes...")
parents_no_indegree = [x.index for x in G.vs if x['data'] == 'parent' and G.degree(x.index, mode=2) == 0]
G.delete_vertices(parents_no_indegree)
isolated_nodes = G.vs.select(_degree=0)
G.delete_vertices(isolated_nodes)
# Enrich graph nodes with raw download statistics
print("Enriching graph nodes with download statistics...")
raw_downloads = get_raw_downloads(filtered_file_path)
G = enrich_G_raw_downloads(G, raw_downloads)
# Enrich graph nodes with AVClass ground truth data
# Compatible with AVClass v1: https://github.com/malicialab/avclass/tree/master/avclass
if args.in_avclass_file:
print("Enriching graph nodes with AVClass data...")
in_avclass_filepath = os.path.join(current_directory, args.in_avclass_file)
avclass_response = parse_avclass_data(in_avclass_filepath)
G = enrich_G_avclass_data(G, avclass_response)
G.write_gml(gml_file_path)
print("Graph generated!")
return G
def generate_components(args, G):
current_directory = os.getcwd()
out_file_dir = os.path.join(current_directory, args.out_dir)
components_dict = {}
print("Generating connected components data...")
bodydouble = G.copy()
weak_components = bodydouble.components(mode=WEAK)
ordered_components_indices = [i[0] for i in
sorted(enumerate(weak_components), key=lambda x: len(x[1]), reverse=True)]
for i in tqdm(range(len(ordered_components_indices))):
j = ordered_components_indices[i]
# Test component
original_G_tc = weak_components.subgraph(j)
G_tc = weak_components.subgraph(j)
_tc_original_size = G_tc.vcount()
_tc_size_trace = [_tc_original_size]
_tc_iter_deg_removals = []
count_deg = 0
has_articulation_pt = 1
while _tc_size_trace[-1] > 2 and has_articulation_pt == 1:
G_tc_articulation_points = G_tc.articulation_points()
temp = G_tc_articulation_points
G_tc_articulation_points = {}
for node_index in temp:
G_tc_articulation_points[node_index] = G_tc.degree(node_index)
sorted_art_degree = sorted([(x[1], x[0], G_tc.vs[x[0]]['name']) \
for x in G_tc_articulation_points.items()], key=lambda x: x[0], reverse=True)
# Remove next node
try:
remove_node = sorted_art_degree[0][1]
except:
break
remove_node_id = G_tc.vs[sorted_art_degree[0][1]]['id']
_tc_iter_deg_removals.append(remove_node_id)
G_tc.delete_vertices(remove_node)
G_tc_sub_components = G_tc.components(mode=WEAK)
G_tc = G_tc_sub_components.giant()
_tc_current_size = G_tc.vcount()
_tc_size_trace.append(_tc_current_size)
count_deg += 1
_tc_iter_deg_trace = _tc_size_trace
components_dict[i] = {
# Trace of component size reduction
'size_trace': _tc_size_trace,
# IDs of removed nodes (time ordered)
'removed_nodes': _tc_iter_deg_removals,
# Final size of component
'min_size': _tc_size_trace[-1],
# Component subgraph
'subgraph': original_G_tc
}
pickle.dump(components_dict, open(os.path.join(out_file_dir, 'components_dict.pickle'), 'wb'))
print("Connected components data generated!")
def main():
# Parse args
parser = argparse.ArgumentParser(description="Build an iGraph graph from a TSV log file. NOTE: Working directory should be parent directory of \"build_graph\".")
parser.add_argument("--in-file", type=str, help="input TSV filepath")
parser.add_argument("--in-avclass-file", type=str, help="input AVClass labels filepath - `label` and `is_pup` data (AVClass v1) expected")
parser.add_argument("--out-dir", type=str, help="output file directory (multiple files will be generated)")
parser.add_argument("--build-components", action="store_true", help="if flag is set, will also build connected components data")
args = parser.parse_args()
G = generate_gml_graph(args=args)
if args.build_components:
generate_components(args=args, G=G)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
6560005
|
<reponame>delacruzsebastian-cpu/Examen-final-PROCESAMIENTO-DE-IMAGENES
import numpy as np
import cv2
from hough import *
from orientation_estimate import *
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.utils import shuffle
import os
#<NAME> <NAME>
class Bandera:  # flag class
def __init__(self, path, image_name):  # constructor
self.path = path
self.image_name=image_name
self.path_file = os.path.join(path, image_name)
self.image = cv2.imread(self.path_file)  # load the image with OpenCV
self.image_copy = cv2.imread(self.path_file)
self.labels = 0
def Colores(self):
self.image = cv2.cvtColor(self.image, cv2.COLOR_BGR2RGB)
n_colors = 4
self.image = np.array(self.image, dtype=np.float64) / 255
rows, cols, ch = self.image.shape
assert ch == 3
image_array = np.reshape(self.image, (rows * cols, ch))
image_array_sample = shuffle(image_array, random_state=0)[:10000]
model = KMeans(n_clusters=n_colors, random_state=0).fit(image_array_sample)
self.labels = model.predict(image_array)
if np.max(self.labels) == 0:
print('la bandera tiene 1 colores')
if np.max(self.labels) == 1:
print('la bandera tiene 2 colores')
if np.max(self.labels) == 2:
print('la bandera tiene 3 colores')
if np.max(self.labels) == 3:
print('la bandera tiene 4 colores')
def Porcentaje(self):
unique, counts = np.unique(self.labels, return_counts=True)
porc = (counts * 100) / 24480
if np.max(self.labels) == 0:
print('el porcentaje de color 1 es')
print(porc[0])
if np.max(self.labels) == 1:
print('el porcentaje de color 1 es')
print(porc[0])
print('porciento')
print('el porcentaje de color 2 es')
print(porc[1])
if np.max(self.labels) == 2:
print('el porcentaje de color 1 es')
print(porc[0])
print('porciento')
print('el porcentaje de color 2 es')
print(porc[1])
print('porciento')
print('el porcentaje de color 3 es')
print(porc[2])
print('porciento')
if np.max(self.labels) == 3:
print('el porcentaje de color 1 es')
print(porc[0])
print('porciento')
print('el porcentaje de color 2 es')
print(porc[1])
print('porciento')
print('el porcentaje de color 3 es')
print(porc[2])
print('porciento')
print('el porcentaje de color 4 es')
print(porc[3])
print('porciento')
def Orientacion(self):
self.image_copy = cv2.resize(self.image_copy, (600, 600))
high_thresh = 300
bw_edges = cv2.Canny(self.image_copy, high_thresh * 0.3, high_thresh, L2gradient=True)
hough1 = hough(bw_edges)
accumulator = hough1.standard_HT()
acc_thresh = 50
N_peaks = 11
nhood = [25, 9]
peaks = hough1.find_peaks(accumulator, nhood, acc_thresh, N_peaks)
[_, cols] = self.image.shape[:2]
for i in range(len(peaks)):
rho = peaks[i][0]
theta_ = hough1.theta[peaks[i][1]]
theta_pi = np.pi * theta_ / 180
theta_ = theta_ - 180
a = np.cos(theta_pi)
b = np.sin(theta_pi)
x0 = a * rho + hough1.center_x
y0 = b * rho + hough1.center_y
c = -rho
x1 = int(round(x0 + cols * (-b)))
y1 = int(round(y0 + cols * a))
x2 = int(round(x0 - cols * (-b)))
y2 = int(round(y0 - cols * a))
if 85 < np.abs(theta_) < 95:
print("bandera horizontal")
if 175 < np.abs(theta_) < 185:
print("bandera vertical")
if 0 < np.abs(theta_) < 5:  # near-zero angle: treated as mixed orientation
print("bandera mixta")
|
StarcoderdataPython
|
248611
|
# Generated by Django 3.1 on 2020-09-11 07:25
import crm.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(default=crm.models.get_default_uuid, editable=False, max_length=63, verbose_name='UUID')),
('name', models.CharField(blank=True, max_length=63, verbose_name='Name')),
('phone_number', models.CharField(blank=True, max_length=63, verbose_name='Phone Number')),
('remark', models.CharField(blank=True, max_length=255, verbose_name='Remark')),
],
options={
'verbose_name': 'Person',
'verbose_name_plural': 'People',
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Device',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(default=crm.models.get_default_uuid, editable=False, max_length=63, verbose_name='UUID')),
('name', models.CharField(blank=True, max_length=63, verbose_name='Name')),
('os', models.CharField(blank=True, max_length=50, verbose_name='OS')),
('cpu', models.IntegerField(blank=True, null=True, verbose_name='CPU')),
('ram', models.IntegerField(blank=True, null=True, verbose_name='RAM (GB)')),
('remark', models.CharField(blank=True, max_length=400, verbose_name='Remark')),
('customer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='whose_device_customer', to='crm.person', verbose_name='Customer')),
],
options={
'verbose_name': 'Device',
'verbose_name_plural': 'Devices',
'ordering': ['id'],
},
),
migrations.CreateModel(
name='CallLog',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.CharField(default=crm.models.get_default_uuid, editable=False, max_length=63, verbose_name='UUID')),
('request_url', models.CharField(max_length=511, verbose_name='Request URL')),
('request_data', models.CharField(max_length=511, verbose_name='Request data')),
('called_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='whose_calllog_customer', to='crm.person', verbose_name='Called by')),
],
options={
'verbose_name': 'Device',
'verbose_name_plural': 'Devices',
'ordering': ['id'],
},
),
]
|
StarcoderdataPython
|
1721503
|
import torch.nn as nn
from lib import common
ENV_ID = "RoboschoolHalfCheetah-v1"
GAMMA = 0.99
BATCH_SIZE = 256
LR_ACTOR = 0.0003
LR_CRITIC = 0.0003
REPLAY_SIZE = 1000000
REPLAY_INITIAL = 10000
TAU = 0.005
REWARD_STEPS = 1
STEPS_PER_EPOCH = 5000
ETA_INIT = 0.995
ETA_FINAL = 0.999
ETA_BASELINE_EPOCH = 100
ETA_AVG_SIZE = 20
C_MIN = 5000
FIXED_SIGMA_VALUE = 0.3
BETA_AGENT = 1
MAX_ITERATIONS = 1000000
HID_SIZE = 256
ACTF = nn.ReLU
BUFFER = common.EmphasizingExperienceReplay
BETA_START = 0.4
BETA_END_ITER = 10000
ALPHA_PROB = 0.6
MUNCHAUSEN = False
|
StarcoderdataPython
|
1933403
|
from typing import Optional, Union, List
from graphql.execution import execute_sync
from graphql.language import parse
from graphql.type import (
GraphQLBoolean,
GraphQLField,
GraphQLInterfaceType,
GraphQLList,
GraphQLObjectType,
GraphQLSchema,
GraphQLString,
GraphQLUnionType,
)
class Dog:
name: str
barks: bool
mother: Optional["Dog"]
father: Optional["Dog"]
progeny: List["Dog"]
def __init__(self, name: str, barks: bool):
self.name = name
self.barks = barks
self.mother = None
self.father = None
self.progeny = []
class Cat:
name: str
meows: bool
mother: Optional["Cat"]
father: Optional["Cat"]
progeny: List["Cat"]
def __init__(self, name: str, meows: bool):
self.name = name
self.meows = meows
self.mother = None
self.father = None
self.progeny = []
class Person:
name: str
pets: Optional[List[Union[Dog, Cat]]]
friends: Optional[List[Union[Dog, Cat, "Person"]]]
def __init__(
self,
name: str,
pets: Optional[List[Union[Dog, Cat]]] = None,
friends: Optional[List[Union[Dog, Cat, "Person"]]] = None,
):
self.name = name
self.pets = pets
self.friends = friends
NamedType = GraphQLInterfaceType("Named", {"name": GraphQLField(GraphQLString)})
LifeType = GraphQLInterfaceType(
"Life", lambda: {"progeny": GraphQLField(GraphQLList(LifeType))} # type: ignore
)
MammalType = GraphQLInterfaceType(
"Mammal",
lambda: {
"progeny": GraphQLField(GraphQLList(MammalType)), # type: ignore
"mother": GraphQLField(MammalType), # type: ignore
"father": GraphQLField(MammalType), # type: ignore
},
interfaces=[LifeType],
)
DogType = GraphQLObjectType(
"Dog",
lambda: {
"name": GraphQLField(GraphQLString),
"barks": GraphQLField(GraphQLBoolean),
"progeny": GraphQLField(GraphQLList(DogType)), # type: ignore
"mother": GraphQLField(DogType), # type: ignore
"father": GraphQLField(DogType), # type: ignore
},
interfaces=[MammalType, LifeType, NamedType],
is_type_of=lambda value, info: isinstance(value, Dog),
)
CatType = GraphQLObjectType(
"Cat",
lambda: {
"name": GraphQLField(GraphQLString),
"meows": GraphQLField(GraphQLBoolean),
"progeny": GraphQLField(GraphQLList(CatType)), # type: ignore
"mother": GraphQLField(CatType), # type: ignore
"father": GraphQLField(CatType), # type: ignore
},
interfaces=[MammalType, LifeType, NamedType],
is_type_of=lambda value, info: isinstance(value, Cat),
)
def resolve_pet_type(value, _info, _type):
if isinstance(value, Dog):
return DogType.name
if isinstance(value, Cat):
return CatType.name
# Not reachable. All possible types have been considered.
assert False, "Unexpected pet type"
PetType = GraphQLUnionType("Pet", [DogType, CatType], resolve_type=resolve_pet_type)
PersonType = GraphQLObjectType(
"Person",
lambda: {
"name": GraphQLField(GraphQLString),
"pets": GraphQLField(GraphQLList(PetType)),
"friends": GraphQLField(GraphQLList(NamedType)),
"progeny": GraphQLField(GraphQLList(PersonType)), # type: ignore
"mother": GraphQLField(PersonType), # type: ignore
"father": GraphQLField(PersonType), # type: ignore
},
interfaces=[NamedType, MammalType, LifeType],
is_type_of=lambda value, _info: isinstance(value, Person),
)
schema = GraphQLSchema(PersonType, types=[PetType])
garfield = Cat("Garfield", False)
garfield.mother = Cat("Garfield's Mom", False)
garfield.mother.progeny = [garfield]
odie = Dog("Odie", True)
odie.mother = Dog("Odie's Mom", True)
odie.mother.progeny = [odie]
liz = Person("Liz", [], [])
john = Person("John", [garfield, odie], [liz, odie])
def describe_execute_union_and_intersection_types():
def can_introspect_on_union_and_intersection_types():
document = parse(
"""
{
Named: __type(name: "Named") {
kind
name
fields { name }
interfaces { name }
possibleTypes { name }
enumValues { name }
inputFields { name }
}
Mammal: __type(name: "Mammal") {
kind
name
fields { name }
interfaces { name }
possibleTypes { name }
enumValues { name }
inputFields { name }
}
Pet: __type(name: "Pet") {
kind
name
fields { name }
interfaces { name }
possibleTypes { name }
enumValues { name }
inputFields { name }
}
}
"""
)
assert execute_sync(schema=schema, document=document) == (
{
"Named": {
"kind": "INTERFACE",
"name": "Named",
"fields": [{"name": "name"}],
"interfaces": [],
"possibleTypes": [
{"name": "Dog"},
{"name": "Cat"},
{"name": "Person"},
],
"enumValues": None,
"inputFields": None,
},
"Mammal": {
"kind": "INTERFACE",
"name": "Mammal",
"fields": [
{"name": "progeny"},
{"name": "mother"},
{"name": "father"},
],
"interfaces": [{"name": "Life"}],
"possibleTypes": [
{"name": "Dog"},
{"name": "Cat"},
{"name": "Person"},
],
"enumValues": None,
"inputFields": None,
},
"Pet": {
"kind": "UNION",
"name": "Pet",
"fields": None,
"interfaces": None,
"possibleTypes": [{"name": "Dog"}, {"name": "Cat"}],
"enumValues": None,
"inputFields": None,
},
},
None,
)
def executes_using_union_types():
# NOTE: This is an *invalid* query, but it should be *executable*.
document = parse(
"""
{
__typename
name
pets {
__typename
name
barks
meows
}
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"pets": [
{"__typename": "Cat", "name": "Garfield", "meows": False},
{"__typename": "Dog", "name": "Odie", "barks": True},
],
},
None,
)
def executes_union_types_with_inline_fragment():
# This is the valid version of the query in the above test.
document = parse(
"""
{
__typename
name
pets {
__typename
... on Dog {
name
barks
}
... on Cat {
name
meows
}
}
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"pets": [
{"__typename": "Cat", "name": "Garfield", "meows": False},
{"__typename": "Dog", "name": "Odie", "barks": True},
],
},
None,
)
def executes_using_interface_types():
        # NOTE: This is an *invalid* query, but it should be *executable*.
document = parse(
"""
{
__typename
name
friends {
__typename
name
barks
meows
}
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"friends": [
{"__typename": "Person", "name": "Liz"},
{"__typename": "Dog", "name": "Odie", "barks": True},
],
},
None,
)
def executes_interface_types_with_inline_fragment():
# This is the valid version of the query in the above test.
document = parse(
"""
{
__typename
name
friends {
__typename
name
... on Dog {
barks
}
... on Cat {
meows
}
... on Mammal {
mother {
__typename
... on Dog {
name
barks
}
... on Cat {
name
meows
}
}
}
}
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"friends": [
{"__typename": "Person", "name": "Liz", "mother": None},
{
"__typename": "Dog",
"name": "Odie",
"barks": True,
"mother": {
"__typename": "Dog",
"name": "<NAME>",
"barks": True,
},
},
],
},
None,
)
def executes_interface_types_with_named_fragments():
document = parse(
"""
{
__typename
name
friends {
__typename
name
...DogBarks
...CatMeows
}
}
fragment DogBarks on Dog {
barks
}
fragment CatMeows on Cat {
meows
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"friends": [
{"__typename": "Person", "name": "Liz"},
{"__typename": "Dog", "name": "Odie", "barks": True},
],
},
None,
)
def allows_fragment_conditions_to_be_abstract_types():
document = parse(
"""
{
__typename
name
pets {
...PetFields,
...on Mammal {
mother {
...ProgenyFields
}
}
}
friends { ...FriendFields }
}
fragment PetFields on Pet {
__typename
... on Dog {
name
barks
}
... on Cat {
name
meows
}
}
fragment FriendFields on Named {
__typename
name
... on Dog {
barks
}
... on Cat {
meows
}
}
fragment ProgenyFields on Life {
progeny {
__typename
}
}
"""
)
assert execute_sync(schema=schema, document=document, root_value=john) == (
{
"__typename": "Person",
"name": "John",
"pets": [
{
"__typename": "Cat",
"name": "Garfield",
"meows": False,
"mother": {"progeny": [{"__typename": "Cat"}]},
},
{
"__typename": "Dog",
"name": "Odie",
"barks": True,
"mother": {"progeny": [{"__typename": "Dog"}]},
},
],
"friends": [
{"__typename": "Person", "name": "Liz"},
{"__typename": "Dog", "name": "Odie", "barks": True},
],
},
None,
)
# noinspection PyPep8Naming
def gets_execution_info_in_resolver():
encountered = {}
def resolve_type(_source, info, _type):
encountered["context"] = info.context
encountered["schema"] = info.schema
encountered["root_value"] = info.root_value
return PersonType2.name
NamedType2 = GraphQLInterfaceType(
"Named", {"name": GraphQLField(GraphQLString)}, resolve_type=resolve_type
)
PersonType2 = GraphQLObjectType(
"Person",
{
"name": GraphQLField(GraphQLString),
"friends": GraphQLField(GraphQLList(NamedType2)),
},
interfaces=[NamedType2],
)
schema2 = GraphQLSchema(PersonType2)
document = parse("{ name, friends { name } }")
root_value = Person("John", [], [liz])
context_value = {"authToken": "<PASSWORD>"}
assert execute_sync(
schema=schema2,
document=document,
root_value=root_value,
context_value=context_value,
) == (
{"name": "John", "friends": [{"name": "Liz"}]},
None,
)
assert encountered == {
"schema": schema2,
"root_value": root_value,
"context": context_value,
}
|
StarcoderdataPython
|
4827028
|
<filename>guests/factory.py
import logging
from importlib import import_module
class Factory(object):
def __init__(self, cfg):
self.log = logging.getLogger("vo2.%s" % __name__)
self.cfg = cfg
def list_vms(self):
vms = []
try:
vms = self.cfg.get("job", "vms").split(",")
except AttributeError:
self.log.warn("'vms' not set in job config")
finally:
self.log.debug("'vms' = %s" % vms)
return vms
def get(self, name):
self.log.debug("Get VM: %s" % name)
guest = None
vm_section = "%s%s" % (self.cfg.get("general", "guest_name_prefix"), name)
vm_module = "guests.%s" % self.cfg.get(vm_section, "type")
self.log.debug("Importing VM module: %s" % vm_module)
try:
gmodule = import_module(vm_module)
except ImportError as e:
self.log.error("Unable to import virtual device module 'guests.%s': %s" % (vm_module, e))
except AttributeError:
self.log.error("Unknown VM: %s" % name)
else:
guest = gmodule.VirtualMachine(name)
self.configure_guest(guest)
finally:
self.log.debug("Factory made: %s" % guest)
return guest
def configure_guest(self, guest):
section = "%s%s" % (self.cfg.get("general", "guest_name_prefix"), guest.name)
guest.addr = self.cfg.get(section, "address")
guest.port = int(self.cfg.get(section, "port"))
guest.gateway = self.cfg.get(section, "gateway")
guest.headless = self.cfg.get_bool(section, "headless")
self.log.debug("Configuring guest %s: %s,%s,%s" % (section, guest.addr, guest.port, guest.gateway))
section = "timeouts"
guest.rpc_attempts = self.cfg.get_float(section, "rpc_attmpts")
guest.timeout_vm = self.cfg.get_float(section, "vm")
guest.timeout_job = self.cfg.get_float(section, "rpc")
self.log.debug("Configuring guest %s: %s,%s,%s" % (section, guest.rpc_attempts, guest.timeout_vm, guest.timeout_job ))
|
StarcoderdataPython
|
50495
|
<reponame>jpVm5jYYRE1VIKL/djangocms-bootstrap4<filename>djangocms_bootstrap4/contrib/bootstrap4_content/constants.py
from django.utils.translation import gettext_lazy as _
CODE_TYPE_CHOICES = (
('code', _('Inline code')),
('pre', _('Code block')),
('var', _('Variables')),
('kbd', _('User input')),
('samp', _('Sample output')),
)
|
StarcoderdataPython
|
9782308
|
from faust_bootstrap.core.app import FaustApplication
class FaustAppTest(FaustApplication):
def get_unique_app_id(self):
return f"dummy-group"
def setup_topics(self):
...
def build_topology(self):
...
|
StarcoderdataPython
|
3507820
|
<filename>kfunction_3Stage.py
#########################################################################################################################
# <NAME>
# The Ohio State University; Moody's Analytics
# Completely open source
# Functions necessary for 3 Stage Global/Local K-function estimation:
# clockwise = check_clockwise(poly)
# bFeatures = countSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = True)
# dist = distanceMatrix(locsA, locsB)
# flatlist = flatten(S)
# np.array(k) = kFunction(distMat, distVec, Obs = True, Global = True)
# comps = kSimulation(array, simarray, distVec, obsk, Obs = True, Global = True)
# inside = pip(x, y, poly)
# sims = pointSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = True)
# shplst = polygonDefinitions(polyShapes)
# holes = polygonHoles(polyShapes, polyprts)
# polyprts = polygonParts(polyShapes)
# min(lons), min(lats), max(lons), max(lats) = shape_list_decompose(polyList)
# Modified: 07/01/2018
#########################################################################################################################
#########################################################################################################################
# Import Modules
import numpy as np
import geopandas as gpd
import random
#########################################################################################################################
# Check order of vertices; clockwise = outer ring, counter-clockwise = inner ring
def check_clockwise(poly):
"""Checks if a sequence of (x,y) polygon vertice pairs is ordered clockwise or not.
NOTE: Counter-clockwise (=FALSE) vertice order reserved for inner ring polygons"""
clockwise = False
if (sum(x0*y1 - x1*y0 for ((x0, y0), (x1, y1)) in zip(poly, poly[1:] + [poly[0]]))) < 0:
clockwise = not clockwise
return clockwise
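# Illustrative checks (assumed unit-square coordinates, not from the original data):
#   check_clockwise([(0, 0), (0, 1), (1, 1), (1, 0)])  -> True  (clockwise, outer ring)
#   check_clockwise([(0, 0), (1, 0), (1, 1), (0, 1)])  -> False (counter-clockwise, inner ring)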
#######################################################################################################################
# Point Count Simulation Computation
def countSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = True):
""" Simulates point counts, proportionally, within higher geography (ZIP CODES), matches lower geography (BLOCKS)
to the higher geography by centroid locations, and simulates point counts, proportionally, within lower geography.
Returns a geopandas dataframe (bFeatures) with a column (SIM_POINTS) representing the number of points to be simulated within lower geography. """
# Set Proportions of reference measure for simulation for First Stage Polygons (ZIP_CODES)
fsFeatures['FS_PROBS'] = fsFeatures[[fieldFS]] / float(sum(fsFeatures[fieldFS]))
# Multinomial Count Simulation of points for each polygon in study area
if Global:
fsFeatures['POINTS'] = np.random.multinomial(npts, fsFeatures['FS_PROBS'].tolist(), 1).tolist()[0]
else:
fsFeatures['POINTS'] = np.random.multinomial(npts-1, fsFeatures['FS_PROBS'].tolist(), 1).tolist()[0]
# Convert Second Stage Polygons (BLOCKS) to point dataframe by taking centroids
ssPoints = gpd.GeoDataFrame(ssFeatures.drop(['geometry'], axis = 1), geometry = ssFeatures.centroid)
# Spatial Overlay of Second Stage points on First Stage polygons
ssinfs = gpd.sjoin(ssPoints, fsFeatures.filter([idFieldFS, 'POINTS','geometry'], axis = 1), how = 'left', op = 'within')
# Merge First Stage Information back to Second Stage polygons
ssFeatures = ssFeatures.merge(ssinfs.filter([idFieldFS, 'POINTS'], axis = 1), how = 'left', left_index = True, right_index = True)
ssFeatures = ssFeatures.assign(SIM_POINTS = 0)
# Make Lists to loop through, first stage and second stage
fslist = fsFeatures[idFieldFS].values.tolist(); fslist = list(set(fslist))
sslist = ssFeatures[idFieldFS].values.tolist(); sslist = list(set(sslist))
for i in fslist:
# Each set of blocks that 'fall' in a given ZIP CODE
fspts = fsFeatures.loc[fsFeatures[idFieldFS]==i,'POINTS'].item()
if fspts > 0:
if i in sslist:
bsample = ssFeatures.loc[ssFeatures[idFieldFS] == i,[fieldSS]]
bsample['SS_PROBS'] = bsample[[fieldSS]] / float(sum(bsample[fieldSS]))
bsample['SIM_POINTS'] = np.random.multinomial(fspts, bsample['SS_PROBS'].tolist(), 1).tolist()[0]
ssFeatures.update(bsample.loc[:,['SIM_POINTS']])
else:
ssFeatures = ssFeatures.append(fsFeatures.loc[fsFeatures[idFieldFS] == i, ["POINTS", idFieldFS, "geometry"]].rename(columns={'POINTS': 'SIM_POINTS'}), ignore_index = True)
else:
pass
# Make Counts Retrievable
return ssFeatures.reset_index(drop = True)
#######################################################################################################################
# Distance Matrix Construction - Miles
def distanceMatrix(locsA, locsB):
""" Calculates distance matrix (in miles) between the locations in locsA to locations in locsB. Assumes that both locsA/locsB are numpy arrays.
First column of locsA/locsB must be X or LON; second column must be Y or LAT. Measures all pairwise distances.
Returns the full distance matrix in numpy matrix form (dist). """
# Empty Container Matrix for Distances
dist = np.zeros((len(locsA), len(locsB)))
dist = np.sqrt((69.1 * (locsB[:,0][:,np.newaxis] - locsA[:,0]) * np.cos(locsA[:,1]/57.3))**2 + \
(69.1 * (locsB[:,1][:,np.newaxis] - locsA[:,1]))**2)
return dist
#######################################################################################################################
# Flatten List of Lists to List
def flatten(S):
""" Flattens a list of lists, but leaves tuples intact"""
flatlist = [val for sublist in S for val in sublist]
return flatlist
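# e.g. flatten([[1, 2], [(3, 4)]]) -> [1, 2, (3, 4)]  (tuples are left intact)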
#######################################################################################################################
# K-function Calculation
def kFunction(distMat, distVec, Obs = True, Global = True):
""" Calculates K-function values of a given point pattern based on the pair-wise distance matrix (distMat) for a vector of distances (distVec).
Global = True provides average K-count of all n points per distance. Local (Global = False) provides K-count for individual points per distance.
Returns K-function values (k). """
if Global:
# Global K-function Estimates
k = np.zeros((len(distVec)))
for i in range(len(distVec)):
k[i] = (sum(sum(distMat <= distVec[i]) - 1)/float(distMat.shape[0]))
#del i
else:
if Obs:
# Local Obs K-function Estimates
k = np.zeros([len(distMat), len(distVec)])
for i in range(len(distMat)):
for j in range(len(distVec)):
k[i,j] = (sum(distMat[i,] <= distVec[j])-1)
else:
# Local Simulated K-function Estimates
k = np.zeros([len(distMat), len(distVec)])
for i in range(len(distMat)):
for j in range(len(distVec)):
k[i,j] = (sum(distMat[i,] <= distVec[j]))
# Make K-function Values Retrievable
return np.array(k)
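# Output shapes (illustrative): given an n x n distance matrix and m entries in distVec,
# kFunction(distMat, distVec) returns a length-m vector of average K-counts (Global = True),
# while Global = False returns an n x m matrix of per-point K-counts.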
#######################################################################################################################
# K-function Simulation Calculation
def kSimulation(array, simarray, distVec, obsk, Obs = True, Global = True):
""" Calculates K-function values and P-value counts of simulated point pattern (simarray) relative to observed K-function values (obsk) for a vector of distances (distVec).
Global = True compares observed K-functions to the K-functions from a full simulated distribution. Local (Global = False) compares all observed K-counts to the K-counts of observed points to simulated distribution.
Returns K-function values (simk) and P-value counts (cnts/comps). """
if Global:
# Distance Matrix of Simulated Distribution
simfull = distanceMatrix(simarray, simarray)
# K-function Estimates of Simulation
simk = kFunction(simfull, distVec)
# Compare Observed Counts to Simulated Counts
comps = [x <= y for (x,y) in zip(np.array(obsk), np.array(simk))]
# Counts of Simulations that Result in Higher K-function Values than the Observed Distribution for All Distances
cnts = np.matrix([int(elem) for elem in comps])
#del elem, comps, simfull
# Make Counts and K-function values retrievable
return cnts, np.matrix(simk)
else:
# Distance Matrix of Observed Points to Simulated Points NOTE THAT SIMARRAY IS FIRST ENTRY!
simfull = distanceMatrix(simarray, array)
# K-function Estimates from Simulation
simk = kFunction(simfull, distVec, Obs, Global = False)
# Counts of Simulations that Result in Higher K-function Values than the Observed Distribution for All Distances
comps = [x <= y for (x,y) in zip(np.array(obsk), np.array(simk))]
#del simfull, simk
# Make Counts Retrievable
return comps
#########################################################################################################################
# Point-in-Polygon
def pip(x, y, poly):
"""Checks if a given point (x,y) falls inside or outside the given polygon (poly).
Returns True (inside) or False (outside). """
#if poly[0] != poly[-1]:
# return print("Polygon not closed")
n = len(poly)
inside = False
p1x, p1y = poly[0]
for i in range(n+1):
p2x, p2y = poly[i % n]
if y > min(p1y, p2y):
if y <= max(p1y, p2y):
if x <= max(p1x, p2x):
if p1y != p2y:
xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x, p1y = p2x, p2y
return inside
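# Illustrative check against a unit square (assumed coordinates):
#   square = [(0, 0), (1, 0), (1, 1), (0, 1)]
#   pip(0.5, 0.5, square) -> True
#   pip(2.0, 2.0, square) -> False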
#########################################################################################################################
# Point Simulation: Both Global and Local Versions
def pointSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = True):
""" Simulates points, proportionally, within higher geography (ZIP CODES), matches lower geography (BLOCKS)
to the higher geography by centroid locations, and simulates points, proportionally, within lower geography.
Assumes a multinomial count distribution (npts = len(point_pattern), p = proportions) to assign points to given polygons, where
p = proportions are the multinomial probabilities.
Global = True simulates n points for a full distribution. Local (Global = False) simulates n-1 points for local K-function
comparison.
Returns array of simulated point locations (sims). """
# Multinomial Count Simulation of points for each polygon in study area
if Global:
polySimCounts = countSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = True)
else:
polySimCounts = countSimulation(fsFeatures, ssFeatures, npts, fieldFS, fieldSS, idFieldFS, Global = False)
# Extract Point Counts
rpts = polySimCounts['SIM_POINTS'].tolist()
# Identify Single and Multiple Polygons
polyprts = polygonParts(polySimCounts)
# Identify Holes in Polygons
holes = polygonHoles(polySimCounts, polyprts)
# Identify Polygon Shapes
shapes = polygonDefinitions(polySimCounts)
# Point Simulation $\Rightarrow$ locations constrained by given polygons
pts = []
for i in range(len(shapes)):
if holes[i][0] == True:
'''Single Part Polygons'''
total = rpts[i]
count = 0
ptssim = []
minx, miny, maxx, maxy = shape_list_decompose(shapes[i])
while count < total:
x = random.uniform(minx, maxx)
y = random.uniform(miny, maxy)
if pip(x, y, shapes[i]):
count += 1
ptssim.append([x, y])
pts.append(ptssim)
elif holes[i][0] == False and holes[i][1] == True:
            '''Multipart Polygons w/ No Holes'''
a = [k for k, x in enumerate(holes[i][2]) if x]
apolys = [] #apolys is a list of all polygons
for j in a:
apolys.append([shapes[i][polyprts[i][j]:polyprts[i][j+1]-1]][0])
total = rpts[i]
count = 0
ptssim = []
minx, miny, maxx, maxy = shape_list_decompose(shapes[i])
while count < total:
x = random.uniform(minx, maxx)
y = random.uniform(miny, maxy)
gpip = []
for j in range(len(a)):
gpip.append(pip(x, y, apolys[j]))
if any(gpip):
count += 1
ptssim.append([x, y])
else:
pass
pts.append(ptssim)
elif holes[i][0] == False and holes[i][1] == False:
            '''Multipart Polygons w/ Holes'''
a = [k for k, x in enumerate(holes[i][2]) if x]
apolys = [] #apolys is a list of all polygons
for j in a:
apolys.append([shapes[i][polyprts[i][j]:polyprts[i][j+1]-1]][0])
h = [k for k, x in enumerate(holes[i][2]) if not x]
hpolys = [] #hpolys is a list of all 'hole' polygons
for j in h:
hpolys.append([shapes[i][polyprts[i][j]:polyprts[i][j+1]-1]][0])
total = rpts[i]
count = 0
ptssim = []
minx, miny, maxx, maxy = shape_list_decompose(shapes[i])
while count < total:
x = random.uniform(minx, maxx)
y = random.uniform(miny, maxy)
gpip = []
for j in range(len(a)):
gpip.append(pip(x, y, apolys[j]))
if any(gpip):
bpip = []
for j in range(len(h)):
bpip.append(pip(x, y, hpolys[j]))
if not any(bpip):
count += 1
ptssim.append([x, y])
else:
pass
else:
pass
pts.append(ptssim)
sims = np.array([item for sublist in pts for item in sublist])
return sims
#######################################################################################################################
# Identify Polygon Parts
def polygonDefinitions(polyShapes):
""" Reads the geometry from a polygon geodataframe and extracts vertices defining all polygons
    Multipart polygons are individually processed; needs output from polygonParts() to identify individual parts
Returns a list (shplst) of vertices of every polygon. """
# Pull polygon geometry
shapes = polyShapes.geometry.type
shplst = []
for i in range(len(shapes)):
if shapes[i] == "Polygon":
shplst.append(list(zip(*polyShapes.geometry[i].exterior.coords.xy)))
else:
mpp = []
for j in range(len(polyShapes.geometry[i])):
mpp.append(list(zip(*polyShapes.geometry[i][j].exterior.coords.xy)))
shplst.append(flatten(mpp))
return shplst
#######################################################################################################################
# Identify Polygon Parts
def polygonHoles(polyShapes, polyprts):
""" Reads the geometry from a polygon geodataframe and identifies mulipart and hole polygon structures
Returns a list (holes) identifying singlepart vs multipart polygons, and identifies which if any are 'holes.' """
# Pull polygon geometry
shapes = polygonDefinitions(polyShapes)
holes = []
for i in range(len(shapes)):
single, test = True, []
for j in range(len(polyprts[i])-1):
if len(polyprts[i]) > 2:
single = False
test.append(check_clockwise(shapes[i][polyprts[i][j]:polyprts[i][j+1]-1]))
holes.append([single, all(test), test])
return holes
#######################################################################################################################
# Identify Polygon Parts
def polygonParts(polyShapes):
""" Reads the geometry from a polygon geodataframe and identifies the beginning and end nodes of individual polygons.
    Necessary for multipart polygons
Returns a list (polyprts) of lists containing the first vertex of every polygon, as well as the last vertex. """
# Pull polygon geometry
shapes = polyShapes.geometry.type
polyprts = []
for i in range(len(shapes)):
if shapes[i] == "Polygon":
polyprts.append([0, len(polyShapes.geometry[i].exterior.coords)])
else:
initial = 0
mpp = [initial]
for j in range(len(polyShapes.geometry[i])):
initial += len(polyShapes.geometry[i][j].exterior.coords)
mpp.append(initial)
polyprts.append(mpp)
return polyprts
#########################################################################################################################
# Decompose a list of tuples defining a polygon
def shape_list_decompose(polyList):
"""Decompose a list of tuples containing the LON/LAT pairs defining a polygon
The result is the bounding box components of the polygon"""
# Pull Longitudes and Latitudes individually
lons = [i[0] for i in polyList]
lats = [i[1] for i in polyList]
return min(lons), min(lats), max(lons), max(lats)
#########################################################################################################################
|
StarcoderdataPython
|
3342933
|
"""
This program is part of MOLDIS: The bigdata analytics platform. Accompanying manuscript
and the complementary web interface can be accessed at : https://moldis.tifrh.res.in/data/bodipy
Python Requirements:
numpy, scipy, scikit-learn, QML
System Requirements:
MOPAC, Obabel, write permission in running directory
Licence: MIT
"""
# ========IMPORTS================
import numpy as np
from sklearn import gaussian_process
import scipy as sp
import argparse as ap
from GenerateBodipy import GenerateBodipy
from GenerateSLATM import GenerateSLATM
# ========ARGPARSER=================
parser = ap.ArgumentParser(description="This program takes in a target value in eV and yields BODIPY molecules\
 closer to that value. Only improvements over previous evaluations are displayed. ")
parser.add_argument("target", type=float, help="Target S0->S1 value, in eV")
parser.add_argument("--group", "-g", type=int, default=2,
help="# of substitutions, default = 2, range 2-7")
parser.add_argument("--data", "-d", type=str, default="./data",
help="Location of datafiles, default = ./data")
parser.add_argument("--restart", "-r", type=int, default=5,
help="# of evaluations for single EI evaluation.\
More evaluations give more robust minima, with higher computation cost, default = 5")
parser.add_argument("--exploration", "-x", type=float, default="0.01",
help="Exploitation vs Exploration parameter, default = 0.01, range 0 100")
parser.add_argument("--seed", "-s", type=int, default=5,
help="Number of initial evaluations to build the GP model, default = 5")
parser.add_argument("--iter", "-i", type=int, default=200,
help="Maximum number of iterations. Largest covariance matrix in GP = iter + seed, default = 200")
parser.add_argument("--tol", "-t", type=float, default=0.001,
help="Tolerance, stop iterations once absolute error is less then, default = 0.001")
args = parser.parse_args()
# ==============================================
n_groups = args.group
n_iter = args.iter
n_seeds = args.seed
ex_v_ex = args.exploration
n_restart = args.restart
target = args.target
data_dir = args.data
tol = args.tol
# ================================================
print("Searching for {:d}D BODIPY near {:f} eV".format(n_groups, target))
print("Reading ML model from {}".format(data_dir))
print("Iterations {:d}; Initial evaluations {:d}".format(n_iter, n_seeds))
print("Bayesian opt. parameters:\n Exploration/Exploitation \
param: {:f}; Eval. per EI: {:d}".format(ex_v_ex, n_restart))
# ================================================
# ===================CLASSES==================================
class KRRModel:
"""
This class contains the KRR ML machine. The coefficients $\\alpha$
and descriptor, $d$, will be loaded from location <data>, using files
    named desc.npy and coeff.npy. Hyperparameter $\\sigma$ is defined on
the basis of median search. Energy is evaluated as
$$
E = \\sum_i \\alpha_i * exp(-\\frac{\\sum_j |(d_i - d_j)|}{\\sigma}).
$$
"""
def __init__(self, target, data_dir=data_dir):
self.desc = np.load("{}/desc.npy".format(data_dir))
# self.desc = self.desc[0:2000,:]
# self.desc = self.desc.astype("float")
self.coeff = np.load("{}/coeff.npy".format(data_dir))
# self.coeff = self.coeff[0:2000]
# self.sigma = 26.57
self.sigma = 840.087535153
self.target = target
self.bodipy_generator = GenerateBodipy()
self.slatm_generator = GenerateSLATM()
def get_s0s1(self, descriptor):
desc_diff = np.exp(-np.sum(np.abs(self.desc -
descriptor), axis=1)/self.sigma)
s0s1 = np.sum(desc_diff * self.coeff)
return s0s1
def get_loss(self, descriptor):
"""
Get loss function.
loss function = -(E - Target)**2;
for inverted parabola to be optimzed using EI
param: descriptor (1x322 numpy array)
return: scalar loss
"""
s0s1 = self.get_s0s1(descriptor)
return -(s0s1 - self.target)**2
def gen_descriptor(self, sub_array:np.ndarray):
"""
Generate SLATM descriptor based on input array.
Input array format: for array of len L, L/2 = n_groups
[ <L/2 positions>, <L/2 substitution> ]
param: Len 1x(2*n_groups) array
return: 1x18023 slatm descriptor
"""
positions = sub_array[0:len(sub_array)//2].astype(int)
substitutions= sub_array[len(sub_array)//2:len(sub_array)].astype(int)
descriptor = np.zeros((1, 18023))
self.bodipy_generator(list(positions.flatten()), list(substitutions.flatten()))
descriptor = self.slatm_generator()
return descriptor.reshape(1, -1)
# for pos,group in zip(positions, substitutions):
# if int(group) != 0:
# descriptor[int(pos) - 1, int(group) - 1] = 1.
class BayesOpt:
"""
Simple Bayesian Optimization routine using Expected Improvement algorithm.
Surrogate model: Gaussian Process implemented using scikit-learn. Multiple
evaluation EI minimization idea inspired from <NAME>'s blog.
"""
def __init__(self, ex_v_ex=0.01, n_restart=25):
        self.ex_v_ex = ex_v_ex
self.n_restart = n_restart
def ei(self, x_query, x_prev, y_prev, gpr_model):
"""
Get expected improvement. if sigma=0, ei = 0;
"""
mu, sigma = gpr_model.predict(x_query, return_std=True)
sigma = sigma.reshape(-1, 1)
mu_sample_opt = np.max(y_prev)
imp = mu - mu_sample_opt - self.ex_v_ex
Z = imp/(sigma+0.0000001) # avoid nan
ei = imp*sp.stats.norm.cdf(Z) + sigma * sp.stats.norm.pdf(Z)
ei[sigma == 0.0] = 0.0 # ei =0 if sigma =0
return ei.flatten()
    # For some reason this is now causing a problem with lbfgs,
    # while initially it was not. This works for now; need to look into it deeper.
def next_location(self, x_prev, y_prev, gpr_model, constraints):
"""
Get next possible location to evaluate the model on.
Iterate n_restart times to get best EI, propose the location for next
evaluation.
"""
dim = x_prev.shape[1]
min_val = 1
min_x = None
def objective_fun(x): return self.ei(
x.reshape(-1, dim), x_prev, y_prev, gpr_model)
for x0 in np.random.uniform(constraints[:, 0], constraints[:, 1], size=(self.n_restart, dim)):
res = sp.optimize.minimize(
objective_fun, x0=x0, bounds=constraints, method='L-BFGS-B')
if res.fun < min_val:
min_val = res.fun[0]
min_x = res.x
return min_x.reshape(-1, 1)
# =======================INIT======================
# Define GP surrogate
# sigma = 26.57 = 2l^2 X for 1hot
# length_scale = (sigma/2)^0.5 = 3.64485939 X for 1hot
# rbf = gaussian_process.kernels.RBF(length_scale=3.64485939) X for 1hot
# sigma = 840.087535153 = 2l^2
# length_scale = (sigma/2)^0.5 = 20.4949693236
rbf = gaussian_process.kernels.RBF(length_scale=20.4949693236)
gpr_model = gaussian_process.GaussianProcessRegressor(kernel=rbf, alpha=0.001)
# All possible groups and positions to choose from
groups = [i for i in range(1, 47)]
positions = [i for i in range(1, 8)]
# Instantiate Bayesian and Kernel Ridge evaluation model
bo = BayesOpt(ex_v_ex=ex_v_ex, n_restart=n_restart)
kkr = KRRModel(target=target, data_dir=data_dir)
# Constraints between valid groups and positions
constraints = []
for i in range(n_groups):
constraints.append([1, 7])
for i in range(n_groups):
constraints.append([1, 46])
constraints = np.array(constraints)
# Initialize seeds to build initial GP model
x_prev = []
# seed the search with s random 2D
grps = np.random.choice(groups, (n_seeds, n_groups))
for i in range(n_seeds):
tmp = np.random.choice(positions, n_groups, replace=False)
tmp = np.insert(tmp, len(tmp), grps[i, :])
x_prev.append(tmp)
x_prev = np.array(x_prev)
y_prev = np.zeros((n_seeds, 1))
for i in range(n_seeds):
y_prev[i] = kkr.get_loss(kkr.gen_descriptor(x_prev[i]))
y_prev_old = -99.0
# ================MAIN LOOP===============================================
print("=================================================================")
print("ITER\tPOS\t\tGROUPS\t\tS0S1(eV)\tTarget")
print("=================================================================")
for i in range(n_iter):
# for iteration i, obtain updated GPR model
gpr_model.fit(x_prev, y_prev)
# obtain next location using EI
x_next = bo.next_location(x_prev, y_prev, gpr_model, constraints)
unique_pos = False
while not unique_pos:
# if positions are not unique then discard it and generate one more
# random position to shuffle away
# Ideally shall replace with symmetric position, but this helps in
# adding more random seeding data
tmp_positions = np.squeeze(x_next[0:n_groups]).astype(int).tolist()
if len(set(tmp_positions)) < n_groups:
tmp = np.random.choice(positions, n_groups, replace=False)
tmp = np.insert(tmp, len(tmp),
np.random.choice(groups, n_groups))
x_next = tmp
else:
unique_pos = True
# get the loss value at proposed location
y_next = kkr.get_loss(kkr.gen_descriptor(x_next))
y_s0s1 = kkr.get_s0s1(kkr.gen_descriptor(x_next))
x_next = x_next.astype(int).reshape(1, -1).squeeze()
# if new results are improvement over previous, and valid
# print them. If invalid (dimension of substitution reduced)
# then skip it.
if (y_next > y_prev_old) and (len(set(x_next[0:len(x_next)//2])) == len(x_next)//2):
print("{:d}\t{}\t\t{}\t\t{:f}\t{:f}".format(
i,
" ".join(list(map(str, x_next[:len(x_next)//2]))),
" ".join(list(map(str, x_next[len(x_next)//2:]))),
y_s0s1,
target
))
if (np.abs(y_next - y_prev_old) < tol):
print("Desired Tolerance Reached")
break
y_prev_old = y_next
if not (len(set(x_next[0:len(x_next)//2])) == len(x_next)//2):
continue
x_prev = np.vstack((x_prev, x_next))
y_prev = np.vstack((y_prev, y_next))
print("=================================================================")
|
StarcoderdataPython
|
3533612
|
<filename>bloodon/tools/calendarutils.py
from calendar import monthrange
from itertools import groupby
from django.utils import formats
import datetime
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from bloodon.alerts.models import Alert
from base64 import decode
weekdays = {0: 'mon', 1: 'tue', 2: 'wed', 3: 'thu', 4: 'fri', 5: 'sat', 6: 'sun'}
weekTDays = {0: 'Monday', 1: 'Tuesday', 2: 'Wednesday', 3: 'Thursday', 4: 'Friday', 5: 'Saturday', 6: 'Sunday'}
def calendar_events(request):
"""
Show calendar of events this month
"""
lToday = datetime.datetime.now()
calendar = AlertCalendar()
return {'Calendar': mark_safe(calendar)}
class AlertCalendar(object):
def __init__(self, date=datetime.datetime.now()):
self.start = date
currentMonth = date.month
currentYear = date.year
currentDay = date.day
self.idOfToday = str (currentDay) + '_' + str(currentMonth)
totalDaysOfMonth = monthrange(currentYear, currentMonth)[1]
previousYear = currentYear
nextYear = currentYear
previousMonth = currentMonth - 1
nextMonth = currentMonth + 1
if currentMonth == 1:
previousMonth = 12
previousYear = currentYear - 1
if currentMonth == 12:
nextMonth = 1
nextYear = currentYear + 1
totalDaysNextMonth = monthrange(nextYear, nextMonth)[1]
totalDaysPreviousMonth = monthrange(previousYear, previousMonth)[1]
if currentDay > 2:
_from = datetime.date(currentYear, currentMonth, currentDay)
else:
_from = datetime.date(previousYear, previousMonth, totalDaysPreviousMonth)
end = datetime.date(nextYear, nextMonth, min(currentDay, totalDaysNextMonth))
# self.get_alerts(_from, end)
string = self.buildHeader()
starWeek = date.weekday()
total = 1
# before today
if starWeek > 0:
for i in xrange(starWeek, 0, -1):
#
if currentDay > i:
day = currentDay - i
year = currentYear
month = currentMonth
else:
day = totalDaysPreviousMonth - i + 1
year = previousYear
month = previousMonth
string += self.appendNewDay(day, month, year, 'pass')
if total % 7 == 0:
string += '</div><div class="row">'
total += 1
# current month today
for i in range(currentDay, totalDaysOfMonth + 1):
string += self.appendNewDay(i, currentMonth, currentYear, 'current')
if total % 7 == 0:
string += '</div><div class="row">'
total += 1
if total < 36:
for i in range(1, totalDaysNextMonth):
string += self.appendNewDay(i, nextMonth, nextYear, 'next')
if total % 7 == 0:
string += '</div><div class="row">'
total += 1
if total == 36:
break
self.html = string + '</div>'
# get alert by date
def get_alerts(self, start, end):
alerts = Alert.objects.filter(date_for__gte=start, date_for__lte=end)
# group_by_day(home)
field = lambda alert: str(alert.date_for.day) + '_' + str(alert.date_for.month)
self.alerts = dict(
[(day, list(items)) for day, items in groupby(alerts, field)]
)
def buildHeader(self):
string = '<div class="header row">'
for day in weekdays:
string += '<div class="weekday cell cell-header">%s</div>' % _(weekTDays[day])
return string + '</div><div class="row"> '
def appendNewDay(self, day, month, year, className):
id = str(day) + '_' + str(month)
htmlId = str(day) + '/' + str(month) + '/' + str(year)
toAdd = ''
#title = ''
# FIXME
"""
if id in self.alerts:
className += ' event'
#title = formats.date_format(datetime.date(year, month, day), "DATE_FORMAT", True).encode('utf-8')
toAdd = '<div class="alert-event"> %d </div>' % len(self.alerts[id])
"""
if id == self.idOfToday:
className += ' today'
return '<div class="cell cell-day %s" id="%s">%s%d</div>' % (className, htmlId, toAdd, day)
def __str__(self):
return self.html
|
StarcoderdataPython
|
3474609
|
<reponame>codeic/lpthw
# This line prints sentence
print "I will now count my chickens:"
# This line prints word, then does math
print "Hens", round(25 + 30 / 6)
# This line also prints word and then does some math
print "Roosters", round(100-25*3%4)
# This line prints sentence
print "Now I will count the eggs:"
# This line does math
print round(3+2+1-5+4%2+1/4+6)
# This line prints sentence. Math is not being done. This is only a sentence.
print "Is it true that 3+2<5-7?"
# This line does the math. It returns the result as a boolean because the operation contains an inequality symbol, a less-than sign.
print 3+2<5-7
# This line prints sentence and then does some math.
print "What is 3+2?", round(3+2)
#This line prints sentence and then does some math.
print "What is 5-7?", round(5-7)
# This line prints sentence.
print "Oh, what's why it's False."
# This line prints sentence.
print "How about some more."
# This line prints sentence, does some math and returns a boolean statement.
print "Is it greater?", 5>-2
# This line prints sentence, does some math and returns a boolean statement.
print "Is it greater or equal?", 5>=-2
# This line prints sentence, does some math and returns a boolean statement.
print "Is it less or equal?", 5<=-2
|
StarcoderdataPython
|
6561900
|
# Copyright (C) 2022 viraelin
# License: MIT
from PyQt6.QtWidgets import QGraphicsItem
from PyQt6.QtGui import QUndoCommand
class PaintCommand(QUndoCommand):
def __init__(self, layer: QGraphicsItem, coords: list, text: str) -> None:
super().__init__()
self.layer = layer
self.coords = coords
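        # Each entry in coords is expected to be (x, y, old_value, new_value);
        # undo() restores old_value and redo() applies new_value.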
self.setText(text)
def undo(self) -> None:
for coord in self.coords:
x = coord[0]
y = coord[1]
old = coord[2]
self.layer.cells[y][x] = old
self.layer.update()
def redo(self) -> None:
for coord in self.coords:
x = coord[0]
y = coord[1]
new = coord[3]
self.layer.cells[y][x] = new
self.layer.update()
|
StarcoderdataPython
|
1654589
|
<gh_stars>0
import inspect
import traceback
import uvicorn
import json
from starlette.applications import Starlette
from starlette.endpoints import HTTPEndpoint
from starlette.responses import JSONResponse
app = Starlette()
@app.route('/{endpoint}/{uid}')
class DeviceEndpoint(HTTPEndpoint):
@staticmethod
async def _call_function(func, data, endpoint, uid):
if inspect.iscoroutinefunction(func):
await func(data, endpoint, uid)
else:
func(data, endpoint, uid)
async def post(self, request):
data = await request.body()
if data:
try:
data = json.loads(data)
except json.JSONDecodeError as e:
return JSONResponse(
{'message': f'Failed to read data:{e}'},
status_code=400
)
endpoint = request.path_params['endpoint']
uid = request.path_params['uid']
        if endpoint not in DeviceCallback.instances:
return JSONResponse(
{'message':f'Endpoint {endpoint} is not available'},
status_code=400
)
try:
request_validator = DeviceCallback.instances[endpoint].request_validator
if request_validator is not None:
request_validator.validate()
except Exception as e:
return JSONResponse(
{'message':f'Validation failed:\n{e}'},
status_code=400
)
callback_func = DeviceCallback.instances[endpoint].callback_func
try:
await self._call_function(
callback_func,
data=data,
endpoint=endpoint,
uid=uid
)
return JSONResponse({'message':'Success'}, status_code=200)
except BaseException:
traceback.print_exc()
return JSONResponse(
{'message':'Callback failed'},
status_code=500
)
class DeviceCallback:
instances = {}
def __init__(self, endpoint, callback_func, request_validator=None):
self.endpoint = endpoint
self.callback_func = callback_func
self.request_validator = request_validator
DeviceCallback.instances[self.endpoint] = self
class device_callback:
def __init__(self, endpoint, request_validator=None):
self.endpoint = endpoint
self.request_validator = request_validator
def __call__(self, fn):
DeviceCallback(
endpoint=self.endpoint,
callback_func=fn,
request_validator=self.request_validator
)
return fn
def tenctarium(host='localhost', port=8090):
uvicorn.run(app, host=host, port=port)
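# Minimal usage sketch (endpoint name and port are illustrative assumptions):
#
#     @device_callback("sensors")
#     async def handle_sensor(data, endpoint, uid):
#         print(endpoint, uid, data)
#
#     tenctarium(host="0.0.0.0", port=8090)
#
# A POST to /sensors/<uid> with a JSON body is then routed to handle_sensor.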
|
StarcoderdataPython
|
6585262
|
#!/usr/bin/env python2.7
#coding=utf-8
#author@alingse
import json
import sys
# set A,set B, give A - B
# do not need sorted, It use the memory
# all string should be keep as unicode
def get_id_from_A(line_data):
return line_data.strip()
#return json.loads(line_data).get('id')
#return json.loads(line_data)['weibo']
#return line_data.strip().decode('utf-8')
#return json.loads(line_data)['name']
#return json.loads(line_data).get('item_id')
#return json.loads(line_data).get('auctionId')
#return json.loads(line_data).get('pi')
#return json.loads(line_data).get('company_id')
#return json.loads(line_data).get('search_unicode')
#return json.loads(line_data).get('unique')
#return json.loads(line_data).get('KeyNo')
#return line_data.split('\t',1)[0]
#return line_data.strip().split('\t',1)[1]
#return line_data.split('\t',1)[0]
#return json.loads(line_data).get('shop_id')
#return json.loads(line_data).get('item_info').get('item_id')
#return json.loads(line_data).get('item_info').get('category_id')
def get_id_from_B(line_data):
return line_data.strip()
#return line_data.strip().decode('utf-8')
#return line_data.strip().split('\t')[0]
#return line_data.strip().split('\t',1)[0]
#return line_data.strip().split('\t',1)[1]
#return line_data.strip()[-32:]
#return json.loads(line_data).get('uid')
#return json.loads(line_data).get('key')
#return json.loads(line_data).get('urltail')
#return json.loads(line_data).get('shop_id')
#return json.loads(line_data).get('name')
#return json.loads(line_data).get('item_info').get('item_id')
#return json.loads(line_data).get('item_info').get('category_id')
#return json.loads(line_data).get('company_id')[-32:]
#return json.loads(line_data).get('company_id')
#return json.loads(line_data).get('name')
#return json.loads(line_data).get('unique')
#return json.loads(line_data).get('item_id')
#return json.loads(line_data).get('search_unicode')
#return json.loads(line_data)['weibo']
#return json.loads(line_data).get('user_id')
#return json.loads(line_data).get('brand_id')
#return json.loads(line_data).get('pi')
#return json.loads(line_data).get('urltail')
#uin=json.loads(line_data).get('uin')
#if uin!=None: return str(uin)
id_dict = {}
A_file = open(sys.argv[1], 'r')
B_file = open(sys.argv[2], 'r')
output = open(sys.argv[3], 'w')
#default A is not small
Asmall = False
#Asmall=True
if Asmall:
    print('set B might be too big')
for line_A in A_file:
if line_A == "\n":
continue
_id = get_id_from_A(line_A)
id_dict[_id] = 0
A_file.close()
print('A _id count is {}'.format(len(id_dict)))
for line_B in B_file:
if line_B == "\n":
continue
_id = get_id_from_B(line_B)
if _id in id_dict:
id_dict[_id] = 1
B_file.close()
count = 0
A_file = open(sys.argv[1], 'r')
for line_A in A_file:
if line_A == "\n":
continue
_id = get_id_from_A(line_A)
if id_dict[_id] == 1:
continue
output.write(line_A)
count += 1
    print('A not in B is {}'.format(count))
A_file.close()
output.close()
if not Asmall:
print('set A is too big')
for line_B in B_file:
if line_B == "\n":
continue
_id = get_id_from_B(line_B)
id_dict[_id] = 1
B_file.close()
print('B _id count is:{}'.format(len(id_dict)))
count = 0
for line_A in A_file:
if line_A == "\n":
continue
_id = get_id_from_A(line_A)
if _id in id_dict:
continue
output.write(line_A)
count += 1
    print('A not in B is {}'.format(count))
A_file.close()
output.close()
|
StarcoderdataPython
|
1767217
|
<reponame>Minres/PySysC
#
# Copyright (c) 2019 - 2021 MINRES Technologies GmbH
#
# SPDX-License-Identifier: Apache-2.0
#
'''
Created on 30.08.2021
@author: eyck
'''
from cppyy import gbl as cpp
class ScModule(cpp.scc.PyScModule):
'''
classdocs
'''
def __init__(self, name):
super().__init__(self, name)
def __getattr__(self, attr):
if self.instance is None:
raise AttributeError
return getattr(self.instance, attr)
def ScMethod(self, func, sensitivity=[], initialize=False):
pass
|
StarcoderdataPython
|
5036247
|
print("Conversor de moedas (Dol; BTC; BAT)")
v = float(input("Insira o valor em R$ a ser convertido: "))
print(
f"""Este valor representa:\n{v*6.10:.2f} BAT\n{v*235940.41:.2f} BTC\n{v*5.63:.2f} Dól"""
)
print("*Cotação de: 07/01/2022 às 18:29")
|
StarcoderdataPython
|
6495747
|
<filename>social_team/accounts/models.py
from django.conf import settings
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \
PermissionsMixin
from cropperjs.models import CropperImageField
from markdownx.models import MarkdownxField
class UserManager(BaseUserManager):
"""Create and save new user"""
def create_user(self, email, password=None):
if not email:
raise ValueError("User must have email address")
user = self.model(email=self.normalize_email(email))
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password=None):
"""Create and save a new super user"""
user = self.create_user(email, password=password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""Custom user model that supports using email instead of username"""
email = models.EmailField(verbose_name='email address', max_length=255,
unique=True)
full_name = models.CharField(max_length=255, blank=False, default='')
avatar = CropperImageField(dimensions=(240, 240), linked=True,
default='avatars/sample.png',
upload_to='avatars/')
bio = MarkdownxField()
date_joined = models.DateTimeField(auto_now_add=True)
main_skills = models.ManyToManyField('MainSkill',
related_name='mainskills')
other_skills = models.ManyToManyField('OtherSkill',
related_name='otherskills')
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = 'email'
def __str__(self):
return self.full_name
class MainSkill(models.Model):
"""Model for user main skills"""
name = models.CharField(max_length=255, unique=True)
def __str__(self):
return self.name
class OtherSkill(models.Model):
"""Model for user own skills"""
user = models.ForeignKey(settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='user_skills')
name = models.CharField(max_length=255, blank=True)
def __str__(self):
return self.name
class UserProject(models.Model):
"""Model for user own project"""
user = models.ForeignKey(settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='user_projects')
project_name = models.CharField(max_length=255, blank=True)
url = models.URLField()
def __str__(self):
return self.project_name
|
StarcoderdataPython
|
196701
|
<filename>tests/python_to_cpp/Shed Skin Examples/compile_all.py
import sys, platform, os
enopt = False
total = 0
ok = 0
for fname in os.listdir('.'):
if fname.endswith('.cpp'):
print(fname, end = '', flush = True)
if sys.platform == 'win32':
was_break = False
for version in ['2019', '2017']:
for edition in ['BuildTools', 'Community', 'Enterprise', 'Professional']:
vcvarsall = 'C:\\Program Files' + ' (x86)'*platform.machine().endswith('64') + '\\Microsoft Visual Studio\\' + version + '\\' + edition + R'\VC\Auxiliary\Build\vcvarsall.bat'
if os.path.isfile(vcvarsall):
was_break = True
#print('Using ' + version + '\\' + edition)
break # ^L.break
if was_break:
break
if not was_break:
sys.exit('''Unable to find vcvarsall.bat!
If you do not have Visual Studio 2017 or 2019 installed please install it or Build Tools for Visual Studio from here[https://visualstudio.microsoft.com/downloads/].''')
stderr_fname = 'output/' + fname + '.txt'
r = os.system('"' + vcvarsall + '" ' + ('x64' if platform.machine().endswith('64') else 'x86') + ' > nul && cl.exe /std:c++17 /MT /EHsc /nologo /we4239 ' + '/O2 '*enopt + fname + ' > ' + stderr_fname)
if r == 0:
ok += 1
os.remove(stderr_fname)
else:
print(' - error', end = '')
print()
total += 1
else:
sys.exit('Only win32 platform is supported so far!')
print(str(ok) + '/' + str(total) + ' files are OK')
|
StarcoderdataPython
|
4953236
|
from django.apps import AppConfig
class TheblogConfig(AppConfig):
name = 'theblog'
|
StarcoderdataPython
|
9608238
|
#!/usr/bin/env python
import os
import sys
from glob import glob
input = sys.argv[1]
if not os.path.isdir(input):
print 'Error - Not a directory - Please provide a directory as input. Exiting...'
sys.exit()
else:
os.chdir(input)
tiff_check = glob('*.tiff')
dpx_check = glob('*.dpx')
if len(dpx_check) > 0:
images = dpx_check
elif len(tiff_check) > 0:
images = tiff_check
else:
print 'no images found'
permission = ''
for i in images:
new_filename = ''
split_names = i.split('_')
if 'oe' in split_names[0]:
for x in split_names[1:-1]:
new_filename += x + '_'
new_filename += split_names[-1]
        if permission not in ('y', 'Y'):
permission = raw_input('\n**** Original filename = %s\n**** New filename = %s\n**** If this looks ok, please press Y, otherwise, type N\n' % ( i, new_filename))
while permission not in ('Y','y','N','n'):
permission = raw_input('\n**** Original filename = %s\n**** New filename = %s\n**** If this looks ok, please press Y, otherwise, type N\n' % ( i, new_filename))
if permission == 'n' or permission == 'N':
print 'Exiting at your command'
sys.exit()
elif permission =='y' or permission == 'Y':
os.rename(i, new_filename)
print '**** Renaming %s with %s' % (i, new_filename)
elif permission == 'y' or permission == 'Y':
os.rename(i, new_filename)
print '**** Renaming %s with %s' % (i, new_filename)
else:
print 'This does not need to be renamed - exiting...'
sys.exit()
|
StarcoderdataPython
|
9607915
|
from typing import *
from ..base import HPOBase
from ...utils import Metrics
from ....optim import BayesianOptimization
@HPOBase.register("bo")
class BayesianHPO(HPOBase):
@property
def is_sequential(self) -> bool:
return True
def _init_config(self, **kwargs):
self._bo_config = kwargs.get("bo_config", {})
bo_normalization = self._bo_config.setdefault("normalization", "cube")
if bo_normalization == "cube":
bo_norm_cfg = self._bo_config.setdefault("normalization_config", {})
bo_norm_cfg.setdefault("convert_only", False)
self._num_iter = kwargs.get("num_iter", 10)
self._num_warmup = kwargs.get("num_warmup", 10000)
self._init_points = kwargs.get("init_points", 5)
if self._init_points <= 1:
msg = f"init_points should larger than 1, {self._init_points} found"
raise ValueError(msg)
self._bo_core, self._iteration = None, 0
def _score(self, final_scores: Dict[str, float]) -> float:
return sum(
[
self._score_weights.setdefault(k, 1.0) * v * Metrics.sign_dict[k]
for k, v in final_scores.items()
]
)
def _sample_param(self) -> Union[None, Dict[str, Any]]:
self._iteration += 1
if self._bo_core is None:
params = self.param_generator.params
self._bo_core = BayesianOptimization(None, params, **self._bo_config)
if self._iteration <= self._init_points:
return self.param_generator.pop()
if not self._bo_core.space.is_empty:
nested = self.last_param
flattened = self.param_generator.flatten_nested(nested)
self._bo_core.register(
flattened,
self._score(self._get_scores(self.last_patterns)),
)
else:
for code, params in self.param_mapping.items():
patterns = self.patterns[code]
flattened = self.param_generator.flatten_nested(params)
self._bo_core.register(
flattened,
self._score(self._get_scores(patterns)),
)
flattened = self._bo_core.suggest(self._num_warmup, self._num_iter)
return self.param_generator.nest_flattened(flattened)
__all__ = ["BayesianHPO"]
|
StarcoderdataPython
|
11380014
|
from graph import canvas
from search import tools
def isValidCell(cell, visited, obstacles):
x, y = cell
if x < 0 or x >= canvas.WIDTH:
return False
if y < 0 or y >= canvas.HEIGHT:
return False
if cell in visited:
return False
for x, y in obstacles:
if cell[0] == x and cell[1] == y:
return False
return True
def dfs(start, end, obstacles=[], show_details=False):
# 1. create stack for DFS
# 2. visited contains all visited nodes
# 3. push start node to stack
# 4. pop a node from stack
stack = [(start, [start])]
visited = set()
mov = [(0, 1), (0, -1), (1, 0), (-1, 0)]
while stack:
(vertex, path) = stack.pop()
graph = list(filter(lambda x: isValidCell(x, visited, obstacles), [tuple(map(sum, zip(vertex, dir))) for dir in mov]))
if vertex not in visited:
if tools.cell_equal(vertex, end):
return path
visited.add(vertex)
for neighbor in graph:
stack.append((neighbor, path+[neighbor]))
if show_details:
canvas.draw_cell(vertex, canvas.COLOR.DARK_GREEN.value)
canvas.update()
return None
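# Usage sketch (assumes canvas.WIDTH/HEIGHT cover the grid and obstacles are (x, y) cells):
#   path = dfs((0, 0), (3, 3), obstacles=[(1, 1)])
#   # -> list of cells from start to end, or None if unreachable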
|
StarcoderdataPython
|
5171577
|
<reponame>deboradyankova/python_education
n = int(input())
lst_positive = []
lst_negative = []
for _ in range(n):
number = int(input())
if number >= 0:
lst_positive.append(number)
else:
lst_negative.append(number)
print(lst_positive)
print(lst_negative)
print(f'Count of positives: {len(lst_positive)}. Sum of negatives: {sum(lst_negative)}')
|
StarcoderdataPython
|
3544664
|
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time. Monotonic if possible.
"""
__all__ = ['time']
try:
# Patches standard time module.
# From https://pypi.python.org/pypi/Monotime.
import monotime
except ImportError:
pass
try:
    # From https://pypi.python.org/pypi/monotonic.
from monotonic import monotonic as time
except ImportError:
try:
# Monotime or Python 3.3+.
from time import monotonic as time
except ImportError:
# Not monotonic.
from time import time
|
StarcoderdataPython
|
3543648
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle.fluid as fluid
import paddle.fluid.incubate.fleet.base.role_maker as role_maker
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import fleet
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import StrategyFactory
class TestPyramidHashOpApi(unittest.TestCase):
def test_dist_geo_server_transpiler(self):
num_voc = 128
embed_dim = 64
x_shape, x_lod = [16, 10], [[3, 5, 2, 6]]
x = fluid.data(name='x', shape=x_shape, dtype='int32', lod_level=1)
hash_embd = fluid.contrib.layers.search_pyramid_hash(
input=x,
num_emb=embed_dim,
space_len=num_voc * embed_dim,
pyramid_layer=4,
rand_len=16,
drop_out_percent=0.5,
is_training=True,
use_filter=False,
white_list_len=6400,
black_list_len=2800,
seed=3,
lr=0.002,
param_attr=fluid.ParamAttr(
name="PyramidHash_emb_0",
learning_rate=0, ),
param_attr_wl=fluid.ParamAttr(
name="Filter",
learning_rate=0, ),
param_attr_bl=None,
distribute_update_vars=["PyramidHash_emb_0"],
name=None)
cost = fluid.layers.reduce_sum(hash_embd)
role = role_maker.UserDefinedRoleMaker(
current_id=0,
role=role_maker.Role.SERVER,
worker_num=2,
server_endpoints=["127.0.0.1:36011", "127.0.0.1:36012"])
fleet.init(role)
strategy = StrategyFactory.create_geo_strategy(5)
optimizer = fluid.optimizer.SGD(0.1)
optimizer = fleet.distributed_optimizer(optimizer, strategy)
optimizer.minimize(cost)
pserver_startup_program = fleet.startup_program
        pserver_main_program = fleet.main_program
if __name__ == "__main__":
unittest.main()
|
StarcoderdataPython
|
4969055
|
from wtforms import Form, TextField, SelectField
from wtforms.validators import DataRequired
class QueryForm(Form):
search_query = TextField('', validators=[DataRequired()], render_kw={"placeholder": "Your query here"})
search_category = SelectField('Search for', choices=[('pa', 'Paper / Author'), ('p', 'Paper'), ('a', 'Author')])
|
StarcoderdataPython
|
9780204
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
import cohesity_management_sdk.models.ad_object_meta_data
import cohesity_management_sdk.models.email_meta_data
import cohesity_management_sdk.models.file_version
import cohesity_management_sdk.models.universal_id
import cohesity_management_sdk.models.protection_source
import cohesity_management_sdk.models.one_drive_document_metadata
import cohesity_management_sdk.models.sharepoint_document_metadata
class FileSearchResult(object):
"""Implementation of the 'FileSearchResult' model.
Specifies details about the found file or folder.
Attributes:
ad_object_meta_data (AdObjectMetaData): Specifies details about the AD
objects.
document_type (string): Specifies the inferred document type.
email_meta_data (EmailMetaData): Specifies details about the emails
and the folder containing emails.
file_versions (list of FileVersion): Array of File Versions.
Specifies the different snapshot versions of a file or folder that
were captured at different times.
filename (string): Specifies the name of the found file or folder.
is_folder (bool): Specifies if the found item is a folder. If true,
the found item is a folder.
job_id (long|int): Specifies the Job id for the Protection Job that is
currently associated with object that contains the backed up file
or folder. If the file or folder was backed up on current Cohesity
Cluster, this field contains the id for the Job that captured the
object that contains the file or folder. If the file or folder was
backed up on a Primary Cluster and replicated to this Cohesity
Cluster, a new Inactive Job is created, the object that contains
the file or folder is now associated with new Inactive Job, and
this field contains the id of the new Inactive Job.
job_uid (UniversalId): Specifies the universal id of the Protection
Job that backed up the object that contains the file or folder.
one_drive_document_metadata (OneDriveDocumentMetadata): Specifies the
metadata for the OneDrive document.
protection_source (ProtectionSource): Specifies a generic structure
that represents a node in the Protection Source tree. Node details
will depend on the environment of the Protection Source.
registered_source_id (long|int): Specifies the id of the top-level
registered source (such as a vCenter Server) where the source
            object that contains the file or folder is stored.
sharepoint_document_metadata (SharepointDocumentMetadata): Specifies
the metadata about the Sharepoint documents.
snapshot_tags (list of string): Snapshot tags present on this
document.
source_id (long|int): Specifies the source id of the object that
contains the file or folder.
tags (list of string): Tags present on this document.
tags_to_snapshots_map (dict<object, list of int>): Mapping from
snapshot tags to.
mtype (TypeFileSearchResultEnum): Specifies the type of the file
            document such as kDirectory, kFile, etc.
view_box_id (long|int): Specifies the id of the Domain (View Box)
where the source object that contains the file or folder is
stored.
"""
# Create a mapping from Model property names to API property names
_names = {
"ad_object_meta_data":'adObjectMetaData',
"document_type":'documentType',
"email_meta_data":'emailMetaData',
"file_versions":'fileVersions',
"filename":'filename',
"is_folder":'isFolder',
"job_id":'jobId',
"job_uid":'jobUid',
"one_drive_document_metadata":'oneDriveDocumentMetadata',
"protection_source":'protectionSource',
"registered_source_id":'registeredSourceId',
"sharepoint_document_metadata":'sharepointDocumentMetadata',
"snapshot_tags":'snapshotTags',
"source_id":'sourceId',
"tags":'tags',
"tags_to_snapshots_map":'tagsToSnapshotsMap',
"mtype":'type',
"view_box_id":'viewBoxId'
}
def __init__(self,
ad_object_meta_data=None,
document_type=None,
email_meta_data=None,
file_versions=None,
filename=None,
is_folder=None,
job_id=None,
job_uid=None,
one_drive_document_metadata=None,
protection_source=None,
registered_source_id=None,
sharepoint_document_metadata=None,
snapshot_tags=None,
source_id=None,
tags=None,
tags_to_snapshots_map= None,
mtype=None,
view_box_id=None):
"""Constructor for the FileSearchResult class"""
# Initialize members of the class
self.ad_object_meta_data = ad_object_meta_data
self.document_type = document_type
self.email_meta_data = email_meta_data
self.file_versions = file_versions
self.filename = filename
self.is_folder = is_folder
self.job_id = job_id
self.job_uid = job_uid
self.one_drive_document_metadata = one_drive_document_metadata
self.protection_source = protection_source
self.registered_source_id = registered_source_id
self.sharepoint_document_metadata = sharepoint_document_metadata
self.snapshot_tags = snapshot_tags
self.source_id = source_id
self.tags = tags
self.tags_to_snapshots_map = tags_to_snapshots_map
self.mtype = mtype
self.view_box_id = view_box_id
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
ad_object_meta_data = cohesity_management_sdk.models.ad_object_meta_data.AdObjectMetaData.from_dictionary(dictionary.get('adObjectMetaData')) if dictionary.get('adObjectMetaData') else None
document_type = dictionary.get('documentType')
email_meta_data = cohesity_management_sdk.models.email_meta_data.EmailMetaData.from_dictionary(dictionary.get('emailMetaData')) if dictionary.get('emailMetaData') else None
file_versions = None
if dictionary.get('fileVersions') != None:
file_versions = list()
for structure in dictionary.get('fileVersions'):
file_versions.append(cohesity_management_sdk.models.file_version.FileVersion.from_dictionary(structure))
filename = dictionary.get('filename')
is_folder = dictionary.get('isFolder')
job_id = dictionary.get('jobId')
job_uid = cohesity_management_sdk.models.universal_id.UniversalId.from_dictionary(dictionary.get('jobUid')) if dictionary.get('jobUid') else None
protection_source = cohesity_management_sdk.models.protection_source.ProtectionSource.from_dictionary(dictionary.get('protectionSource')) if dictionary.get('protectionSource') else None
one_drive_document_metadata = cohesity_management_sdk.models.one_drive_document_metadata.OneDriveDocumentMetadata.from_dictionary(dictionary.get('oneDriveDocumentMetadata')) if dictionary.get('oneDriveDocumentMetadata') else None
registered_source_id = dictionary.get('registeredSourceId')
sharepoint_document_metadata = cohesity_management_sdk.models.sharepoint_document_metadata.SharepointDocumentMetadata.from_dictionary(dictionary.get('sharepointDocumentMetadata')) if dictionary.get('sharepointDocumentMetadata') else None
snapshot_tags = dictionary.get('snapshotTags', None)
source_id = dictionary.get('sourceId')
tags = dictionary.get('tags', None)
tags_to_snapshots_map = dictionary.get('tagsToSnapshotsMap', None)
mtype = dictionary.get('type')
view_box_id = dictionary.get('viewBoxId')
# Return an object of this model
return cls(ad_object_meta_data,
document_type,
email_meta_data,
file_versions,
filename,
is_folder,
job_id,
job_uid,
one_drive_document_metadata,
protection_source,
registered_source_id,
sharepoint_document_metadata,
snapshot_tags,
source_id,
                   tags,
                   tags_to_snapshots_map,
                   mtype,
view_box_id)
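# A minimal round-trip sketch; the dictionary below uses hypothetical values and
# only a few of the API property names listed in `_names`.
if __name__ == '__main__':
    raw = {'filename': 'report.pdf', 'isFolder': False, 'type': 'kFile', 'viewBoxId': 5}
    result = FileSearchResult.from_dictionary(raw)
    print(result.filename, result.is_folder, result.mtype, result.view_box_id)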
|
StarcoderdataPython
|
1840842
|
#!/usr/bin/env python3
import os, os.path
import shutil
import subprocess
SROOT = 'src'
DROOT = 'dist'
def main():
# setup
if not os.path.exists(DROOT):
os.mkdir(DROOT)
if os.path.exists(os.path.join(SROOT, '__javascript__')):
shutil.rmtree(os.path.join(SROOT, '__javascript__'))
# transpile src/
run('transcrypt -b -m --parent=.none src/main.py')
shutil.copy(os.path.join(SROOT, '__javascript__', 'main.min.js'), os.path.join(DROOT, 'main.min.js'))
shutil.copy(os.path.join(SROOT, '__javascript__', 'main.js'), os.path.join(DROOT, 'main.js'))
#########################
### Helper functions
def run(cmd):
print('\t' + cmd)
subprocess.run(cmd, shell=True, check=True)
#########################
### Start the program
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
1612682
|
from django import forms
from django.utils.safestring import mark_safe
from markupfield.widgets import MarkupTextarea
from .models import Nomination
class NominationForm(forms.ModelForm):
class Meta:
model = Nomination
fields = (
"name",
"email",
"previous_board_service",
"employer",
"other_affiliations",
"nomination_statement",
)
widgets = {
"nomination_statement": MarkupTextarea()
} # , "self_nomination": forms.CheckboxInput()}
help_texts = {
"name": "Name of the person you are nominating.",
"email": "Email address for the person you are nominating.",
"previous_board_service": "Has the person previously served on the PSF Board? If so what year(s)? Otherwise 'New board member'.",
"employer": "Nominee's current employer.",
"other_affiliations": "Any other relevant affiliations the Nominee has.",
"nomination_statement": "Markdown syntax supported.",
}
class NominationCreateForm(NominationForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super().__init__(*args, **kwargs)
self_nomination = forms.BooleanField(
required=False,
help_text="If you are nominating yourself, we will automatically associate the nomination with your python.org user.",
)
def clean_self_nomination(self):
data = self.cleaned_data["self_nomination"]
if data:
if not self.request.user.first_name or not self.request.user.last_name:
raise forms.ValidationError(
mark_safe(
'You must set your First and Last name in your <a href="/users/edit/">User Profile</a> to self nominate.'
)
)
return data
|
StarcoderdataPython
|
11215883
|
<reponame>ghrecommender/ghrecommender-backend
from rest_framework import views
from rest_framework.response import Response
from rest_framework_extensions.cache.decorators import cache_response
from recommendations.serializers import RecommendationSerializer
from core.utils import UserKeyConstructor
from .utils import get_stars, get_recommendations
__all__ = ['RecommendationsView']
class RecommendationsView(views.APIView):
@cache_response(key_func=UserKeyConstructor())
def get(self, request):
username = request.user.username
popular = get_stars(username) <= 30
recommendations = get_recommendations(username, count=100, popular=popular)
serializer = RecommendationSerializer(recommendations, many=True)
return Response(serializer.data)
|
StarcoderdataPython
|
1918911
|
<gh_stars>10-100
def no_more_commits_than_origin(project_folder, original_project_folder=None, *args, **kwargs):
if not original_project_folder:
return
if not project_folder.repo or not original_project_folder.repo:
return
# FIXME this check works incorrectly in case of
# new commit in original repo after student forked it
if project_folder.repo.count_commits() <= original_project_folder.repo.count_commits():
return ''
def commit_messages_from_blacklist(project_folder, bad_commit_messages, last_commits_to_check_amount, *args, **kwargs):
if not project_folder.repo:
return
for commit in project_folder.repo.iter_commits('master', max_count=last_commits_to_check_amount):
message = commit.message.lower().strip().strip('.\'"')
if message in bad_commit_messages:
return message
|
StarcoderdataPython
|
11397102
|
import inspect
import os
import argparse
import black
from pathlib import Path
INDENT = " " * 4
GENERATED_COMMENT = "# Generated content DO NOT EDIT\n"
def do_indent(text: str, indent: str):
return text.replace("\n", f"\n{indent}")
def function(obj, indent, text_signature=None):
if text_signature is None:
text_signature = obj.__text_signature__
string = ""
string += f"{indent}def {obj.__name__}{text_signature}:\n"
indent += INDENT
string += f'{indent}"""\n'
string += f"{indent}{do_indent(obj.__doc__, indent)}\n"
string += f'{indent}"""\n'
string += f"{indent}pass\n"
string += "\n"
string += "\n"
return string
def member_sort(member):
if inspect.isclass(member):
value = 10 + len(inspect.getmro(member))
else:
value = 1
return value
def fn_predicate(obj):
value = inspect.ismethoddescriptor(obj) or inspect.isbuiltin(obj)
if value:
return obj.__doc__ and obj.__text_signature__ and not obj.__name__.startswith("_")
if inspect.isgetsetdescriptor(obj):
return obj.__doc__ and not obj.__name__.startswith("_")
return False
def get_module_members(module):
members = [
member
for name, member in inspect.getmembers(module)
if not name.startswith("_") and not inspect.ismodule(member)
]
members.sort(key=member_sort)
return members
def pyi_file(obj, indent=""):
string = ""
if inspect.ismodule(obj):
string += GENERATED_COMMENT
members = get_module_members(obj)
for member in members:
string += pyi_file(member, indent)
elif inspect.isclass(obj):
indent += INDENT
mro = inspect.getmro(obj)
if len(mro) > 2:
inherit = f"({mro[1].__name__})"
else:
inherit = ""
string += f"class {obj.__name__}{inherit}:\n"
body = ""
if obj.__doc__:
body += f'{indent}"""\n{indent}{do_indent(obj.__doc__, indent)}\n{indent}"""\n'
fns = inspect.getmembers(obj, fn_predicate)
# Init
if obj.__text_signature__:
body += f"{indent}def __init__{obj.__text_signature__}:\n"
body += f"{indent+INDENT}pass\n"
body += "\n"
for (name, fn) in fns:
body += pyi_file(fn, indent=indent)
if not body:
body += f"{indent}pass\n"
string += body
string += "\n\n"
elif inspect.isbuiltin(obj):
string += f"{indent}@staticmethod\n"
string += function(obj, indent)
elif inspect.ismethoddescriptor(obj):
string += function(obj, indent)
elif inspect.isgetsetdescriptor(obj):
        # TODO it would be interesting to add the setter, maybe?
string += f"{indent}@property\n"
string += function(obj, indent, text_signature="(self)")
else:
raise Exception(f"Object {obj} is not supported")
return string
def py_file(module, origin):
members = get_module_members(module)
string = GENERATED_COMMENT
string += f"from .. import {origin}\n"
string += "\n"
for member in members:
name = member.__name__
string += f"{name} = {origin}.{name}\n"
return string
def do_black(content, is_pyi):
mode = black.Mode(
target_versions={black.TargetVersion.PY36},
line_length=100,
is_pyi=is_pyi,
string_normalization=True,
experimental_string_processing=False,
)
try:
return black.format_file_contents(content, fast=True, mode=mode)
except black.NothingChanged:
return content
def write(module, directory, origin, check=False):
submodules = [
(name, member) for name, member in inspect.getmembers(module) if inspect.ismodule(member)
]
filename = os.path.join(directory, "__init__.pyi")
pyi_content = pyi_file(module)
pyi_content = do_black(pyi_content, is_pyi=True)
os.makedirs(directory, exist_ok=True)
if check:
with open(filename, "r") as f:
data = f.read()
assert (
data == pyi_content
), f"The content of {filename} seems outdated, please run `python stub.py`"
else:
with open(filename, "w") as f:
f.write(pyi_content)
filename = os.path.join(directory, "__init__.py")
py_content = py_file(module, origin)
py_content = do_black(py_content, is_pyi=False)
os.makedirs(directory, exist_ok=True)
is_auto = False
if not os.path.exists(filename):
is_auto = True
else:
with open(filename, "r") as f:
line = f.readline()
if line == GENERATED_COMMENT:
is_auto = True
if is_auto:
if check:
with open(filename, "r") as f:
data = f.read()
assert (
data == py_content
), f"The content of {filename} seems outdated, please run `python stub.py`"
else:
with open(filename, "w") as f:
f.write(py_content)
for name, submodule in submodules:
write(submodule, os.path.join(directory, name), f"{name}", check=check)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--check", action="store_true")
args = parser.parse_args()
import tokenizers
write(tokenizers.tokenizers, "py_src/tokenizers/", "tokenizers", check=args.check)
|
StarcoderdataPython
|
4907700
|
import copy
import logging
from typing import TYPE_CHECKING
import naff.api.events as events
from naff.client.const import logger_name, MISSING
from ._template import EventMixinTemplate, Processor
from naff.models import GuildIntegration, Sticker, to_snowflake
from naff.api.events.discord import (
GuildEmojisUpdate,
IntegrationCreate,
IntegrationUpdate,
IntegrationDelete,
BanCreate,
BanRemove,
GuildStickersUpdate,
WebhooksUpdate,
)
if TYPE_CHECKING:
from naff.api.events import RawGatewayEvent
__all__ = ("GuildEvents",)
log = logging.getLogger(logger_name)
class GuildEvents(EventMixinTemplate):
@Processor.define()
async def _on_raw_guild_create(self, event: "RawGatewayEvent") -> None:
"""
Automatically cache a guild upon GUILD_CREATE event from gateway.
Args:
event: raw guild create event
"""
guild = self.cache.place_guild_data(event.data)
self._user._guild_ids.add(to_snowflake(event.data.get("id"))) # noqa : w0212
self._guild_event.set()
if self.fetch_members: # noqa
# delays events until chunking has completed
await guild.chunk_guild(presences=True)
self.dispatch(events.GuildJoin(guild))
@Processor.define()
async def _on_raw_guild_update(self, event: "RawGatewayEvent") -> None:
before = copy.copy(await self.cache.fetch_guild(event.data.get("id")))
self.dispatch(events.GuildUpdate(before or MISSING, self.cache.place_guild_data(event.data)))
@Processor.define()
async def _on_raw_guild_delete(self, event: "RawGatewayEvent") -> None:
guild_id = int(event.data.get("id"))
if event.data.get("unavailable", False):
self.dispatch(
events.GuildUnavailable(
guild_id,
self.cache.get_guild(guild_id) or MISSING,
)
)
else:
# noinspection PyProtectedMember
if guild_id in self._user._guild_ids:
# noinspection PyProtectedMember
self._user._guild_ids.remove(guild_id)
# get the guild right before deleting it
guild = self.cache.get_guild(guild_id)
self.cache.delete_guild(guild_id)
self.dispatch(
events.GuildLeft(
guild_id,
guild or MISSING,
)
)
@Processor.define()
async def _on_raw_guild_ban_add(self, event: "RawGatewayEvent") -> None:
self.dispatch(BanCreate(event.data.get("guild_id"), self.cache.place_user_data(event.data.get("user"))))
@Processor.define()
async def _on_raw_guild_ban_remove(self, event: "RawGatewayEvent") -> None:
self.dispatch(BanRemove(event.data.get("guild_id"), self.cache.place_user_data(event.data.get("user"))))
@Processor.define()
async def _on_raw_integration_create(self, event: "RawGatewayEvent") -> None:
self.dispatch(IntegrationCreate(GuildIntegration.from_dict(event.data, self))) # type: ignore
@Processor.define()
async def _on_raw_integration_update(self, event: "RawGatewayEvent") -> None:
self.dispatch(IntegrationUpdate(GuildIntegration.from_dict(event.data, self))) # type: ignore
@Processor.define()
async def _on_raw_integration_delete(self, event: "RawGatewayEvent") -> None:
self.dispatch(
IntegrationDelete(event.data.get("guild_id"), event.data.get("id"), event.data.get("application_id"))
)
@Processor.define()
async def _on_raw_guild_emojis_update(self, event: "RawGatewayEvent") -> None:
guild_id = event.data.get("guild_id")
emojis = event.data.get("emojis")
if self.cache.emoji_cache:
before = [copy.copy(self.cache.get_emoji(emoji["id"])) for emoji in emojis]
else:
before = []
after = [self.cache.place_emoji_data(guild_id, emoji) for emoji in emojis]
self.dispatch(
GuildEmojisUpdate(
guild_id=guild_id,
before=before,
after=after,
)
)
@Processor.define()
async def _on_raw_guild_stickers_update(self, event: "RawGatewayEvent") -> None:
self.dispatch(
GuildStickersUpdate(event.data.get("guild_id"), Sticker.from_list(event.data.get("stickers", []), self))
)
@Processor.define()
async def _on_raw_webhook_update(self, event: "RawGatewayEvent") -> None:
self.dispatch(WebhooksUpdate(event.data.get("guild_id"), event.data.get("channel_id")))
|
StarcoderdataPython
|
3236996
|
<filename>app/api/event_sub_topics.py
from flask_rest_jsonapi import ResourceDetail, ResourceList, ResourceRelationship
from app.api.bootstrap import api
from app.api.custom_placeholders import CustomPlaceholder
from app.api.helpers.db import safe_query
from app.api.helpers.exceptions import ForbiddenException
from app.api.helpers.permission_manager import has_access
from app.api.helpers.utilities import require_relationship
from app.api.schema.event_sub_topics import EventSubTopicSchema
from app.models import db
from app.models.event import Event
from app.models.event_sub_topic import EventSubTopic
from app.models.event_topic import EventTopic
class EventSubTopicListPost(ResourceList):
"""
Create event sub topics
"""
def before_post(self, args, kwargs, data):
"""
before post method to check for required relationship and proper permission
:param args:
:param kwargs:
:param data:
:return:
"""
require_relationship(['event_topic'], data)
if not has_access('is_admin'):
raise ForbiddenException({'source': ''}, 'Admin access is required.')
view_kwargs = True
methods = ['POST', ]
schema = EventSubTopicSchema
data_layer = {'session': db.session,
'model': EventSubTopic}
class EventSubTopicList(ResourceList):
"""
List event sub topics
"""
def query(self, view_kwargs):
"""
query method for event sub-topics list
:param view_kwargs:
:return:
"""
query_ = self.session.query(EventSubTopic)
if view_kwargs.get('event_topic_id'):
event_topic = safe_query(self, EventTopic, 'id', view_kwargs['event_topic_id'], 'event_topic_id')
query_ = query_.join(EventTopic).filter(EventTopic.id == event_topic.id)
return query_
view_kwargs = True
methods = ['GET', ]
schema = EventSubTopicSchema
data_layer = {'session': db.session,
'model': EventSubTopic,
'methods': {
'query': query
}}
class EventSubTopicDetail(ResourceDetail):
"""
Event sub topic detail by id
"""
def before_get_object(self, view_kwargs):
"""
before get method to get the resource id to fetch details
:param view_kwargs:
:return:
"""
if view_kwargs.get('event_identifier'):
event = safe_query(self, Event, 'identifier', view_kwargs['event_identifier'], 'event_identifier')
view_kwargs['event_id'] = event.id
if view_kwargs.get('event_id'):
event = safe_query(self, Event, 'id', view_kwargs['event_id'], 'event_id')
if event.event_sub_topic_id:
view_kwargs['id'] = event.event_sub_topic_id
else:
view_kwargs['id'] = None
if view_kwargs.get('custom_placeholder_id'):
custom_placeholder = safe_query(self, CustomPlaceholder, 'id', view_kwargs['custom_placeholder_id'],
'custom_placeholder_id')
if custom_placeholder.event_sub_topic_id:
view_kwargs['id'] = custom_placeholder.event_sub_topic_id
else:
view_kwargs['id'] = None
decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
schema = EventSubTopicSchema
data_layer = {'session': db.session,
'model': EventSubTopic,
'methods': {
'before_get_object': before_get_object
}}
class EventSubTopicRelationshipRequired(ResourceRelationship):
"""
Event sub topic Relationship
"""
decorators = (api.has_permission('is_admin', methods="PATCH"),)
methods = ['GET', 'PATCH']
schema = EventSubTopicSchema
data_layer = {'session': db.session,
'model': EventSubTopic}
class EventSubTopicRelationshipOptional(ResourceRelationship):
"""
Event sub topic Relationship
"""
decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
schema = EventSubTopicSchema
data_layer = {'session': db.session,
'model': EventSubTopic}
|
StarcoderdataPython
|
4941204
|
<reponame>LumaKernel/dotfiles
import os
from powerline_shell.utils import ThreadedSegment
definitions = {
'fish': {
'abbr': 'fi',
'color': 157,
},
'bash': {
'abbr': 'ba',
'color': 221,
},
}
class Segment(ThreadedSegment):
def add_to_powerline(self):
self.join()
if 'SHELL_NAME' not in os.environ:
return
shell_name = os.environ.get('SHELL_NAME', '')
this_definition = definitions.get(shell_name, {})
self.powerline.append(
' ' + this_definition.get('abbr', '?') + ' ',
0,
this_definition.get('color', 255),
)
|
StarcoderdataPython
|
1933814
|
from flask import request
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.config import db
from zeus.models import Author, Build, Email, Repository, Source
from zeus.pubsub.utils import publish
from .base_repository import BaseRepositoryResource
from ..schemas import BuildSchema, BuildCreateSchema
build_schema = BuildSchema(strict=True)
builds_schema = BuildSchema(many=True, strict=True, exclude=["repository"])
class RepositoryBuildsResource(BaseRepositoryResource):
def select_resource_for_update(self):
return False
def get(self, repo: Repository):
"""
Return a list of builds for the given repository.
"""
user = auth.get_current_user()
query = (
Build.query.options(
joinedload("source"),
joinedload("source").joinedload("author"),
joinedload("source").joinedload("revision"),
subqueryload_all("stats"),
)
.filter(Build.repository_id == repo.id)
.order_by(Build.number.desc())
)
show = request.args.get("show")
if show == "mine":
query = query.filter(
Source.author_id.in_(
db.session.query(Author.id).filter(
Author.email.in_(
db.session.query(Email.email).filter(
Email.user_id == user.id
)
)
)
)
)
return self.paginate_with_schema(builds_schema, query)
def post(self, repo: Repository):
"""
Create a new build.
"""
schema = BuildCreateSchema(strict=True, context={"repository": repo})
result = self.schema_from_request(schema, partial=True)
if result.errors:
return self.respond(result.errors, 403)
data = result.data
# TODO(dcramer): only if we create a source via a patch will we need the author
# author_data = data.pop('author')
# if author_data.get('email'):
# author = Author.query.filter(
# Author.repository_id == repo.id, Author.email == author_data['email']
# ).first()
# else:
# author = None
# if not author:
# author = Author(repository_id=repo.id, **author_data)
# db.session.add(author)
# db.session.flush()
# TODO(dcramer): need to handle patch case yet
source = (
Source.query.options(joinedload("author"), joinedload("revision"))
.filter(
Source.revision_sha == data.pop("ref"), Source.repository_id == repo.id
)
.first()
)
build = Build(repository=repo, **data)
# TODO(dcramer): we should convert source in the schema
build.source = source
# build.source_id = source.id
build.author = source.author
if not source.patch_id:
if not build.label:
build.label = source.revision.message.split("\n")[0]
if not build.label:
return self.error("missing build label")
db.session.add(build)
try:
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
result = build_schema.dump(build)
assert not result.errors, "this should never happen"
publish("builds", "build.create", result.data)
return self.respond(result.data, 200)
|
StarcoderdataPython
|
1912060
|
<filename>RandomApps/python/pytohtml.py
def inputMatrix(m,n, vektori = False):
if vektori:
border = "border-right: 2px solid black; border-left: 2px solid black; border-top: none; border-bottom: none;"
else:
border = "border: none;"
tmp = ""
tmp+= """
<div class="container">
<div style="border-left: 2px solid black; border-right: 2px solid black; display: inline-block">
"""
for i in range(m):
for j in range(n):
tmp+= """
<input type="text" name="%s" style="width: 30px; height: 30px; %s text-align: center"/>""" % (i, border)# 1 px dotted grey"/>""" % i
tmp+= "<br>\n"
tmp+="""
</div>
"""
tmp+="""
<div class="row">
<input type="submit" value="Unesi Vektore">
</div>
</div>
"""
return tmp
def outputMatrix(a, vektori = False):
m,n = a.shape
if vektori:
border = "border-right: 2px solid black; border-left: 2px solid black; border-top: none; border-bottom: none;"
else:
border = "border: none:"
tmp = ""
tmp+= """
<div class="container">
<div style="border-left: 2px solid black; border-right: 2px solid black; display: inline-block">
"""
for i in range(m):
for j in range(n):
tmp+= """
<label style="width: 30px; height: 30px; %s text-align: center"> %s </label>""" % (border, "{0:.1f}".format(a[i,j]))# 1 px dotted grey"/>""" % i
tmp+= "<br>\n"
tmp+="""
</div>
"""
tmp+="""
</div>
"""
return tmp
def textMat(a, tekst, vektori = False):
m,n = a.shape
if vektori:
border = "border-right: 2px solid black; border-left: 2px solid black; border-top: none; border-bottom: none;"
else:
border = "border: none:"
tmp = ""
tmp+= """
<div class="container">"""
tmp += """
<div class="row">
<h3><span class="label label-default"> %s </span></h1>
</div>""" % tekst
tmp+= """
<div style="border-left: 2px solid black; border-right: 2px solid black; display: inline-block">
"""
for i in range(m):
for j in range(n):
tmp+= """
<label style="width: 30px; height: 30px; %s text-align: center"> %s </label>""" % (border, "{0:.1f}".format(a[i,j]))# 1 px dotted grey"/>""" % i
tmp+= "<br>\n"
tmp+="""
</div>
"""
tmp+="""
</div>
"""
return tmp
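# A minimal usage sketch; the 2x2 identity matrix below is only an illustration,
# and numpy is assumed because outputMatrix/textMat index `a` with a[i, j].
if __name__ == '__main__':
    import numpy as np
    print(inputMatrix(2, 2, vektori=True))
    print(outputMatrix(np.eye(2), vektori=True))
    print(textMat(np.eye(2), 'Result', vektori=True))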
|
StarcoderdataPython
|
9640110
|
<filename>Udacity/utils/utils.py
"Utils for python develoment"
# Author: <NAME>
# Start Date : 20th December 2014
import time
import numpy
def timed_call(func, *args):
"Call function with args, return time and result"
t0 = time.clock()
result = func(*args)
t1 = time.clock()
return t1 - t0, result
def timed_calls(n, func, *args):
" call func n times with args, and print stats \
else run for n seconds "
if isinstance(n, int):
times = [timed_call(func, *args)[0] for _ in range(n)]
else:
times = []
while sum(times) < n:
times.append(timed_call(func, *args)[0])
    return min(times), numpy.mean(times), max(times)
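# A minimal usage sketch; sorting a small list is only a stand-in workload
# (note that timed_call relies on time.clock, which older Python versions provide).
if __name__ == '__main__':
    elapsed, result = timed_call(sorted, list(range(1000)))
    print('single call took {:.6f} seconds'.format(elapsed))
    print('min/mean/max over 5 calls:', timed_calls(5, sorted, list(range(1000))))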
|
StarcoderdataPython
|
8005767
|
<reponame>flying-sausages/iommi
from django.urls import path
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.views.decorators.csrf import csrf_exempt
from examples import (
example_adding_decorator,
example_links,
)
from examples.models import (
TBar,
TFoo,
)
from examples.views import ExamplesPage
from iommi import (
Form,
html,
Page,
Table,
)
examples = []
example = example_adding_decorator(examples)
@example(gettext('Standard example'))
class HelloWorldPage(Page):
h1 = html.h1('Hello world!')
p = html.p('This is an iommi page!')
@example(gettext('View with some calculation to do before making the page'))
def page_view_example_2(request):
math_result = 1 + 1
class MathPage(HelloWorldPage):
result = html.pre(format_html("Math result: 1+1={}", math_result))
return MathPage()
@example(gettext('Further specializing an already defined page'))
def page_view_example_3(request):
math_result = 1 + 1
return HelloWorldPage(
parts__result=html.pre(format_html("Math result: 1+1={}", math_result)),
)
@example(gettext('Busy page with different components'))
def page_view_example_4(request):
class BusyPage(Page):
tfoo = Table(auto__model=TFoo, page_size=5, columns__name__filter=dict(include=True, field__include=True))
tbar = Table(auto__model=TBar, page_size=5, columns__b__filter=dict(include=True, field__include=True))
create_tbar = Form.create(auto__model=TBar)
return BusyPage()
class IndexPage(ExamplesPage):
header = html.h1('Page examples')
description = html.p('Some examples of iommi Page')
examples = example_links(examples)
@csrf_exempt
def page_live(request):
return Page(
parts__foo='Test',
parts__circle=mark_safe('<svg><circle cx=50 cy=50 r=40 stroke=green fill=yellow stroke-width=4></svg>'),
parts__bar=Table(auto__model=TFoo, page_size=2),
)
urlpatterns = [
path('', IndexPage().as_view()),
path('example_1/', HelloWorldPage().as_view()),
path('example_2/', page_view_example_2),
path('example_3/', page_view_example_3),
path('example_4/', page_view_example_4),
path('live/', page_live),
]
|
StarcoderdataPython
|
6446718
|
<gh_stars>0
import RPi.GPIO as GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
class Device:
def __init__(self, pin, device_type):
self.pin = pin
self.type = device_type
def on(self):
GPIO.setup(self.pin, GPIO.OUT)
if self.type == "led":
GPIO.output(self.pin, GPIO.HIGH)
else:
GPIO.output(self.pin, GPIO.LOW)
def off(self):
GPIO.setup(self.pin, GPIO.OUT)
if self.type == "led":
GPIO.output(self.pin, GPIO.LOW)
else:
GPIO.output(self.pin, GPIO.HIGH)
|
StarcoderdataPython
|
3346248
|
# encoding: utf-8
# Copyright 1999-2017 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import logging
import sys
from odps.df import DataFrame
from odps.config import options
from odps.ml.utils import TEMP_TABLE_PREFIX
from odps.ml.algolib import *
from odps.ml.algolib.loader import load_classifiers
from odps.ml.tests.base import MLTestBase, tn
logger = logging.getLogger(__name__)
IONOSPHERE_TABLE = tn('pyodps_test_ml_ionosphere')
MODEL_NAME = tn('pyodps_test_out_model')
class TestAlgoBuild(MLTestBase):
def setUp(self):
super(TestAlgoBuild, self).setUp()
self.create_ionosphere(IONOSPHERE_TABLE)
self.register_algorithm()
def tearDown(self):
super(TestAlgoBuild, self).tearDown()
def register_algorithm(self):
algo_def = XflowAlgorithmDef('MyNaiveBayes', project='algo_public', xflow_name='NaiveBayes')
algo_def.add_port(PortDef.build_data_input()).add_port(PortDef.build_model_output())
algo_def.add_param(ParamDef.build_input_table()).add_param(ParamDef.build_input_partitions())
algo_def.add_param(ParamDef.build_model_name())
algo_def.add_param(ParamDef.build_feature_col_names())
algo_def.add_param(ParamDef.build_label_col_name())
load_classifiers(algo_def, sys.modules[__name__])
def test_custom_algo(self):
options.ml.dry_run = True
df = DataFrame(self.odps.get_table(IONOSPHERE_TABLE))
splited = df.split(0.6)
labeled_data = splited[0].label_field("class")
naive_bayes = MyNaiveBayes()
model = naive_bayes.train(labeled_data)._add_case(self.gen_check_params_case(
{'labelColName': 'class', 'featureColNames': ','.join('a%02d' % i for i in range(1, 35)),
'modelName': MODEL_NAME, 'inputTableName': TEMP_TABLE_PREFIX + '_split'}))
model.persist(MODEL_NAME)
predicted = model.predict(splited[1])
predicted.persist(MODEL_NAME)
|
StarcoderdataPython
|
5002436
|
<filename>setup.py
#!/usr/bin/env python
import os
from distutils.core import setup
import pubchempy
if os.path.exists('README.txt'):
long_description = open('README.txt').read()
else:
long_description = open('README.md').read()
setup(
name='PubChemPy',
version=pubchempy.__version__,
author=pubchempy.__author__,
author_email=pubchempy.__email__,
license=pubchempy.__license__,
url='https://github.com/mcs07/PubChemPy',
py_modules=['pubchempy'],
description='A simple Python wrapper around the PubChem PUG REST API.',
long_description=long_description,
keywords='pubchem python rest api pug',
classifiers=[
'Intended Audience :: Science/Research',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
StarcoderdataPython
|
1912181
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.40
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# This file is compatible with both classic and new-style classes.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_EST_Item', [dirname(__file__)])
except ImportError:
import _EST_Item
return _EST_Item
if fp is not None:
try:
_mod = imp.load_module('_EST_Item', fp, pathname, description)
finally:
fp.close()
return _mod
_EST_Item = swig_import_helper()
del swig_import_helper
else:
import _EST_Item
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
class EST_Item(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, EST_Item, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, EST_Item, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _EST_Item.new_EST_Item(*args)
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _EST_Item.delete_EST_Item
__del__ = lambda self : None;
def F(self, *args): return _EST_Item.EST_Item_F(self, *args)
def S(self, *args): return _EST_Item.EST_Item_S(self, *args)
def I(self, *args): return _EST_Item.EST_Item_I(self, *args)
def A(self, *args): return _EST_Item.EST_Item_A(self, *args)
def set(self, *args): return _EST_Item.EST_Item_set(self, *args)
def set_function(self, *args): return _EST_Item.EST_Item_set_function(self, *args)
def f_remove(self, *args): return _EST_Item.EST_Item_f_remove(self, *args)
def evaluate_features(self): return _EST_Item.EST_Item_evaluate_features(self)
def f_present(self, *args): return _EST_Item.EST_Item_f_present(self, *args)
def length(self): return _EST_Item.EST_Item_length(self)
def as_relation(self, *args): return _EST_Item.EST_Item_as_relation(self, *args)
def in_relation(self, *args): return _EST_Item.EST_Item_in_relation(self, *args)
def relation_name(self): return _EST_Item.EST_Item_relation_name(self)
def relation(self): return _EST_Item.EST_Item_relation(self)
def same_item(self, *args): return _EST_Item.EST_Item_same_item(self, *args)
def unref_all(self): return _EST_Item.EST_Item_unref_all(self)
def prepend_daughter(self, *args): return _EST_Item.EST_Item_prepend_daughter(self, *args)
def append_daughter(self, *args): return _EST_Item.EST_Item_append_daughter(self, *args)
def daughter1(self): return _EST_Item.EST_Item_daughter1(self)
def daughtern(self): return _EST_Item.EST_Item_daughtern(self)
def next_sibling(self): return _EST_Item.EST_Item_next_sibling(self)
def prev_sibling(self): return _EST_Item.EST_Item_prev_sibling(self)
def parent(self): return _EST_Item.EST_Item_parent(self)
def features(self): return _EST_Item.EST_Item_features(self)
def leafs(self): return _EST_Item.EST_Item_leafs(self)
def daughters(self): return _EST_Item.EST_Item_daughters(self)
EST_Item_swigregister = _EST_Item.EST_Item_swigregister
EST_Item_swigregister(EST_Item)
def next_item(*args):
return _EST_Item.next_item(*args)
next_item = _EST_Item.next_item
def first_leaf(*args):
return _EST_Item.first_leaf(*args)
first_leaf = _EST_Item.first_leaf
def last_leaf(*args):
return _EST_Item.last_leaf(*args)
last_leaf = _EST_Item.last_leaf
def next_leaf(*args):
return _EST_Item.next_leaf(*args)
next_leaf = _EST_Item.next_leaf
def num_leaves(*args):
return _EST_Item.num_leaves(*args)
num_leaves = _EST_Item.num_leaves
def remove_item(*args):
return _EST_Item.remove_item(*args)
remove_item = _EST_Item.remove_item
def copy_node_tree(*args):
return _EST_Item.copy_node_tree(*args)
copy_node_tree = _EST_Item.copy_node_tree
def copy_node_tree_contents(*args):
return _EST_Item.copy_node_tree_contents(*args)
copy_node_tree_contents = _EST_Item.copy_node_tree_contents
def item_jump(*args):
return _EST_Item.item_jump(*args)
item_jump = _EST_Item.item_jump
|
StarcoderdataPython
|
3442409
|
import torch
import torch.nn as nn
# model
class net_PixelShuffle(nn.Module):
def __init__(self, upscale_factor):
super().__init__()
self.op = torch.nn.PixelShuffle(upscale_factor)
def forward(self, input):
return self.op(input)
_model_ = net_PixelShuffle(2)
# dummy input for onnx generation
_dummy_ = torch.randn(1, 8, 3, 3)
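# A minimal sanity check: PixelShuffle(2) redistributes the 8 channels into a 2x
# upscaled spatial grid, so (1, 8, 3, 3) becomes (1, 2, 6, 6).
if __name__ == '__main__':
    print(_model_(_dummy_).shape)  # expected: torch.Size([1, 2, 6, 6])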
|
StarcoderdataPython
|
9634994
|
import cv2
import numpy as np
from domestik import px
skala = px.jendela.skala
h, w = 90, 160
judul = 'sembarang'
skala(judul, [900,1600])
warna = np.random.randint(256, size=3)
dasar = np.zeros([h,w, 3]) + warna
dasar = dasar.astype(np.uint8)
cv2.imshow(judul, dasar)
cv2.displayStatusBar(judul, str(warna))
cv2.waitKey()
cv2.destroyAllWindows()
|
StarcoderdataPython
|
12807948
|
<reponame>LightArrowsEXE/Encoding-Projects
from typing import Tuple, Union
import vapoursynth as vs
from lvsfunc.misc import source
from vardautomation import FileInfo, PresetBD, PresetFLAC, VPath
from project_module import encoder as enc
from project_module import flt
core = vs.core
# Sources
JP_BD = FileInfo(r'BDMV/DISC3/BDMV/STREAM/00001.m2ts', (24, -24),
idx=lambda x: source(x, cachedir=''), preset=[PresetBD, PresetFLAC])
JP_BD.name_file_final = VPath(fr"premux/{JP_BD.name} (Premux).mkv")
JP_BD.a_src_cut = VPath(JP_BD.name)
JP_BD.do_qpfile = True
def filterchain() -> Union[vs.VideoNode, Tuple[vs.VideoNode, ...]]:
"""Main filterchain"""
import havsfunc as haf
import lvsfunc as lvf
import vardefunc as vdf
from adptvgrnMod import adptvgrnMod
from ccd import ccd
from vsutil import depth
src = JP_BD.clip_cut
src = depth(src, 16)
scaled, descale_mask = flt.rescaler(src, height=855)
denoise_y = core.knlm.KNLMeansCL(scaled, d=2, a=3, h=0.3)
denoise_uv = ccd(denoise_y, threshold=7, matrix='709')
stab = haf.GSMC(denoise_uv, radius=1, thSAD=200, planes=[0])
decs = vdf.noise.decsiz(stab, sigmaS=8, min_in=200 << 8, max_in=232 << 8)
aa_weak = lvf.aa.nneedi3_clamp(decs, strength=4)
aa_strong = lvf.sraa(decs, rfactor=1.6)
aa_clamp = lvf.aa.clamp_aa(decs, aa_weak, aa_strong, strength=2)
halo_mask = lvf.mask.halo_mask(aa_clamp)
darken = flt.line_darkening(aa_clamp, strength=0.35)
dehalo = core.std.MaskedMerge(darken, lvf.dehalo.bidehalo(darken, sigmaS_final=1.2, sigmaR=11/255), halo_mask)
merged_credits = core.std.MaskedMerge(dehalo, src, descale_mask)
deband = flt.masked_f3kdb(merged_credits, rad=21, thr=[28, 24], grain=[32, 16])
grain: vs.VideoNode = adptvgrnMod(deband, seed=42069, strength=0.25, luma_scaling=10,
size=1.35, sharp=80, grain_chroma=False)
return grain
if __name__ == '__main__':
FILTERED = filterchain()
enc.Encoder(JP_BD, FILTERED).run(clean_up=True, make_comp=True) # type: ignore
elif __name__ == '__vapoursynth__':
FILTERED = filterchain()
if not isinstance(FILTERED, vs.VideoNode):
raise ImportError(
f"Input clip has multiple output nodes ({len(FILTERED)})! Please output just 1 clip"
)
else:
enc.dither_down(FILTERED).set_output(0)
else:
JP_BD.clip_cut.std.SetFrameProp('node', intval=0).set_output(0)
FILTERED = filterchain()
if not isinstance(FILTERED, vs.VideoNode):
for i, clip_filtered in enumerate(FILTERED, start=1):
clip_filtered.std.SetFrameProp('node', intval=i).set_output(i)
else:
FILTERED.std.SetFrameProp('node', intval=1).set_output(1)
|
StarcoderdataPython
|
11321251
|
<gh_stars>100-1000
import wave
import pyaudio
class RecordAudio:
def __init__(self):
        # Recording parameters
self.chunk = 1024
self.format = pyaudio.paInt16
self.channels = 1
self.rate = 16000
        # Open the audio input stream
self.p = pyaudio.PyAudio()
self.stream = self.p.open(format=self.format,
channels=self.channels,
rate=self.rate,
input=True,
frames_per_buffer=self.chunk)
def record(self, output_path="audio/temp.wav", record_seconds=3):
"""
        Record audio from the microphone.
        :param output_path: path to save the recording to, with a .wav extension
        :param record_seconds: recording length in seconds, 3 by default
        :return: path of the recorded audio file
        """
        input("Press Enter to start recording; recording for 3 seconds: ")
        print("Recording started......")
frames = []
for i in range(0, int(self.rate / self.chunk * record_seconds)):
data = self.stream.read(self.chunk)
frames.append(data)
print("录音已结束!")
wf = wave.open(output_path, 'wb')
wf.setnchannels(self.channels)
wf.setsampwidth(self.p.get_sample_size(self.format))
wf.setframerate(self.rate)
wf.writeframes(b''.join(frames))
wf.close()
return output_path
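# A minimal usage sketch; "audio/demo.wav" is just an assumed output path and the
# target directory (and a working microphone) are expected to exist.
if __name__ == '__main__':
    recorder = RecordAudio()
    saved_path = recorder.record(output_path="audio/demo.wav", record_seconds=3)
    print(saved_path)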
|
StarcoderdataPython
|
5013878
|
from datetime import datetime, timedelta
class ProgressLogger:
prev_print_at = datetime.now()
def log(self, prefix, count, total=None):
if self.prev_print_at > datetime.now() - timedelta(seconds=1):
return
print(
"{}... {:03.2f}{}".format(
prefix,
count * 100 / total if total else count,
'%' if total else ''
), end='\r'
)
self.prev_print_at = datetime.now()
|
StarcoderdataPython
|
5087920
|
# coding=utf-8
# Copyright 2019 Deepmind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single file script for doing a quick evaluation of a model.
This script is called by run.sh.
Usage:
user@host:/path/to/deepmind_research$ unsupervised_adversarial_training/run.sh
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from absl import app
from absl import flags
import cleverhans
from cleverhans import attacks
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import math_grad
import tensorflow_hub as hub
UAT_HUB_URL = ('https://tfhub.dev/deepmind/unsupervised-adversarial-training/'
'cifar10/wrn_106/1')
FLAGS = flags.FLAGS
flags.DEFINE_enum('attack_fn_name', 'fgsm', ['fgsm', 'none'],
'Name of the attack method to use.')
flags.DEFINE_float('epsilon_attack', 8.0 / 255,
'Maximum allowable perturbation size, between 0 and 1.')
flags.DEFINE_integer('num_steps', 20, 'Number of attack iterations.')
flags.DEFINE_integer('num_batches', 100, 'Number of batches to evaluate.')
flags.DEFINE_integer('batch_size', 32, 'Batch size.')
flags.DEFINE_integer('skip_batches', 0,
'Controls index of start image. This can be used to '
'evaluate the model on different subsets of the test set.')
flags.DEFINE_float('learning_rate', 0.003, 'Attack optimizer learning rate.')
def _top_1_accuracy(logits, labels):
return tf.reduce_mean(tf.cast(tf.nn.in_top_k(logits, labels, 1), tf.float32))
def make_classifier():
model = hub.Module(UAT_HUB_URL)
def classifier(x):
x = _cifar_meanstd_normalize(x)
model_input = dict(x=x, decay_rate=0.1, prefix='default')
return model(model_input)
return classifier
def eval_cifar():
"""Evaluate an adversarially trained model."""
attack_fn_name = FLAGS.attack_fn_name
total_batches = FLAGS.num_batches
batch_size = FLAGS.batch_size
# Note that a `classifier` is a function mapping [0,1]-scaled image Tensors
# to a logit Tensor. In particular, it includes *both* the preprocessing
# function, and the neural network.
classifier = make_classifier()
cleverhans_model = cleverhans.model.CallableModelWrapper(classifier, 'logits')
_, data_test = tf.keras.datasets.cifar10.load_data()
data = _build_dataset(data_test, batch_size=batch_size, shuffle=False)
# Necessary for backwards-compatibility
# Earlier versions of TF don't have a registered gradient for the AddV2 op
tf.RegisterGradient('AddV2')(math_grad._AddGrad) # pylint: disable=protected-access
# Generate adversarial images.
if attack_fn_name == 'fgsm':
attack = attacks.MadryEtAl(cleverhans_model)
num_cifar_classes = 10
adv_x = attack.generate(data.image,
eps=FLAGS.epsilon_attack,
eps_iter=FLAGS.learning_rate,
nb_iter=FLAGS.num_steps,
y=tf.one_hot(data.label, depth=num_cifar_classes))
elif attack_fn_name == 'none':
adv_x = data.image
logits = classifier(adv_x)
probs = tf.nn.softmax(logits)
adv_acc = _top_1_accuracy(logits, data.label)
with tf.train.SingularMonitoredSession() as sess:
total_acc = 0.
for _ in range(FLAGS.skip_batches):
sess.run(data.image)
for _ in range(total_batches):
_, _, adv_acc_val = sess.run([probs, data.label, adv_acc])
total_acc += adv_acc_val
print('Batch accuracy: {}'.format(adv_acc_val))
print('Total accuracy against {}: {}'.format(
FLAGS.attack_fn_name, total_acc / total_batches))
########## Utilities ##########
# Defines a dataset sample.
Sample = collections.namedtuple('Sample', ['image', 'label'])
def _build_dataset(raw_data, batch_size=32, shuffle=False):
"""Builds a dataset from raw NumPy tensors.
Args:
raw_data: Pair (images, labels) of numpy arrays. `images` should have shape
(N, H, W, C) with values in [0, 255], and `labels` should have shape
(N,) or (N, 1) indicating class indices.
batch_size: int, batch size
    shuffle: bool, whether to shuffle the data (default: False).
Returns:
(image_tensor, label_tensor), which iterate over the dataset, which are
(batch_size, H, W, C) tf.float32 and (batch_size,) tf.int32 Tensors
respectively
"""
images, labels = raw_data
labels = np.squeeze(labels)
samples = Sample(images.astype(np.float32) / 255., labels.astype(np.int64))
data = tf.data.Dataset.from_tensor_slices(samples)
if shuffle:
data = data.shuffle(1000)
return data.repeat().batch(batch_size).make_one_shot_iterator().get_next()
def _cifar_meanstd_normalize(image):
"""Mean + stddev whitening for CIFAR-10 used in ResNets.
Args:
image: Numpy array or TF Tensor, with values in [0, 255]
Returns:
image: Numpy array or TF Tensor, shifted and scaled by mean/stdev on
CIFAR-10 dataset.
"""
# Channel-wise means and std devs calculated from the CIFAR-10 training set
cifar_means = [125.3, 123.0, 113.9]
cifar_devs = [63.0, 62.1, 66.7]
rescaled_means = [x / 255. for x in cifar_means]
rescaled_devs = [x / 255. for x in cifar_devs]
image = (image - rescaled_means) / rescaled_devs
return image
def main(unused_argv):
eval_cifar()
if __name__ == '__main__':
app.run(main)
|
StarcoderdataPython
|
1958602
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
main
~~~~~
Solves the knapsack problem: the code reads the input (the items available for the
knapsack) and searches for the combination that best fits the constraints using a
genetic algorithm.
"""
import logging
import click
import pytest
from src.population import Population
import config.conf_file as cnf
@click.command()
@click.option('--population_size', type=click.IntRange(cnf.MIN_POP_SIZE, cnf.MAX_POP_SIZE, clamp=True),
prompt="Please choose population size between {} to {}".format(cnf.MIN_POP_SIZE, cnf.MAX_POP_SIZE),
default='20')
@click.option('--max_weight', type=click.IntRange(cnf.MIN_MAX_WEIGHT, cnf.MAX_MAX_WEIGHT, clamp=True),
prompt="Please choose maximum weight between {} to {}".format(
cnf.MIN_MAX_WEIGHT, cnf.MAX_MAX_WEIGHT),
default='15')
@click.option('--num_iterations', type=click.IntRange(cnf.MIN_ITERATIONS, cnf.MAX_ITERATIONS, clamp=True),
prompt="Please choose number of iterations between {} to {}".format(
cnf.MIN_ITERATIONS, cnf.MAX_ITERATIONS), default='15')
def main(population_size, max_weight, num_iterations):
"""
    starts logging and feeds the data into a Population to run the genetic algorithm
    :param population_size: int, number of candidate solutions in the GA population
:param max_weight: int, what is the maximum weight allowed in the knapsack
:param num_iterations: how many iterations can the genetic algorithm perform
:return:
"""
pytest.main()
configure_logging()
logger = logging.getLogger(__name__)
logger.info('## Started ##')
try:
pop = Population(population_size, cnf.KNAPSACK_FILE_NAME, max_weight, logger)
pop.circle_of_life(num_iterations)
except IOError:
logger.error('Failed to open file', exc_info=True)
logger.info('## Finished ##')
return
def configure_logging():
"""
initialize the logging process
:return:
"""
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# create a file handler
handler = logging.FileHandler(cnf.LOG_FILE_NAME)
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s: %(name)s: %(levelname)s: %(message)s')
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
3554085
|
<reponame>Andrei-Dolgolev/mirror<filename>tests/test_populate.py
"""
Tests for mirror CLI
"""
import argparse
import unittest
import mirror.cli
class TestGenerateMirrorCLI(unittest.TestCase):
def setUp(self):
self.subcommand = 'test-subcommand'
def subcommand_populator(parser: argparse.ArgumentParser) -> None:
parser.add_argument('positional_arg')
parser.add_argument('--opt-a', '-a', required=True)
parser.add_argument('--opt-b', '-b', required=False)
parser.add_argument('--opt-c', '-c', action='store_true')
self.subcommand_populators = {
self.subcommand: subcommand_populator,
}
def test_generate_mirror_cli_1(self):
parser = argparse.ArgumentParser()
mirror.cli.populate_cli(parser, self.subcommand_populators)
args = parser.parse_args([self.subcommand, '-a', 'lol', 'rofl'])
self.assertEqual(args.opt_a, 'lol')
self.assertIsNone(args.opt_b)
self.assertFalse(args.opt_c)
self.assertEqual(args.positional_arg, 'rofl')
def test_generate_mirror_cli_2(self):
parser = argparse.ArgumentParser()
mirror.cli.populate_cli(parser, self.subcommand_populators)
with self.assertRaises(SystemExit):
args = parser.parse_args([self.subcommand, 'rofl'])
def test_generate_mirror_cli_3(self):
parser = argparse.ArgumentParser()
mirror.cli.populate_cli(parser, self.subcommand_populators)
with self.assertRaises(SystemExit):
args = parser.parse_args([self.subcommand, '--opt-a', 'lol'])
def test_generate_mirror_cli_4(self):
parser = argparse.ArgumentParser()
mirror.cli.populate_cli(parser, self.subcommand_populators)
args = parser.parse_args([self.subcommand, '--opt-a', 'lol', 'rofl', '--opt-c'])
self.assertEqual(args.opt_a, 'lol')
self.assertIsNone(args.opt_b)
self.assertTrue(args.opt_c)
self.assertEqual(args.positional_arg, 'rofl')
|
StarcoderdataPython
|
1886465
|
<reponame>forensic-security/cybereason<gh_stars>1-10
# patch to cope with a bug in /rest/file-search/fetch-direct that
# returns two {'file-encoding': 'chunked'} headers
import re
from h11._abnf import field_name, field_value
from h11._util import bytesify, LocalProtocolError, validate
from h11._headers import Headers
_content_length_re = re.compile(br'[0-9]+')
_field_name_re = re.compile(field_name.encode('ascii'))
_field_value_re = re.compile(field_value.encode('ascii'))
def normalize_and_validate(headers, _parsed: bool=False):
new_headers = []
seen_content_length = None
saw_transfer_encoding = False
for name, value in headers:
# For headers coming out of the parser, we can safely skip some steps,
# because it always returns bytes and has already run these regexes
# over the data:
if not _parsed:
name = bytesify(name)
value = bytesify(value)
validate(_field_name_re, name, 'Illegal header name {!r}', name)
validate(_field_value_re, value, 'Illegal header value {!r}', value)
if not isinstance(name, bytes):
raise TypeError(f'Header name must be bytes, not {type(name)}')
        if not isinstance(value, bytes):
            raise TypeError(f'Header value must be bytes, not {type(value)}')
raw_name = name
name = name.lower()
if name == b'content-length':
lengths = {length.strip() for length in value.split(b',')}
if len(lengths) != 1:
raise LocalProtocolError('conflicting Content-Length headers')
value = lengths.pop()
validate(_content_length_re, value, 'bad Content-Length')
if seen_content_length is None:
seen_content_length = value
new_headers.append((raw_name, name, value))
elif seen_content_length != value:
raise LocalProtocolError('conflicting Content-Length headers')
elif name == b'transfer-encoding':
# "A server that receives a request message with a transfer coding
# it does not understand SHOULD respond with 501 (Not Implemented)."
# https://tools.ietf.org/html/rfc7230#section-3.3.1
if saw_transfer_encoding:
if saw_transfer_encoding == value:
continue
raise LocalProtocolError(
'multiple Transfer-Encoding headers', error_status_hint=501
)
# "All transfer-coding names are case-insensitive"
# -- https://tools.ietf.org/html/rfc7230#section-4
value = value.lower()
if value != b'chunked':
raise LocalProtocolError(
'Only Transfer-Encoding: chunked is supported',
error_status_hint=501,
)
saw_transfer_encoding = value
new_headers.append((raw_name, name, value))
else:
new_headers.append((raw_name, name, value))
return Headers(new_headers)
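# --- illustrative usage (not part of the original patch) ---
# A minimal sketch of exercising the tolerant routine above and rebinding it
# into h11. Whether patching h11._headers (and, in some h11 versions,
# h11._events) is sufficient depends on library internals, so treat the two
# assignment lines as assumptions rather than the definitive patch points.
if __name__ == '__main__':
    import h11._events
    import h11._headers

    h11._headers.normalize_and_validate = normalize_and_validate  # assumed patch point
    h11._events.normalize_and_validate = normalize_and_validate   # assumed patch point

    # Two identical Transfer-Encoding headers are now tolerated instead of
    # raising LocalProtocolError.
    headers = normalize_and_validate([
        (b'transfer-encoding', b'chunked'),
        (b'transfer-encoding', b'chunked'),
        (b'content-length', b'42'),
    ])
    print(headers)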
|
StarcoderdataPython
|
3468930
|
import finder_in_files
from colorama import Fore # pip install colorama
class Test:
def __init__(self):
self.settings_files = {
'txt': True,
'log': True,
'html': True,
'css': True,
'cpp': True,
'h': True,
'py': True,
'c': True,
'doc': True,
'docx': True,
'rtf': True,
'odt': True,
'pdf': True
}
def settings(self): # Configuring which extensions are used
for exp in self.settings_files: # Output to the configuration console.
if self.settings_files[exp]:
print("\t{0:9} | ON".format(Fore.GREEN + exp))
else:
print("\t{0:9} | OFF".format(Fore.RED + exp))
print(Fore.WHITE)
while True: # Change configuration
act = input('\tEnter "back" or "quit" to quit\nThe extension to be turned off / on: ').lower()
if act in self.settings_files:
if self.settings_files[act]:
self.settings_files[act] = False
else:
self.settings_files[act] = True
return
elif act == "back" or act == "quit":
return
else:
print(Fore.RED + '\nError! You entered the wrong file extension\n' + Fore.WHITE)
def directory(self): # To find out the directory
while True:
catalog_name = input('Enter "back" to exit\n\tDirectory path (C:/program): ').lower()
if catalog_name == "back" or catalog_name == "quit":
return
find_str = input("\nText to find: ").lower()
try:
results = finder_in_files.search(catalog_name, find_str, self.settings_files)
if not results:
print('\nThe text was not found. ;(\n')
else:
for name_file, numbers_str, numbers_repeat in results:
print(f"\nPath to the file: {name_file}")
print(f'Content lines: {str(numbers_str).replace(",", " |")}')
print(f"String repetitions: {numbers_repeat}\n")
except FileNotFoundError:
print(Fore.RED + '\nError! You entered an invalid file path.\n' + Fore.WHITE)
def run(self): # Main function
print('Press CTRL + C to quit.\n')
while True:
print("\nTo enter:\n[1] - Start")
print("[2] - Setting File extensions")
inlet = input("Enter: ").lower()
if inlet == "1":
self.directory()
elif inlet == "2":
self.settings()
else:
print(Fore.RED + "Error! You entered the wrong command." + Fore.WHITE)
if __name__ == "__main__": # Start!
program = Test()
program.run()
|
StarcoderdataPython
|
4838857
|
from argparse import ArgumentParser
import shutil
from tensorboard.backend.event_processing.event_file_inspector import get_inspection_units, print_dict, get_dict_to_print
"""
Deletes all folders with small tensorboard run files
"""
parser = ArgumentParser('delete small runs')
parser.add_argument('--logdir', type=str, default='.')
parser.add_argument('--delete_smaller_than', type=int)
args = parser.parse_args()
run_len = {}
inspect_units = get_inspection_units(logdir=args.logdir)
for run in inspect_units:
path = run[0]
max_length = 0
for key, value in get_dict_to_print(run.field_to_obs).items():
if value is not None:
length = value['max_step']
if max_length < length:
max_length = length
run_len[path] = max_length
for run, length in run_len.items():
if args.delete_smaller_than is None:
print(f'run:{run} length:{length}')
else:
if length < args.delete_smaller_than:
try:
print(f'{run} is {length} and was deleted')
shutil.rmtree(run)
except:
print(f"OS didn't let us delete {run}")
|
StarcoderdataPython
|
7955
|
<filename>openfermioncirq/variational/ansatzes/swap_network_trotter_hubbard_test.py
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from openfermioncirq.variational.ansatzes import SwapNetworkTrotterHubbardAnsatz
def test_swap_network_trotter_hubbard_ansatz_param_bounds():
ansatz = SwapNetworkTrotterHubbardAnsatz(3, 1, 1.0, 4.0, periodic=False)
assert list(symbol.name for symbol in ansatz.params()) == [
'Th_0', 'V_0',]
assert ansatz.param_bounds() == [
(-2.0, 2.0), (-1.0, 1.0)]
ansatz = SwapNetworkTrotterHubbardAnsatz(1, 4, 1.0, 4.0, periodic=False)
assert list(symbol.name for symbol in ansatz.params()) == [
'Tv_0', 'V_0',]
assert ansatz.param_bounds() == [
(-2.0, 2.0), (-1.0, 1.0)]
ansatz = SwapNetworkTrotterHubbardAnsatz(3, 2, 1.0, 4.0)
assert list(symbol.name for symbol in ansatz.params()) == [
'Th_0', 'Tv_0', 'V_0',]
assert ansatz.param_bounds() == [
(-2.0, 2.0), (-2.0, 2.0), (-1.0, 1.0)]
|
StarcoderdataPython
|
3566916
|
<gh_stars>0
#!/usr/bin/env python
import warnings
import os
import pickle
import datetime
import logging
from functools import partial
from multiprocessing import Pool
import jinja2
from tqdm import tqdm
import tables
import numpy as np
from astropy import table
from astropy import time, units as u
from mica.starcheck import get_starcheck_catalog
from agasc.supplement.magnitudes import star_obs_catalogs, mag_estimate, mag_estimate_report as msr
from agasc.supplement.utils import save_version, MAGS_DTYPE
from cxotime import CxoTime
logger = logging.getLogger('agasc.supplement')
def level0_archive_time_range():
"""
Return the time range covered by mica archive aca_l0 files.
:return: tuple of CxoTime
"""
import sqlite3
import os
db_file = os.path.expandvars('$SKA/data/mica/archive/aca0/archfiles.db3')
with sqlite3.connect(db_file) as connection:
cursor = connection.cursor()
cursor.execute("select tstop from archfiles order by tstop desc limit 1")
t_stop = cursor.fetchall()[0][0]
cursor.execute("select tstop from archfiles order by tstart asc limit 1")
t_start = cursor.fetchall()[0][0]
return CxoTime(t_stop).date, CxoTime(t_start).date
def get_agasc_id_stats(agasc_ids, obs_status_override={}, tstop=None, no_progress=None):
"""
Call mag_stats.get_agasc_id_stats for each AGASC ID
:param agasc_ids: list
:param obs_status_override: dict.
Dictionary overriding the OK flag for specific observations.
Keys are (OBSID, AGASC ID) pairs, values are dictionaries like
{'obs_ok': True, 'comments': 'some comment'}
:param tstop: cxotime-compatible timestamp
Only observations prior to this timestamp are considered.
:return: astropy.table.Table, astropy.table.Table, list
obs_stats, agasc_stats, fails
"""
from agasc.supplement.magnitudes import mag_estimate
from astropy.table import Table, vstack
fails = []
obs_stats = []
agasc_stats = []
bar = tqdm(agasc_ids, desc='progress', disable=no_progress, unit='star')
for agasc_id in agasc_ids:
bar.update()
try:
logger.debug('-' * 80)
logger.debug(f'{agasc_id=}')
agasc_stat, obs_stat, obs_fail = \
mag_estimate.get_agasc_id_stats(agasc_id=agasc_id,
obs_status_override=obs_status_override,
tstop=tstop)
agasc_stats.append(agasc_stat)
obs_stats.append(obs_stat)
fails += obs_fail
except mag_estimate.MagStatsException as e:
msg = str(e)
logger.debug(msg)
fails.append(dict(e))
except Exception as e:
# transform Exception to MagStatsException for standard book keeping
msg = f'Unexpected Error: {e}'
logger.debug(msg)
fails.append(dict(mag_estimate.MagStatsException(agasc_id=agasc_id, msg=msg)))
bar.close()
logger.debug('-' * 80)
try:
agasc_stats = Table(agasc_stats) if agasc_stats else None
obs_stats = vstack(obs_stats) if obs_stats else None
except Exception as e:
agasc_stats = None
obs_stats = None
# transform Exception to MagStatsException for standard book keeping
fails.append(dict(mag_estimate.MagStatsException(
msg=f'Exception at end of get_agasc_id_stats: {str(e)}')))
return obs_stats, agasc_stats, fails
def get_agasc_id_stats_pool(agasc_ids, obs_status_override=None, batch_size=100, tstop=None,
no_progress=None):
"""
Call update_mag_stats.get_agasc_id_stats multiple times using a multiprocessing.Pool
:param agasc_ids: list
:param obs_status_override: dict.
Dictionary overriding the OK flag for specific observations.
Keys are (OBSID, AGASC ID) pairs, values are dictionaries like
{'obs_ok': True, 'comments': 'some comment'}
:param batch_size: int
:param tstop: cxotime-compatible timestamp
Only observations prior to this timestamp are considered.
:return: astropy.table.Table, astropy.table.Table, list
obs_stats, agasc_stats, fails, failed_jobs
"""
import time
from astropy.table import vstack, Table
if obs_status_override is None:
obs_status_override = {}
jobs = []
args = []
finished = 0
logger.info(f'Processing {batch_size} stars per job')
for i in range(0, len(agasc_ids), batch_size):
args.append(agasc_ids[i:i + batch_size])
with Pool() as pool:
for arg in args:
jobs.append(pool.apply_async(get_agasc_id_stats,
[arg, obs_status_override, tstop, True]))
bar = tqdm(total=len(jobs), desc='progress', disable=no_progress, unit='job')
while finished < len(jobs):
finished = sum([f.ready() for f in jobs])
if finished - bar.n:
bar.update(finished - bar.n)
time.sleep(1)
bar.close()
fails = []
for arg, job in zip(args, jobs):
if job.successful():
continue
try:
job.get()
except Exception as e:
for agasc_id in arg:
fails.append(dict(
mag_estimate.MagStatsException(agasc_id=agasc_id, msg=f'Failed job: {e}')
))
results = [job.get() for job in jobs if job.successful()]
obs_stats = [r[0] for r in results if r[0] is not None]
agasc_stats = [r[1] for r in results if r[1] is not None]
obs_stats = vstack(obs_stats) if obs_stats else Table()
agasc_stats = vstack(agasc_stats) if agasc_stats else Table()
fails += sum([r[2] for r in results], [])
return obs_stats, agasc_stats, fails
def _update_table(table_old, table_new, keys):
# checking names, because actual types change upon saving in fits format
assert table_old.as_array().dtype.names == table_new.as_array().dtype.names, \
'Tables have different dtype'
table_old = table_old.copy()
new_row = np.ones(len(table_new), dtype=bool)
_, i_new, i_old = np.intersect1d(table_new[keys].as_array(),
table_old[keys].as_array(),
return_indices=True)
new_row[i_new] = False
table_old[i_old] = table_new[i_new]
return table.vstack([table_old, table_new[new_row]])
def update_mag_stats(obs_stats, agasc_stats, fails, outdir='.'):
"""
Update the mag_stats catalog.
I currently save three files:
- mag_stats_agasc.fits with stats for each AGASC ID
- mag_stats_obsid.fits with stats for each OBSID
- mag_stats_fails.pkl with a list of failures
:param obs_stats:
:param agasc_stats:
:param fails:
:param outdir:
:return:
"""
if agasc_stats is not None and len(agasc_stats):
filename = outdir / 'mag_stats_agasc.fits'
logger.debug(f'Updating {filename}')
if filename.exists():
agasc_stats = _update_table(table.Table.read(filename), agasc_stats,
keys=['agasc_id'])
os.remove(filename)
for column in agasc_stats.colnames:
if column in mag_estimate.AGASC_ID_STATS_INFO:
agasc_stats[column].description = mag_estimate.AGASC_ID_STATS_INFO[column]
agasc_stats.write(filename)
if obs_stats is not None and len(obs_stats):
filename = outdir / 'mag_stats_obsid.fits'
logger.debug(f'Updating {filename}')
if filename.exists():
obs_stats = _update_table(table.Table.read(filename), obs_stats,
keys=['agasc_id', 'obsid', 'timeline_id'])
os.remove(filename)
for column in obs_stats.colnames:
if column in mag_estimate.OBS_STATS_INFO:
obs_stats[column].description = mag_estimate.OBS_STATS_INFO[column]
obs_stats.write(filename)
if len(fails):
filename = outdir / 'mag_stats_fails.pkl'
logger.debug(f'Updating {filename}')
with open(filename, 'wb') as out:
pickle.dump(fails, out)
def update_supplement(agasc_stats, filename, include_all=True):
"""
Update the magnitude table of the AGASC supplement.
:param agasc_stats:
:param filename:
:param include_all: bool
if True, all OK entries are included in supplement.
if False, only OK entries marked 'selected_*'
:return:
"""
if len(agasc_stats) == 0:
return [], []
if include_all:
outliers_new = agasc_stats[
(agasc_stats['n_obsids_ok'] > 0)
]
else:
outliers_new = agasc_stats[
(agasc_stats['n_obsids_ok'] > 0)
& (agasc_stats['selected_atol']
| agasc_stats['selected_rtol']
| agasc_stats['selected_color']
| agasc_stats['selected_mag_aca_err'])
]
outliers_new['mag_aca'] = outliers_new['mag_obs']
outliers_new['mag_aca_err'] = outliers_new['mag_obs_err']
outliers_new = outliers_new[MAGS_DTYPE.names].as_array()
if outliers_new.dtype != MAGS_DTYPE:
outliers_new = outliers_new.astype(MAGS_DTYPE)
outliers = None
new_stars = None
updated_stars = None
if filename.exists():
# I could do what follows directly in place, but the table is not that large.
with tables.File(filename, 'r') as h5:
if 'mags' in h5.root:
outliers_current = h5.root.mags[:]
# find the indices of agasc_ids in both current and new lists
_, i_new, i_cur = np.intersect1d(outliers_new['agasc_id'],
outliers_current['agasc_id'],
return_indices=True)
current = outliers_current[i_cur]
new = outliers_new[i_new]
# from those, find the ones which differ in last observation time
i_cur = i_cur[current['last_obs_time'] != new['last_obs_time']]
i_new = i_new[current['last_obs_time'] != new['last_obs_time']]
# overwrite current values with new values (and calculate diff to return)
updated_stars = np.zeros(len(outliers_new[i_new]),
dtype=MAGS_DTYPE)
updated_stars['mag_aca'] = (outliers_new[i_new]['mag_aca']
- outliers_current[i_cur]['mag_aca'])
updated_stars['mag_aca_err'] = (outliers_new[i_new]['mag_aca_err']
- outliers_current[i_cur]['mag_aca_err'])
updated_stars['agasc_id'] = outliers_new[i_new]['agasc_id']
outliers_current[i_cur] = outliers_new[i_new]
# find agasc_ids in new list but not in current list
new_stars = ~np.in1d(outliers_new['agasc_id'], outliers_current['agasc_id'])
# and add them to the current list
outliers_current = np.concatenate([outliers_current, outliers_new[new_stars]])
outliers = np.sort(outliers_current)
new_stars = outliers_new[new_stars]['agasc_id']
if outliers is None:
logger.warning('Creating new "mags" table')
outliers = outliers_new
new_stars = outliers_new['agasc_id']
updated_stars = np.array([], dtype=MAGS_DTYPE)
mode = 'r+' if filename.exists() else 'w'
with tables.File(filename, mode) as h5:
if 'mags' in h5.root:
h5.remove_node('/mags')
h5.create_table('/', 'mags', outliers)
save_version(filename, 'mags')
return new_stars, updated_stars
def write_obs_status_yaml(obs_stats=None, fails=(), filename=None):
obs = []
if obs_stats and len(obs_stats):
obs_stats = obs_stats[~obs_stats['obs_ok']]
mp_starcat_times = np.unique(obs_stats['mp_starcat_time'])
for mp_starcat_time in mp_starcat_times:
rows = obs_stats[obs_stats['mp_starcat_time'] == mp_starcat_time]
rows.sort(keys='agasc_id')
obs.append({
'mp_starcat_time': mp_starcat_time,
'obsid': obs_stats['obsid'],
'agasc_id': list(rows['agasc_id']),
'status': 1,
'comments': obs_stats['comment']
})
for fail in fails:
if fail['agasc_id'] is None or fail['mp_starcat_time'] is None:
continue
mp_starcat_times = fail['mp_starcat_time'] if type(fail['mp_starcat_time']) is list \
else [fail['mp_starcat_time']]
agasc_id = fail['agasc_id']
for mp_starcat_time in mp_starcat_times:
obs.append({
'mp_starcat_time': mp_starcat_time,
'obsid': fail['obsid'],
'agasc_id': [agasc_id],
'status': 1,
'comments': fail['msg']
})
if len(obs) == 0:
if filename and filename.exists():
filename.unlink()
return
agasc_ids = []
for o in obs:
cat = get_starcheck_catalog(o['obsid'])
if cat:
cat = cat['cat']
maxmags = dict(zip(cat['id'], cat['maxmag']))
agasc_ids += [(agasc_id, maxmags.get(agasc_id, -1)) for agasc_id in o['agasc_id']]
else:
agasc_ids += [(agasc_id, -1) for agasc_id in o['agasc_id']]
agasc_ids = dict(sorted(agasc_ids))
yaml_template = """bad:
{%- for agasc_id, maxmag in agasc_ids.items() %}
{{ agasc_id }}: 0
{%- endfor %}
mags:
{%- for agasc_id, maxmag in agasc_ids.items() %}
- agasc_id: {{ agasc_id }}
mag_aca: {{ maxmag }}
mag_aca_err: 0.1
{%- endfor %}
obs:
{%- for obs in observations %}
- mp_starcat_time: {{ obs.mp_starcat_time }}
obsid: {{ obs.obsid }}
status: {{ obs.status }}
agasc_id: [{% for agasc_id in obs.agasc_id -%}
{{ agasc_id }}{%- if not loop.last %}, {% endif -%}
{%- endfor -%}]
comments: {{ obs.comments }}
{%- endfor %}
"""
tpl = jinja2.Template(yaml_template)
result = tpl.render(observations=obs, agasc_ids=agasc_ids)
if filename:
with open(filename, 'w') as fh:
fh.write(result)
return result
def do(start,
stop,
output_dir,
agasc_ids=None,
report=False,
reports_dir=None,
report_date=None,
multi_process=False,
include_bad=False,
dry_run=False,
no_progress=None,
email='',
):
"""
:param start: cxotime.CxoTime
Start time. Only stars with at least one observation between start/stop are considered.
:param stop: cxotime.CxoTime
Stop time. Only stars with at least one observation between start/stop are considered.
:param output_dir: pathlib.Path
Directory where to place all output.
:param agasc_ids: list
List of AGASC IDs. Optional. If not given, all observations within start/stop are used.
:param report: bool
Generate an HTML report.
:param reports_dir: pathlib.Path
Directory where to write reports.
:param report_date: cxotime.CxoTime
The report date (report_date.date[:8] will be the report directory name)
:param multi_process: bool
Run on multiple processes.
:param include_bad: bool
Consider stars that are in the 'bad' supplement table.
:param dry_run: bool
Only parse options and not actually run the magnitude estimates
:param no_progress: bool
Hide progress bar
:param email: str
:return:
"""
# PyTables is not really unicode-compatible, but python 3 is basically unicode.
# For our purposes, PyTables works. It would fail with characters that can not be written
# as ascii. It displays a warning which I want to avoid:
warnings.filterwarnings("ignore", category=tables.exceptions.FlavorWarning)
filename = output_dir / 'agasc_supplement.h5'
if multi_process:
get_stats = partial(get_agasc_id_stats_pool, batch_size=10)
else:
get_stats = get_agasc_id_stats
skip = True
if agasc_ids is None:
obs_in_time = ((star_obs_catalogs.STARS_OBS['mp_starcat_time'] >= start)
& (star_obs_catalogs.STARS_OBS['mp_starcat_time'] <= stop))
agasc_ids = sorted(star_obs_catalogs.STARS_OBS[obs_in_time]['agasc_id'])
else:
agasc_ids = np.intersect1d(agasc_ids, star_obs_catalogs.STARS_OBS['agasc_id'])
skip = False
agasc_ids = np.unique(agasc_ids)
stars_obs = star_obs_catalogs.STARS_OBS[
np.in1d(star_obs_catalogs.STARS_OBS['agasc_id'], agasc_ids)
]
# if supplement exists:
# - drop bad stars
# - get OBS status override
# - get the latest observation for each agasc_id,
# - find the ones already in the supplement
# - include only the ones with supplement.last_obs_time < than stars_obs.mp_starcat_time
obs_status_override = {}
if filename.exists():
with tables.File(filename, 'r') as h5:
if not include_bad and 'bad' in h5.root:
logger.info('Excluding bad stars')
stars_obs = stars_obs[~np.in1d(stars_obs['agasc_id'], h5.root.bad[:]['agasc_id'])]
if 'obs' in h5.root:
obs_status_override = table.Table(h5.root.obs[:])
obs_status_override.convert_bytestring_to_unicode()
obs_status_override = {
(r['mp_starcat_time'], r['agasc_id']):
{'status': r['status'], 'comments': r['comments']}
for r in obs_status_override
}
if 'mags' in h5.root and len(stars_obs):
outliers_current = h5.root.mags[:]
times = stars_obs[['agasc_id', 'mp_starcat_time']].group_by(
'agasc_id').groups.aggregate(lambda d: np.max(CxoTime(d)).date)
if len(outliers_current):
times = table.join(times,
table.Table(outliers_current[['agasc_id', 'last_obs_time']]),
join_type='left')
else:
times['last_obs_time'] = table.MaskedColumn(
np.zeros(len(times), dtype=h5.root.mags.dtype['last_obs_time']),
mask=np.ones(len(times), dtype=bool)
)
if skip:
if hasattr(times['last_obs_time'], 'mask'):
# the mask exists if there are stars in stars_obs
# that are not in outliers_current
update = (times['last_obs_time'].mask
| ((~times['last_obs_time'].mask)
& (CxoTime(times['mp_starcat_time']).cxcsec
> times['last_obs_time']).data)
)
else:
update = (CxoTime(times['mp_starcat_time']).cxcsec > times['last_obs_time'])
stars_obs = stars_obs[np.in1d(stars_obs['agasc_id'], times[update]['agasc_id'])]
agasc_ids = np.sort(np.unique(stars_obs['agasc_id']))
if len(update) - np.sum(update):
logger.info(f'Skipping {len(update) - np.sum(update)} '
f'stars already in the supplement')
if len(stars_obs) == 0:
logger.info(f'There are no new observations to process')
return
# do the processing
logger.info(f'Will process {len(agasc_ids)} stars on {len(stars_obs)} observations')
logger.info(f'from {start} to {stop}')
if dry_run:
return
obs_stats, agasc_stats, fails = \
get_stats(agasc_ids, tstop=stop,
obs_status_override=obs_status_override,
no_progress=no_progress)
failed_global = [f for f in fails if not f['agasc_id'] and not f['obsid']]
failed_stars = [f for f in fails if f['agasc_id'] and not f['obsid']]
failed_obs = [f for f in fails if f['obsid']]
msg = (
f'Got:\n'
f' {0 if obs_stats is None else len(obs_stats)} OBSIDs,'
f' {0 if agasc_stats is None else len(agasc_stats)} stars,'
)
if failed_obs:
msg += f' {len(failed_obs)} failed observations,'
if failed_stars:
msg += f' {len(failed_stars)} failed stars,'
if failed_global:
msg += f' {len(failed_global)} global errors'
logger.info(msg)
if not output_dir.exists():
output_dir.mkdir(parents=True)
update_mag_stats(obs_stats, agasc_stats, fails, output_dir)
obs_status_file = output_dir / 'obs_status.yml'
try:
write_obs_status_yaml([], fails=failed_obs + failed_stars, filename=obs_status_file)
except Exception as e:
logger.warning(f'Failed to write {obs_status_file}: {e}')
new_stars, updated_stars = update_supplement(agasc_stats, filename=filename)
logger.info(f' {len(new_stars)} new stars, {len(updated_stars)} updated stars')
if agasc_stats is not None and len(agasc_stats):
if email:
try:
bad_obs = (
(obs_stats['mp_starcat_time'] >= start)
& (obs_stats['mp_starcat_time'] < stop)
& ~obs_stats['obs_ok']
)
if np.any(bad_obs):
msr.email_bad_obs_report(obs_stats[bad_obs], to=email)
except Exception as e:
logger.error(f'Error sending email to {email}: {e}')
if report:
if report_date is None:
report_dir = reports_dir
report_data_file = report_dir / f'report_data.pkl'
nav_links = None
report_date = CxoTime.now()
else:
report_dir = reports_dir / f'{report_date.date[:8]}'
report_data_file = report_dir / f'report_data_{report_date.date[:8]}.pkl'
week = time.TimeDelta(7 * u.day)
nav_links = {
'previous': f'../{(report_date - week).date[:8]}/index.html',
'up': '..',
'next': f'../{(report_date + week).date[:8]}/index.html'
}
# If the report data file exists, the arguments for the report from the file are
# modified according to the current run. Otherwise, they are created from scratch.
if report_data_file.exists():
with open(report_data_file, 'rb') as fh:
report_data = pickle.load(fh)
logger.info(f'Loading existing report data from {report_data_file}')
multi_star_html_args = report_data['args']
# arguments for the report are modified here
# merge fails:
# - from previous run, take fails that were not run just now
# - add current fails
multi_star_html_args['fails'] = fails
multi_star_html_args['no_progress'] = no_progress
else:
sections = [{
'id': 'new_stars',
'title': 'New Stars',
'stars': new_stars
}, {
'id': 'updated_stars',
'title': 'Updated Stars',
'stars': updated_stars['agasc_id'] if len(updated_stars) else []
}, {
'id': 'other_stars',
'title': 'Other Stars',
'stars': list(agasc_stats['agasc_id'][
~np.in1d(agasc_stats['agasc_id'], new_stars)
& ~np.in1d(agasc_stats['agasc_id'], updated_stars['agasc_id'])
])
}
]
multi_star_html_args = dict(
filename='index.html',
sections=sections,
updated_stars=updated_stars,
fails=fails,
report_date=report_date.date,
tstart=start,
tstop=stop,
nav_links=nav_links,
include_all_stars=False,
no_progress=no_progress
)
try:
report = msr.MagEstimateReport(
agasc_stats=output_dir / 'mag_stats_agasc.fits',
obs_stats=output_dir / 'mag_stats_obsid.fits',
directory=report_dir
)
report.multi_star_html(**multi_star_html_args)
latest = reports_dir / 'latest'
if os.path.lexists(latest):
logger.debug('Removing existing "latest" symlink')
latest.unlink()
logger.debug('Creating "latest" symlink')
latest.symlink_to(report_dir.absolute())
except Exception as e:
report_dir = output_dir
logger.error(f'Error when creating report: {e}')
finally:
report_data_file = report_dir / report_data_file.name
if not report_dir.exists():
report_dir.mkdir(parents=True)
report_data = {
'args': multi_star_html_args,
'directory': report_dir
}
with open(report_data_file, 'wb') as fh:
pickle.dump(report_data, fh)
logger.info(f'Report data saved in {report_data_file}')
now = datetime.datetime.now()
logger.info(f"done at {now}")
|
StarcoderdataPython
|
52171
|
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.pipeline import make_pipeline
import matplotlib.pyplot as plt
import numpy as np
import random
#===============================================================================================#
# Number of cases per day of covid 19 in the US for 218 days
cases = [
1,0,1,0,3,0,0,0,0,2,1,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,8,6,23,25,
20,66,47,64,147,225,290,278,414,267,338,1237,755,2797,3419,4777,3528,5836,8821,10934,
10115,13987,16916,17965,19332,18251,22635,22562,27043,26135,34864,30683,26065,43438,
21597,31534,31705,33251,33288,29145,24156,26385,27158,29164,29002,29916,25995,29468,
26490,25858,37144,29873,33161,29256,23371,23901,25512,31787,30369,29794,29763,19138,
22303,23366,30861,25996,26660,23792,18106,21467,20869,27191,22977,31967,13284,24481,
23405,22860,20522,24268,26229,15342,24958,16429,19680,21304,18123,23553,26177,14790,
24955,14676,20555,29034,29214,17919,17598,17376,20486,21744,22317,25468,21957,18577,
28392,22834,27828,32218,32411,27616,26657,34313,37667,40588,44602,44703,41390,35664,
43644,54357,52730,57718,52228,44361,46329,50304,64771,59260,66281,62918,60469,58858,
60971,67404,72045,74710,67574,63201,57777,63028,70106,72219,74818,64582,61795,54448,
59862,65935,68042,68605,58947,47576,49716,49988,53685,55836,62042,54590,48690,40522,
55540,56307,52799,56729,54686,41893,38986,39318,46500,44864,46754,45265,38679,33076,
37086,46393
]
days = list(range(len(cases)))
print(len(days))
days = np.asarray(days)
cases = np.asarray(cases)
days = days[:, np.newaxis]
cases = cases[:, np.newaxis]
plt.scatter(days, cases)
plt.show()
xseq = np.linspace(days.min(), days.max(), 300).reshape(-1,1)
regr = make_pipeline(PolynomialFeatures(12), LinearRegression())
regr.fit(days, cases)
plt.scatter(days, cases)
plt.plot(xseq, regr.predict(xseq), color = "red")
plt.show()
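# --- illustrative addition (not in the original script) ---
# mean_squared_error and r2_score are imported above but never used; a minimal
# sketch of scoring the polynomial fit on the training data follows, reusing
# the variables defined above.
cases_pred = regr.predict(days)
print("MSE:", mean_squared_error(cases, cases_pred))
print("R^2:", r2_score(cases, cases_pred))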
#===============================================================================================#
# Ref
# https://espanol.cdc.gov/coronavirus/2019-ncov/cases-updates/previouscases.html
|
StarcoderdataPython
|
1951006
|
<filename>PET_Library.py
__author__ = 'lpeng'
"""Penpan library
referring to Mcmahon, 2013; Rotstayn, 2006; Li, 2013; Yang, 2012; Roderick, 2007"""
from pylab import *
class Data:
# Initialization
def __init__(self, INPUT, solar): #, npt):
# Define the incoming grid data variables
self.Tair = INPUT['tavg']
self.Tmin = INPUT['tmin']
self.Tmax = INPUT['tmax']
self.Pres = self.Convert_Unit_Pres(INPUT['p'])
self.e = self.Convert_Unit_Pres(INPUT['ea']) # vapor pressure: hpa
self.Wind = INPUT['wind'] # Wind: m/s
self.sunhour = INPUT['sun'] # sunhour: hour
# self.CF = self.Convert_Unit_CF(INPUT['tc'])
self.lat = INPUT['lat']
self.elev = INPUT['elev']
self.doy = INPUT['doy']
self.Albedo = 0.23
# Calculate some variables
self.Calculate_Tmean()
# self.Calculate_Saturated_Vapor_Pressure()
self.Calculate_Mean_Saturated_Vapor_Pressure()
self.Calculate_Slope_Saturation_Vapor_Pressure_Curve()
self.Calculate_Gamma()
self.Calculate_VPD()
self.Calculate_Rso()
self.Calculate_Rs(solar)
self.Calculate_Rs_pan()
self.Calculate_LWnet()
self.Calculate_Rnet_pan() # npt temporary with npt
self.Penpan()
# self.Penman()
# self.Priestley_Taylor()
# self.Hamon()
# self.Turc()
def Calculate_Tmean(self):
self.Tmean = (self.Tmax + self.Tmin) / 2.0
return
def Calculate_Saturated_Vapor_Pressure(self):
self.estar = 0.6108 * np.exp((17.27 * self.Tair) / (237.3 + self.Tair))
return
def Calculate_Mean_Saturated_Vapor_Pressure(self):
estar_max = 0.6108 * np.exp((17.27 * self.Tmax) / (237.3 + self.Tmax))
estar_min = 0.6108 * np.exp((17.27 * self.Tmin) / (237.3 + self.Tmin))
self.estar = (estar_max + estar_min) / 2
return
def Calculate_Slope_Saturation_Vapor_Pressure_Curve(self):
# self.DELTA = 4098 * 0.6108 * np.exp((17.27 * self.Tmean) / (237.3 + self.Tmean)) / (237.3 + self.Tmean) ** 2
self.DELTA = 4098 * self.estar / (237.3 + self.Tmean) ** 2
return
def Calculate_Rs(self, solar):
"Compute the SW irradiance from the input data"
"R_s : downward solar irradiance at the surface"
if solar == "sunhours":
"Sunshine hour data have been used for calculating incoming solar radiation"
## using Angstrom-Prescott equation
# a_s = 0.25
# b_s = 0.5
# yellow river
a_s = 0.195
b_s = 0.5125
self.SWin = (a_s + b_s * (self.sunhour/self.daylength)) * self.Ra
elif solar == "cloud": # this is from Linacre, 1993, equation 19
"Cloudiness data have been used for calculating sunshine hour and thus incoming solar radiation"
self.SWin = (0.85 - 0.047 * self.CF) * self.Ra
return
def Calculate_Rso(self):
# dr is inverse relative distance Earth-Sun
dr = 1 + 0.033 * np.cos(2 * np.pi * self.doy/365.0)
# delta is solar declination
delta = 0.409 * np.sin((2*np.pi * self.doy/365.0) - 1.39)
# from decimal degree to radians
phi = (np.pi/180) * self.lat
# omega: sunset hour angle
omega = np.arccos(-np.tan(phi) * np.tan(delta))
# daylength: the maximum daylight hours
self.daylength = 24 / np.pi * omega
if self.elev:
z2 = self.elev
else:
z2 = 30 # set the station elevation above the sea level as 30m
Gsc = 0.082 # solar constant = 0.082 [MJ m^-2 min^-1]
# Ra: extraterrestrial radiation for daily period for different location, different daytime
self.Ra = 24 * 60 / np.pi * Gsc * dr * (omega * np.sin(phi) * np.sin(delta) + np.cos(phi) * np.cos(delta) * np.sin(omega))
self.Rso = (0.75 + 2e-5 * z2) * self.Ra
return
def Calculate_Rs_pan(self):
"Compute the total SW irradiance of the pan"
"Prad : pan evaporation factor, which accounts for the extra direct irradiance intercepted by the walls of the pan when the sun is not directly overhead"
"f_dir : fraction of fs that is direct"
# Linacre, 1994
# P_rad = 1.32 + 4 * 10**(-4) * abs(self.lat) + 8 * 10 ** (-5) * self.lat ** 2
P_rad = 1.70 + 3 * 10 ** (-4) * self.lat ** 2
f_dir = -0.11 + 1.31 * self.SWin / self.Ra # extraterrestrial radiation
self.Rsp = (f_dir * P_rad + 2.0 * (1 - f_dir) + 2.0 * self.Albedo) * self.SWin
return
def Calculate_LWnet(self):
stefan_b = 4.903e-9 # [MJ K-4 m-2 day-1]
epsilon_s = 0.98
self.LWnet = stefan_b * ((self.Tmax+273.16)**4 + (self.Tmin+273.16)**4) / 2.0 * (0.34-0.14 * np.sqrt(self.e)) * (1.35 * self.SWin/self.Rso - 0.35)
# self.LWnet = stefan_b * (self.Tair+273.16)**4 * (0.34-0.14 * np.sqrt(self.e)) * (1.35 * self.SWin/self.Rso - 0.35)
# self.PDLWnet_PDTair = - 4 * stefan_b * (self.Tair+273.16)**3 * (0.34-0.14 * np.sqrt(self.e)) * (1.35 * self.SWin/self.Rso - 0.35)
return
def Calculate_Rnet_pan(self): #, npt):
Ap = 0.14 # Class A pan albedo (Linacre, 1992; Rotstayn, 2006; Roderick, 2007; Yang and Yang, 2011)
self.Rn_pan = (1 - Ap) * self.Rsp - self.LWnet
# # temporary
# import pandas as pd
# def running_mean(y, npts):
# return pd.rolling_mean(y, npts, center=True)
# Rn_pan = (1 - Ap) * self.Rsp - self.LWnet
# self.Rn_pan = running_mean(Rn_pan, npt)
# # temporary
return
def Calculate_Gamma(self):
cp = 1.013 # Specific heat of moist air at constant pressure [kJ kg-1 C-1]
self.lv = 2.501 - 0.002361 * self.Tair # Latent heat vapor (MJ/kg)
self.gamma = ((cp * self.Pres) / (0.622 * self.lv)) * 10 ** (-3)
return self.gamma
## Convert unit
# W/m2 to MJ/m2/day
def Convert_Unit_Rad(self, input):
watt2jule = 10e5/86400.0
data = input / float(watt2jule)
return data
# pressure: hpa to kpa
def Convert_Unit_Pres(self, input):
data = input / 10.0
return data
# 10m wind to 2m wind
def Convert_Unit_Wind(self, input):
zh = 10 # 10 m wind field
data = (4.87 / (np.log(67.8 * zh - 5.42))) * input
return data
def Convert_Unit_CF(self, input):
data = input / 10.0
return data
## Calculation for each components
def Calculate_VPD(self):
self.vpd = (self.estar - self.e) * (self.estar>=self.e) + 0 * (self.estar<self.e)
return self.vpd
# Reference-surface Models
def Penpan(self):
"ET_type = D20 Pan Evaporation"
# These parameters are from Yang 2012
coeff_hq = 5 # for D20 pan
# f_pan_u = 2.6 * (1 + 0.536 * self.Wind)
# Beijing experiment for vapor transfer function
# f_pan_u = 5.4 * (1 + 0.73 * self.Wind)/self.lv
# f_pan_u = 1.201 + 1.621 * self.Wind
f_pan_u = 1.313 + 1.381 * self.Wind
# f_pan_u = 2.626 + 1.381 * self.Wind
# f_pan_u = 0.35*(1+9.8e-3 * self.Wind)
# f_pan_u = 1.39e-8*(1+1.35 * self.Wind)
PET_R = self.DELTA/(self.DELTA + coeff_hq * self.gamma) * self.Rn_pan / self.lv
PET_A = coeff_hq * self.gamma/(self.DELTA + coeff_hq * self.gamma) * f_pan_u * self.vpd
self.penpan = PET_R + PET_A
return
def Penpan_2parts(self):
"ET_type = D20 Pan Evaporation"
# These parameters are from Yang 2012
coeff_hq = 5 # for D20 pan
f_pan_u = 1.313 + 1.381 * self.Wind
PET_R = self.DELTA/(self.DELTA + coeff_hq * self.gamma) * self.Rn_pan / self.lv
PET_A = coeff_hq * self.gamma/(self.DELTA + coeff_hq * self.gamma) * f_pan_u * self.vpd
return PET_R, PET_A
def Penpan_u2(self, a, b):
"ET_type = D20 Pan Evaporation"
# These parameters are from Yang 2012
coeff_hq = 5 # for D20 pan
# f_pan_u = 2.6 * (1 + 0.536 * self.Wind)
# Beijing experiment for vapor transfer function
# f_pan_u = 1.313 + 1.381 * self.Wind
f_pan_u = a + b * self.Wind
PET_R = self.DELTA/(self.DELTA + coeff_hq * self.gamma) * self.Rn_pan / self.lv
PET_A = coeff_hq * self.gamma/(self.DELTA + coeff_hq * self.gamma) * f_pan_u * self.vpd / self.lv
penpan = PET_R + PET_A
return penpan
def Penman(self):
# Hydrology Book
PET_R = (self.DELTA / (self.DELTA + self.gamma)) * self.Rn_pan / self.lv
PET_A = (self.gamma / (self.DELTA + self.gamma)) * ((6.43 * (1 + 0.536 * self.Wind) * self.vpd) / self.lv)
self.penman = PET_R + PET_A
return
def Priestley_Taylor(self):
alpha = 1.26
self.PT = alpha * self.DELTA / (self.DELTA + self.gamma) * self.Rn_pan / self.lv
return
def Hamon(self):
# Tair is daily mean air temperature
mask = np.zeros(len(self.Tair))
mask[self.Tair > 0] = 1
calibration = 2
estar = 6.108 * np.exp(17.27 * self.Tair / (self.Tair + 237.3)) # you must use daily average temperature
self.hamon = 0.1651 * 216.7 * estar / (self.Tair + 273.3) * (self.daylength / 12.0) * calibration * mask
return
def Turc(self):
mask = np.zeros(len(self.Tair))
mask[self.Tair > 0] = 1
RH = self.e/self.estar*100
turc = (0.013 * (23.88 * self.Rsp + 50) * (self.Tair) / (self.Tair + 15) * (RH>=50) + 0.013 * (23.88 * self.Rsp + 50) * (self.Tair) / (self.Tair + 15) * (1 + (50-RH)/70) * (RH<50)) * mask
# this mask doesn't work for the Tair = -15 case, because that makes the denominator zero, so the result is NaN in the first place and stays NaN after multiplying by the mask
turc[np.isnan(turc)==True] = 0
self.turc = turc
return
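# --- illustrative usage (not part of the original library) ---
# A minimal sketch of building the INPUT dictionary expected by Data.__init__
# above and reading the Penpan estimate. All numbers are made up; the keys
# (tavg, tmax, tmin, p, ea, wind, sun, lat, elev, doy) are the ones accessed
# in __init__.
if __name__ == '__main__':
    import numpy as np
    doy = np.arange(1, 366)
    INPUT = {
        'tavg': 20 + 8 * np.sin(2 * np.pi * (doy - 100) / 365.0),  # deg C
        'tmax': 26 + 8 * np.sin(2 * np.pi * (doy - 100) / 365.0),
        'tmin': 14 + 8 * np.sin(2 * np.pi * (doy - 100) / 365.0),
        'p': np.full(doy.shape, 1000.0),   # pressure in hPa
        'ea': np.full(doy.shape, 12.0),    # vapor pressure in hPa
        'wind': np.full(doy.shape, 2.0),   # wind speed in m/s
        'sun': np.full(doy.shape, 7.0),    # sunshine hours
        'lat': 35.0,
        'elev': 50.0,
        'doy': doy,
    }
    pet = Data(INPUT, solar='sunhours')
    print(pet.penpan[:10])  # daily pan evaporation estimate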
|
StarcoderdataPython
|
254058
|
<gh_stars>0
import shutil
from pathlib import Path
def copy(src: Path, dest: Path):
"""Copy backup from `src` to `dest`.
Args:
src (Path): the source to copy from
dest (Path): the destination to copy to
Returns:
bool: True if successful
"""
shutil.copy(str(src), str(dest))
print(f'Backed up {src.name} to {dest}!')
return True
def backup(file: Path, max_backup: int = 1):
"""Backs up a file, with `max_backup` indicating
how many circular copies should be kept.
Always attempts to resolve `file`, even if it isn't
a symlink.
Can be used with batch.
Backups are "{file.name}-{backup_number}".
Args:
file (Path): the file to attempt to back up
max_backup (int, optional): maximum circular backup count;
defaults to 1; should always be >= 1
Returns:
bool: True if successful
"""
resolved = file.resolve()
backup_dir = resolved.parent / 'backups'
if not backup_dir.exists():
backup_dir.mkdir(parents=True)
if max_backup < 1:
max_backup = 1
# Since the file you are backing up should be newer,
# let's use it for the temporary `oldest_time`
oldest_time = file.stat().st_mtime
oldest = backup_dir / f'{file.name}-1'
for i in range(max_backup):
current = backup_dir / f'{file.name}-{i+1}'
if not current.exists():
return copy(resolved, current)
elif current.stat().st_mtime < oldest_time:
oldest_time = current.stat().st_mtime
oldest = current
return copy(resolved, oldest)
def restore(file: Path, backup_number: int):
"""Restores a backup given a `backup_number` to
the card file
Not to be used with batch.
Args:
file (Path): the target file to replace/restore
backup_number (int): the specific backup to restore
Returns:
bool: True if successful
Raises:
Exception: if the backup directory doesn't exist
"""
resolved = file.resolve()
backup_dir = resolved.parent / 'backups'
if not backup_dir.exists():
backup_dir.mkdir(parents=True)
raise Exception('Backup directory doesn\'t exist.')
backup = backup_dir / f'{file.name}-{backup_number}'
if not backup.exists():
raise Exception(f'Backup {backup.name} doesn\'t exist in {backup_dir}.')
return copy(backup, resolved)
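# --- illustrative usage (not part of the original module) ---
# A small sketch of the circular-backup flow defined above. The file name is an
# assumption; backups land in a sibling "backups" directory as "<name>-1",
# "<name>-2", ...
if __name__ == '__main__':
    target = Path('savegame.dat')
    target.write_bytes(b'example payload')
    backup(target, max_backup=3)       # creates backups/savegame.dat-1
    restore(target, backup_number=1)   # copies backups/savegame.dat-1 back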
|
StarcoderdataPython
|
6692634
|
<filename>util.py<gh_stars>100-1000
import csv
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
class Util:
@staticmethod
def read_games(file):
""" Initializes game objects from csv """
games = [item for item in csv.DictReader(open(file))]
# Uncommenting these three lines will grab the latest game results for this season, update team ratings accordingly, and make forecasts for upcoming games
#file_latest = file.replace(".", "_2021.")
#urlretrieve("https://projects.fivethirtyeight.com/nfl-api/2021/nfl_games_2021.csv", file_latest)
#games += [item for item in csv.DictReader(open(file_latest))]
for game in games:
game['season'], game['neutral'], game['playoff'] = int(game['season']), int(game['neutral']), int(game['playoff'])
game['score1'], game['score2'] = int(game['score1']) if game['score1'] != '' else None, int(game['score2']) if game['score2'] != '' else None
game['elo_prob1'], game['result1'] = float(game['elo_prob1']) if game['elo_prob1'] != '' else None, float(game['result1']) if game['result1'] != '' else None
return games
@staticmethod
def evaluate_forecasts(games):
""" Evaluates and scores forecasts in the my_prob1 field against those in the elo_prob1 field for each game """
my_points_by_season, elo_points_by_season = {}, {}
forecasted_games = [g for g in games if g['result1'] != None]
upcoming_games = [g for g in games if g['result1'] == None and 'my_prob1' in g]
# Evaluate forecasts and group by season
for game in forecasted_games:
# Skip unplayed games and ties
if game['result1'] == None or game['result1'] == 0.5:
continue
if game['season'] not in elo_points_by_season:
elo_points_by_season[game['season']] = 0.0
my_points_by_season[game['season']] = 0.0
# Calculate elo's points for game
rounded_elo_prob = round(game['elo_prob1'], 2)
elo_brier = (rounded_elo_prob - game['result1']) * (rounded_elo_prob - game['result1'])
elo_points = 25 - (100 * elo_brier)
elo_points = round(elo_points + 0.001 if elo_points < 0 else elo_points, 1) # Round half up
if game['playoff'] == 1:
elo_points *= 2
elo_points_by_season[game['season']] += elo_points
# Calculate my points for game
rounded_my_prob = round(game['my_prob1'], 2)
my_brier = (rounded_my_prob - game['result1']) * (rounded_my_prob - game['result1'])
my_points = 25 - (100 * my_brier)
my_points = round(my_points + 0.001 if my_points < 0 else my_points, 1) # Round half up
if game['playoff'] == 1:
my_points *= 2
my_points_by_season[game['season']] += my_points
# Print individual seasons
for season in my_points_by_season:
print("In %s, your forecasts would have gotten %s points. Elo got %s points." % (season, round(my_points_by_season[season], 2), round(elo_points_by_season[season], 2)))
# Show overall performance
my_avg = sum(my_points_by_season.values())/len(my_points_by_season.values())
elo_avg = sum(elo_points_by_season.values())/len(elo_points_by_season.values())
print("\nOn average, your forecasts would have gotten %s points per season. Elo got %s points per season.\n" % (round(my_avg, 2), round(elo_avg, 2)))
# Print forecasts for upcoming games
if len(upcoming_games) > 0:
print("Forecasts for upcoming games:")
for game in upcoming_games:
print("%s\t%s vs. %s\t\t%s%% (Elo)\t\t%s%% (You)" % (game['date'], game['team1'], game['team2'], int(round(100*game['elo_prob1'])), int(round(100*game['my_prob1']))))
print("")
|
StarcoderdataPython
|
3408969
|
<reponame>shayweitzman/MyAutoBook<gh_stars>1-10
# Generated by Django 3.1.3 on 2021-01-08 00:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('book_catalog', '0028_auto_20210107_2204'),
('review', '0007_auto_20210107_1804'),
('authentication', '0027_merge_20210107_0104'),
]
operations = [
migrations.AlterField(
model_name='adult',
name='Adultposses',
field=models.ManyToManyField(blank=True, related_name='adultposses', to='book_catalog.Book'),
),
migrations.AlterField(
model_name='adult',
name='FavouriteBooks',
field=models.ManyToManyField(blank=True, related_name='favourite', to='book_catalog.Book'),
),
migrations.AlterField(
model_name='adult',
name='reviews',
field=models.ManyToManyField(blank=True, to='review.Review'),
),
]
|
StarcoderdataPython
|
3550109
|
import logging
import pytest
import zenko_e2e.util as util
from ..fixtures import *
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s: %(message)s',
datefmt='%S')
@pytest.mark.conformance
def test_aws_storage(aws_loc_bucket, aws_target_bucket, testfile, objkey):
util.mark_test('AWS STORAGE LOCATION CONSTRAINT')
aws_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey, testfile, aws_loc_bucket, aws_target_bucket)
@pytest.mark.conformance
def test_gcp_storage(gcp_loc_bucket, gcp_target_bucket, testfile, objkey):
util.mark_test('GCP STORAGE LOCATION CONSTRAINT')
gcp_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey, testfile, gcp_loc_bucket, gcp_target_bucket)
@pytest.mark.conformance
def test_azure_storage(
azure_loc_bucket, azure_target_bucket, testfile, objkey):
util.mark_test('AZURE STORAGE LOCATION CONSTRAINT')
azure_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey, testfile, azure_loc_bucket, azure_target_bucket)
@pytest.mark.conformance
def test_ceph_storage(
ceph_loc_bucket, ceph_target_bucket, testfile, objkey):
util.mark_test('CEPH STORAGE LOCATION CONSTRAINT')
ceph_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey, testfile, ceph_loc_bucket, ceph_target_bucket)
@pytest.mark.skip(reason='Wasabi Not implemented in CI')
@pytest.mark.conformance
def test_wasabi_storage(
wasabi_loc_bucket, wasabi_target_bucket, testfile, objkey):
util.mark_test('WASABI STORAGE LOCATION CONSTRAINT')
wasabi_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey, testfile, wasabi_loc_bucket, wasabi_target_bucket)
@pytest.mark.skip(
reason='Digital Ocean Spaces is super flakey causing this test to fail')
@pytest.mark.conformance
def test_digital_ocean_storage(
digital_ocean_loc_bucket,
digital_ocean_target_bucket,
testfile,
objkey):
util.mark_test('DIGITAL OCEAN STORAGE LOCATION CONSTRAINT')
digital_ocean_loc_bucket.put_object(
Body=testfile,
Key=objkey
)
assert util.check_object(
objkey,
testfile,
digital_ocean_loc_bucket,
digital_ocean_target_bucket)
|
StarcoderdataPython
|
4978792
|
from bottle import *
import db
import os
port = os.environ.get("PORT", 17995)
@route('<filename>')
def server_static(filename):
return static_file(filename, root="/files")
@get('/')
def index():
return template("index")
@get('/sign_up')
def s_u():
return template("sign-up")
@post('/validator')
def validator():
username = request.forms["username"]
pwd = request.forms["password"]
print("trying", username, pwd)
login_status = db.validate(username, pwd)
print(login_status)
if login_status is not None:
return template("dynamic")
else:
return template("rolled")
@get("/user_list")
def give_list():
all_users = db.just_sql("SELECT * FROM users;")
html_str = "<ol>"
for user in all_users:
html_str += f"<li id='{user[0]}'>{user[1]}</li>"
return html_str + "</ol>"
@get("/remove_user/<username>")
def remove_user(username):
db.remove_user(username)
return give_list()
@post("/register")
def register():
username = request.forms["username"]
pwd = request.forms["password"]
db.register(username,pwd)
print("registering", username, pwd, db.validate(username,pwd))
if db.validate(username,pwd) is not None:
return template("dynamic")
else:
return "reg fail" + "<a href='/'>home</a>"
@get("/sign_up_form")
def send_signup_form():
return '''
<!-- Tabs Titles -->
<h2 class="active"> Sign In </h2>
<h2 class="inactive underlineHover" ><a href="/sign_up">Sign Up</a> </h2>
<!-- Icon -->
<div class="fadeIn first">
<img src="https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQ6QTESaLQXNJDpokdt6XZd3CmvIevt1VKmGQ&usqp=CAU" id="icon" alt="User Icon" />
</div>
<!-- Login Form -->
<form action="/validator" method = "POST">
<input type="text" id="login" class="fadeIn second" name="username" placeholder="login">
<input type="password" id="password" class="fadeIn third" name="password" placeholder="password">
<input type="submit" class="fadeIn fourth" value="Log In">
</form>
<!-- Remind Passowrd -->
<div id="formFooter">
<a class="underlineHover" href="#">Forgot Password?</a>
</div>
'''
@get('/all')
def show_all_users():
return str(db.just_sql("SELECT * FROM users;")) + "<a href='/'>all</a>"
run(host='0.0.0.0', port=port, debug = True)
|
StarcoderdataPython
|
4863823
|
<reponame>chenjiancan/pantyhose<gh_stars>1-10
# coding=utf-8
"""
remote server
Run a TCP server to proxy tcp traffic from LOCAL SERVER.
Assume that the first few bytes are `header` that carrying target address info and parse the target from it.
Once the target server was known and connection established, do that traffic relay as we do in local_server
"""
import logging
import platform
import socket
import struct
from socketserver import ThreadingTCPServer, StreamRequestHandler
# SERVER_ADDRESS = "172.16.58.3"
import select
SERVER_ADDRESS = ''
SERVER_PORT = 9988
SOCKS_ATYP_IPV4 = 0x01
SOCKS_ATYP_DOMAINNAME = 0x03
SOCKS_ATYP_IPV6 = 0x04
HEADER = b'header'
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class Handler(StreamRequestHandler):
def handle(self):
remote = self.issue_connection()
if not remote:
logger.warning("fail to issue connection")
self.server.close_request(self.request)
return
self.data_loop(self.request, remote)
self.server.close_request(self.request)
def data_loop(self, local: socket.socket, remote: socket.socket):
selector_set = [local, remote]
while True:
try:
r, _, _ = select.select(selector_set, [], [])
if local in r:
data = local.recv(4096)
if len(data) <= 0:
break
remote.sendall(data)
if remote in r:
data = remote.recv(4096)
if len(data) <= 0:
break
local.sendall(data)
except Exception as e:
logger.error("exception in data_loop: " + e)
break
remote.close()
def issue_connection(self):
header = self.connection.recv(len(HEADER))
if len(header) != len(HEADER) or header != HEADER:
logger.warning("header not match, quit")
return None
atyp = self.connection.recv(1)[0]
dst_addr_len = self.connection.recv(1)[0]
dst_addr = self.connection.recv(dst_addr_len)
if atyp == SOCKS_ATYP_DOMAINNAME:
dst_addr = dst_addr
else:
dst_addr = socket.inet_ntoa(dst_addr)
dst_port = struct.unpack("!H", self.connection.recv(2))[0]
logger.warning("issued remote: {0}, {1}".format(dst_addr, dst_port))
try:
remote = socket.socket()
remote.connect((dst_addr, dst_port))
except:
logger.warning(
"establishing remote connection error: fail to connect to {0}, {1}".format(dst_addr, dst_port))
return None
else:
return remote
def start_server():
if platform.python_version_tuple()[0] == '3' and int(platform.python_version_tuple()[1]) >= 6:
with ThreadingTCPServer((SERVER_ADDRESS, SERVER_PORT), Handler, bind_and_activate=False) as server:
server.allow_reuse_address = True
server.server_bind()
server.server_activate()
server.serve_forever()
else:
server = ThreadingTCPServer((SERVER_ADDRESS, SERVER_PORT), Handler, bind_and_activate=False)
server.allow_reuse_address = True
server.server_bind()
server.server_activate()
server.serve_forever()
server.server_close()
if __name__ == '__main__':
start_server()
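# --- illustrative client-side sketch (not part of the original server) ---
# issue_connection() above expects the proxied stream to start with:
#   b'header' | 1-byte ATYP | 1-byte address length | address bytes | 2-byte
#   big-endian port.
# A local client could build that prefix like this (host/port values are
# assumptions):
#
#   import socket, struct
#   target_host, target_port = b'example.com', 80
#   prefix = (HEADER
#             + bytes([SOCKS_ATYP_DOMAINNAME])
#             + bytes([len(target_host)])
#             + target_host
#             + struct.pack('!H', target_port))
#   s = socket.create_connection(('remote-server', SERVER_PORT))
#   s.sendall(prefix)  # after this, raw traffic is relayed in both directions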
|
StarcoderdataPython
|
9548
|
description = 'Mezei spin flipper using TTI power supply'
group = 'optional'
tango_base = 'tango://miractrl.mira.frm2:10000/mira/'
devices = dict(
dct1 = device('nicos.devices.entangle.PowerSupply',
description = 'current in first channel of supply (flipper current)',
tangodevice = tango_base + 'tti1/out1',
timeout = 1,
precision = 0.01,
),
dct2 = device('nicos.devices.entangle.PowerSupply',
description = 'current in second channel of supply (compensation current)',
tangodevice = tango_base + 'tti1/out2',
timeout = 1,
precision = 0.01,
),
flip = device('nicos.devices.polarized.MezeiFlipper',
description = 'Mezei flipper before sample (in shielding table)',
flip = 'dct1',
corr = 'dct2',
),
)
|
StarcoderdataPython
|
8175967
|
import os, json
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.result import Result, ResultByKey
class DatabaseAccess():
def __init__(self):
vcap_cloudant = json.loads(os.environ['VCAP_SERVICES'])['cloudantNoSQLDB'][0]['credentials']
self.cloudant = Cloudant(vcap_cloudant['username'], vcap_cloudant['password'], url=vcap_cloudant['url'])
self.cloudant.connect()
self.database = self.cloudant['conversations_watson']
def save(self, data):
self.database.create_document(data)
def listConversations(self):
result = Result(self.database.all_docs, include_docs=True)
return result
def __del__(self):
self.cloudant.disconnect()
|
StarcoderdataPython
|
316474
|
<gh_stars>10-100
import os
import sys
import unittest
import numpy as np
ROOT_DIR = os.path.dirname(os.getcwd())
if ROOT_DIR not in sys.path: sys.path.append(ROOT_DIR)
import DeepSparseCoding.utils.loaders as loaders
import DeepSparseCoding.utils.dataset_utils as datasets
import DeepSparseCoding.utils.run_utils as run_utils
class TestModels(unittest.TestCase):
def setUp(self):
self.dsc_dir = os.path.join(*[ROOT_DIR, 'DeepSparseCoding'])
self.model_list = loaders.get_model_list(self.dsc_dir)
self.test_params_file = os.path.join(*[self.dsc_dir, 'params', 'test_params.py'])
def test_model_loading(self):
for model_type in self.model_list:
model_type = '_'.join(model_type.split('_')[:-1]) # remove '_model' at the end
model = loaders.load_model(model_type)
params = loaders.load_params(self.test_params_file, key=model_type+'_params')
train_loader, val_loader, test_loader, data_params = datasets.load_dataset(params)
for key, value in data_params.items():
setattr(params, key, value)
model.setup(params)
### TODO - more basic test to compute gradients per model###
#def test_gradients(self):
# for model_type in self.model_list:
# model_type = ''.join(model_type.split('_')[:-1]) # remove '_model' at the end
# model = loaders.load_model(model_type)
def test_lca_ensemble_gradients(self):
params = {}
models = {}
params['lca'] = loaders.load_params(self.test_params_file, key='lca_params')
params['lca'].train_logs_per_epoch = None
params['lca'].shuffle_data = False
train_loader, val_loader, test_loader, data_params = datasets.load_dataset(params['lca'])
for key, value in data_params.items():
setattr(params['lca'], key, value)
models['lca'] = loaders.load_model(params['lca'].model_type)
models['lca'].setup(params['lca'])
models['lca'].to(params['lca'].device)
params['ensemble'] = loaders.load_params(self.test_params_file, key='ensemble_params')
for key, value in data_params.items():
setattr(params['ensemble'], key, value)
err_msg = f'\ndata_shape={params["ensemble"].data_shape}'
err_msg += f'\nnum_pixels={params["ensemble"].num_pixels}'
err_msg += f'\nbatch_size={params["ensemble"].batch_size}'
err_msg += f'\nepoch_size={params["ensemble"].epoch_size}'
models['ensemble'] = loaders.load_model(params['ensemble'].model_type)
models['ensemble'].setup(params['ensemble'])
models['ensemble'].to(params['ensemble'].device)
ensemble_state_dict = models['ensemble'].state_dict()
ensemble_state_dict['lca.w'] = models['lca'].w.clone()
models['ensemble'].load_state_dict(ensemble_state_dict)
data, target = next(iter(train_loader))
train_data_batch = models['lca'].preprocess_data(data.to(params['lca'].device))
train_target_batch = target.to(params['lca'].device)
models['lca'].optimizer.zero_grad()
for submodel in models['ensemble']:
submodel.optimizer.zero_grad()
inputs = [train_data_batch] # only the first model acts on input
for submodel in models['ensemble']:
inputs.append(submodel.get_encodings(inputs[-1]).detach())
lca_loss = models['lca'].get_total_loss((train_data_batch, train_target_batch))
ensemble_losses = [models['ensemble'].get_total_loss((inputs[0], train_target_batch), 0)]
ensemble_losses.append(models['ensemble'].get_total_loss((inputs[1], train_target_batch), 1))
lca_loss.backward()
ensemble_losses[0].backward()
ensemble_losses[1].backward()
lca_loss_val = lca_loss.cpu().detach().numpy()
lca_w_grad = models['lca'].w.grad.cpu().numpy()
ensemble_loss_val = ensemble_losses[0].cpu().detach().numpy()
ensemble_w_grad = models['ensemble'][0].w.grad.cpu().numpy()
assert lca_loss_val == ensemble_loss_val, (err_msg+'\n'
+'Losses should be equal, but are lca={lca_loss_val} and ensemble={ensemble_loss_val}')
assert np.all(lca_w_grad == ensemble_w_grad), (err_msg+'\nGrads should be equal, but are not.')
lca_pre_train_w = models['lca'].w.cpu().detach().numpy().copy()
ensemble_pre_train_w = models['ensemble'][0].w.cpu().detach().numpy().copy()
run_utils.train_epoch(1, models['lca'], train_loader)
run_utils.train_epoch(1, models['ensemble'], train_loader)
lca_w = models['lca'].w.cpu().detach().numpy().copy()
ensemble_w = models['ensemble'][0].w.cpu().detach().numpy().copy()
assert np.all(lca_pre_train_w == ensemble_pre_train_w), (err_msg+'\n'
+"lca & ensemble weights are not equal before one epoch of training")
assert not np.all(lca_pre_train_w == lca_w), (err_msg+'\n'
+"lca weights are not different from init after one epoch of training")
assert not np.all(ensemble_pre_train_w == ensemble_w), (err_msg+'\n'
+"ensemble weights are not different from init after one epoch of training")
assert np.all(lca_w == ensemble_w), (err_msg+'\n'
+"lca & ensemble weights are not equal after one epoch of training")
|
StarcoderdataPython
|
6679427
|
<reponame>ZhuoZhuoCrayon/bk-sops
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from __future__ import absolute_import
from django.test import TestCase
from pipeline_web.drawing_new.constants import POSITION
from pipeline_web.drawing_new.drawing import draw_pipeline
from pipeline_web.tests.drawing_new.data import pipeline_without_gateways
START_X, START_Y = POSITION['start']
# Average distance between nodes
SHIFT_X = int(max(POSITION['activity_size'][0], POSITION['event_size'][0], POSITION['gateway_size'][0]) * 1.2)
SHIFT_Y = int(max(POSITION['activity_size'][1], POSITION['event_size'][1], POSITION['gateway_size'][1]) * 2)
# Vertical (y) offset for start/end event nodes
EVENT_SHIFT_Y = int((POSITION['activity_size'][1] - POSITION['event_size'][1]) * 0.5)
GATEWAY_SHIFT_Y = int((POSITION['activity_size'][1] - POSITION['gateway_size'][1]) * 0.5)
class DrawingTest(TestCase):
def test_draw_pipeline_without_gateways(self):
pipeline_tree = pipeline_without_gateways
location = [
{
'status': '',
'name': '',
'y': int(POSITION['start'][1] + EVENT_SHIFT_Y),
'x': int(POSITION['start'][0]),
'type': 'startpoint',
'id': 'nodeb200c52ea911f7a74cd478e5a7dd'
},
{
'status': '',
'name': 'node1',
'y': int(START_Y),
'x': int(START_X + SHIFT_X),
'type': 'tasknode',
'id': 'nodedd50630d1029bca78ad6efaf89d4'
},
{
'status': '',
'name': 'node2',
'y': int(START_Y),
'x': int(START_X + SHIFT_X * 2),
'type': 'tasknode',
'id': 'nodeed4e2b6a13801df5c9a95cf9a233'
},
{
'status': '',
'name': 'node3',
'y': int(START_Y),
'x': int(START_X + SHIFT_X * 3),
'type': 'tasknode',
'id': 'node28b5acddd6ddd48c8d7728b48931'
},
{
'status': '',
'name': '',
'y': int(START_Y + EVENT_SHIFT_Y),
'x': int(START_X + SHIFT_X * 4),
'type': 'endpoint',
'id': 'nodecf7ef57aef3cb6a412ae2ac10516'
}
]
line = [
{
'source': {
'id': 'nodeb200c52ea911f7a74cd478e5a7dd',
'arrow': 'Right'
},
'id': 'line3d44c1d88e8720f4be5f871c9d58',
'target': {
'id': 'nodedd50630d1029bca78ad6efaf89d4',
'arrow': 'Left'
}
},
{
'source': {
'id': 'nodeed4e2b6a13801df5c9a95cf9a233',
'arrow': 'Right'
},
'id': 'line756f60ed487a3e62e0fe5f2f9e7a',
'target': {
'id': 'node28b5acddd6ddd48c8d7728b48931',
'arrow': 'Left'
}
},
{
'source': {
'id': 'node28b5acddd6ddd48c8d7728b48931',
'arrow': 'Right'
},
'id': 'linecd908c241504aa274508bd116202',
'target': {
'id': 'nodecf7ef57aef3cb6a412ae2ac10516',
'arrow': 'Left'
}
},
{
'source': {
'id': 'nodedd50630d1029bca78ad6efaf89d4',
'arrow': 'Right'
},
'id': 'line1b5f377dc55b244a30691f132086',
'target': {
'id': 'nodeed4e2b6a13801df5c9a95cf9a233',
'arrow': 'Left'
}
}
]
draw_pipeline(pipeline_tree)
self.assertEqual(pipeline_tree['location'], location)
self.assertEqual(pipeline_tree['line'], line)
|
StarcoderdataPython
|
6458328
|
<filename>nsdperfTool.py
#!/usr/bin/python
import os
import sys
import time
import getopt
import json
import math
import re
import threading
import subprocess
try:
raw_input # Python 2
PYTHON3 = False
except NameError: # Python 3
raw_input = input
PYTHON3 = True
# Global variables with default value
nsdperfPath = "/tmp/nsdperf"
toolPath = os.path.split(os.path.realpath(__file__))[0]
sshOption = "-o StrictHostKeyChecking=no -o LogLevel=error"
ssh = "ssh %s" % (sshOption)
scp = "scp %s" % (sshOption)
LOG_LOCK = threading.Lock()
timerWindow = 1200
# Regular expressions for IP
IPPATT = re.compile(r'inet\s+(?P<ip>\d+[\.]\d+[\.]\d+[\.]\d+)')
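# For example, IPPATT.findall("inet 192.168.1.10/24 brd 192.168.1.255") yields
# ["192.168.1.10"]; getLocalNode() below applies it to "ip addr show" output.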
# Subroutines
def processArgs():
if (not conf["server"] or not conf["client"]):
halt("Error: you have to provide both --client and --server")
dupNodes = [i for i in conf["server"] if i in conf["client"]]
if (dupNodes):
halt("Error: %s cannot be server and client at the same time, there "
"shouldn't be duplicated nodes in servers and clients" % dupNodes)
allowedTests = ["write", "read", "nwrite", "swrite", "sread", "rw"]
for test in conf["test"]:
if (test not in allowedTests):
halt("Error: unknown test <%s>, please choose from <%s>"
% (test, allowedTests))
if (not conf["test"]):
conf["test"] = ["read", "nwrite"]
def createExecutable(node):
rc = runcmd("%s %s \"test -d %s\"" % (ssh, node, nsdperfPath))[0]
if (rc):
chkcmd("%s %s \"mkdir -p %s\"" % (ssh, node, nsdperfPath))
else:
rc = runcmd("%s %s \"test -x %s_%s\"" %
(ssh, node, nsdperfexe, node))[0]
cmd = ""
if (rc or conf["rebuild"]):
if (conf["rebuild"]):
log("Force rebuild nsdperfexe on node %s as -r is specified" %
(node))
chkcmd("%s %s/nsdperf.C %s/makefile %s:%s/"
% (scp, toolPath, toolPath, node, nsdperfPath))
uname = chkcmd("%s %s \"uname -a\"" % (ssh, node))
if (re.search("linux", uname, re.I)):
verbsh = runcmd(
"%s %s \"test -e /usr/include/infiniband/verbs.h\"" %
(ssh, node))[0]
rdmacmh = runcmd("%s %s \"test -e /usr/include/rdma/rdma_cma.h\"" %
(ssh, node))[0]
if (verbsh or rdmacmh):
log("INFO: verbs.h or rdma_cma.h could not be found. "
"nsdperf could not support RDMA on node %s." % (node))
log("Excluding RDMA in compilation.")
cmd = "cd %s; g++ -O2 -o nsdperfexe_%s -lpthread -lrt " \
"nsdperf.C" % (nsdperfPath, node)
else:
cmd = "cd %s; g++ -O2 -DRDMA -o nsdperfexe_%s -lpthread " \
"-lrt -libverbs -lrdmacm nsdperf.C" % (nsdperfPath, node)
# elif (re.search("AIX", uname, re.I)):
# TODO: support AIX?
else:
halt("Error: cannot compile %s/nsdperf.C on node $node, "
"OS is not supported." % (nsdperfPath))
log("INFO: building nsdperfexe on node %s" % (node))
chkcmd("%s %s \"%s\"" % (ssh, node, cmd))
else:
log("INFO: skip building nsdperfexe on node %s as %s_%s already "
"exists. Use -r if you want to force rebuild." %
(node, nsdperfexe, node))
def runTest(server, client):
log("---------- Running nsdperf test with server %s client %s ----------"
% (server, client))
cliOptions = makeCmds(server, client)
allNodes = []
allNodes.extend(server)
allNodes.extend(client)
threads = []
for node in allNodes:
thr = threading.Thread(target=startServerThr, args=(node, cliOptions))
thr.start()
threads.append(thr)
for thr in threads:
thr.join()
log("Get retransmit and packet loss data before test")
netDataBefore = getNetData(client)
if (conf["rdmaPorts"]):
if (conf["rdmaPorts"][localNode]):
localOpts = cliOptions + "-r %s " % (conf["rdmaPorts"][localNode])
else:
localOpts = cliOptions
output = chkcmdLiveOutput(
"%s_%s -i %s %s" % (nsdperfexe, localNode, nsdperfCmdFile, localOpts))
log("Get retransmit and packet loss data after test")
netDataAfter = getNetData(client)
netData = {}
for node in netDataBefore.keys():
netData[node] = {}
for key in netDataBefore[node].keys():
netData[node][key] = int(netDataAfter[node][key]) - \
int(netDataBefore[node][key])
parseOutput(server, client, output, netData)
def makeCmds(server, client):
cmdsInFile = ""
cliOptions = ""
joinStr = " "
servers = joinStr.join(server)
clients = joinStr.join(client)
# File based options to nsdperf
cmdsInFile = "server %s\nclient %s\n" % (servers, clients)
if (conf["debugLevel"]):
cmdsInFile = cmdsInFile + "debug %s\n" % (conf["debugLevel"])
cliOptions = cliOptions + "-d "
if (conf["ttime"]):
cmdsInFile = cmdsInFile + "ttime %s\n" % (conf["ttime"])
if (conf["testerThr"]):
cmdsInFile = cmdsInFile + "threads %s\n" % (conf["testerThr"])
if (conf["buffsize"]):
cmdsInFile = cmdsInFile + "buffsize %s\n" % (conf["buffsize"])
if (conf["socksize"]):
cmdsInFile = cmdsInFile + "socksize %s\n" % (conf["socksize"])
if (conf["rdmaPorts"]):
cmdsInFile = cmdsInFile + "rdma on\n"
for test in conf["test"]:
cmdsInFile = cmdsInFile + "test %s\n" % (test)
cmdsInFile = cmdsInFile + "killall\nquit"
cmdFile = open(nsdperfCmdFile, 'w')
cmdFile.write(cmdsInFile)
cmdFile.close()
# Parameters passed to nsdperf on the command line (except debugLevel)
if (conf["receiverThr"]):
cliOptions = cliOptions + "-t %s " % (conf["receiverThr"])
if (conf["workerThr"]):
cliOptions = cliOptions + "-w %s " % (conf["workerThr"])
return cliOptions
def startServerThr(node, cliOptions):
killer(node, "nsdperfexe")
# Give some time to die
time.sleep(5)
if (conf["rdmaPorts"]):
nodeOpts = cliOptions + "-r %s " % (conf["rdmaPorts"][node])
else:
nodeOpts = cliOptions
chkcmd("%s %s \"%s_%s -s %s > %s/server_thread_log 2>&1 &\""
% (ssh, node, nsdperfexe, node, nodeOpts, nsdperfPath))
# Give some time to start
time.sleep(5)
def parseOutput(server, client, output, netData):
resultFile = open(nsdperfResultFile, 'a')
pattern = r"(\d+)-(\d+) (\w+) ([\d\.]+) MB/sec \(([\d\.]+) msg/sec\), " \
r"cli (\d+\%) srv (\d+\%), time (\d+), buff (\d+)(.*)(\s*?(\S+ " \
r"network delay times[\S\s]*?msec nevents\s*(\s*\d+ +\d+\s*)*\s+)+)"
resultSize = 0
for match in (re.finditer(pattern, output)):
result = {"server(s)": server, "client(s)": client}
result["nServer"] = match.group(1)
result["nClient"] = match.group(2)
result["test"] = match.group(3)
result["throughput(MB/sec)"] = match.group(4)
result["throughput(msg/sec)"] = match.group(5)
result["cli%"] = match.group(6)
result["srv%"] = match.group(7)
result["testTime"] = match.group(8)
result["buffsize"] = match.group(9)
sockThInfo = match.group(10)
sock = re.search(r"sock (\d+)", sockThInfo)
if (sock):
result["socksize"] = sock.group(1)
th = re.search(r"th (\d+)", sockThInfo)
if (th):
result["nTesterThread"] = th.group(1)
result["networkDelay"] = []
delay = {}
allDelayInfo = match.group(11)
oneDelayPattern = r"\S+ network delay times[\S\s]*?msec nevents" \
r"\s*(\s*\d+ +\d+\s*)*"
for oneDelay in (re.finditer(oneDelayPattern, allDelayInfo)):
detailedDelayPattern = r"(\S+) network delay times \(average " \
r"([\d\.]+) msec, median ([\d\.]+) msec, std deviation " \
r"([\d\.]+) msec\)\s+msec nevents\s*((\s*\d+ +\d+\s*)*)"
detailedDelay = re.search(detailedDelayPattern, oneDelay.group())
if (detailedDelay):
delay = {}
delay["client"] = detailedDelay.group(1)
delay["average"] = detailedDelay.group(2)
delay["median"] = detailedDelay.group(3)
delay["standardDeviation"] = detailedDelay.group(4)
delay["histogram"] = {}
allEvents = detailedDelay.group(5)
eventPattern = r"(\d+) +(\d+)"
for event in (re.finditer(eventPattern, allEvents)):
delay["histogram"][event.group(1)] = event.group(2)
result["networkDelay"].append(delay)
else:
halt("Error, cannot match for network delay info")
result["netData"] = netData
outputJson = json.dumps(result)
resultSize += sys.getsizeof(outputJson)
resultFile.write(outputJson)
resultFile.close()
# Detect nsdperf test errors
if (not resultSize):
halt("Error, nsdperf test seems failed, please check command output")
def getLocalNode(allNodes):
localNode = None
rc, ipaddr_output, ec = runcmd("ip addr show")
if (rc == 0):
# create a list of allip addresses for local node
iplist = IPPATT.findall(ipaddr_output)
# check for match with one of input ip addresses
for node in allNodes:
if node in iplist:
localNode = node
break
if localNode is None:
halt("Error: cannot decide local node")
return localNode
def getNodeDev(allNodes):
# TODO: add support for hostname?
netDev = {}
for node in allNodes:
ipInfo = chkcmd("%s %s ip -f inet addr show" % (ssh, node))
ipPattern = r"[\S\s]*\d+: (\w+): [\S\s]*?inet %s" % (node)
try:
netDev[node] = re.search(ipPattern, ipInfo).group(1)
log("netDev: %s -> %s" % (node, netDev[node]))
except Exception:
halt("Error, cannot match for network device of node %s in "
"\"ip addr show\" output" % (node))
return netDev
def getNetData(allNodes):
netData = {}
for node in allNodes:
# TODO
netData[node] = {}
retransInfo = chkcmd(
"%s %s nstat -az TcpRetransSegs" % (ssh, node))
try:
netData[node]["retransmit"] = re.search(
r"TcpRetransSegs *(\d+)", retransInfo).group(1)
except Exception:
halt("Error, cannot match for retransmit data in "
"\"nstat -az TcpRetransSegs\" output on node %s" % (node))
ipLinkInfo = chkcmd(
"%s %s \"ip -s link show %s\"" % (ssh, node, netDev[node]))
ipLinkFormat = r"RX: bytes packets errors dropped overrun mcast" \
r"\s+\d+\s+\d+\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+TX: bytes " \
r"packets errors dropped carrier collsns\s+\d+\s+\d+\s+(\d+)" \
r"\s+(\d+)\s+(\d+)\s+(\d+)"
ipLink = re.search(ipLinkFormat, ipLinkInfo)
if (not ipLink):
halt("Error, cannot match for network related data in "
"\"ip -s link\" output on node %s" % (node))
netData[node]["rxErrors"] = ipLink.group(1)
netData[node]["rxDropped"] = ipLink.group(2)
netData[node]["rxOverrun"] = ipLink.group(3)
netData[node]["rxMcast"] = ipLink.group(4)
netData[node]["txErrors"] = ipLink.group(5)
netData[node]["txDropped"] = ipLink.group(6)
netData[node]["txCarrier"] = ipLink.group(7)
netData[node]["txCollsns"] = ipLink.group(8)
return netData
def shortUsage():
print("Usage: %s -s|--server server1,server2,... "
"-c|--client client1,client2,..." % (os.path.realpath(__file__)))
print(" [-t|--test test1,test2,...] "
"[-l|--testTime testTimeInSec]")
print(" [-b|--buffsize buffsizeInByte] "
"[-k|--socksize sockSizeInByte]")
print(" [-R|--receiverThr nReceiverThread] "
"[-W|--workerThr nWorkerThread] [-T|--testerThr nTesterThread]")
print(" [-r|--rebuild] [-d|--directory dir] [-h|--help]")
print(" [-p|--rdmaPorts]")
def longUsage():
print("Usage: %s -s|--server server1,server2,... "
"-c|--client client1,client2,..." % (os.path.realpath(__file__)))
print(" [-t|--test test1,test2,...] "
"[-l|--testTime testTimeInSec]")
print(" [-b|--buffsize buffsizeInByte] "
"[-k|--socksize sockSizeInByte]")
print(" [-R|--receiverThr nReceiverThread] "
"[-W|--workerThr nWorkerThread] [-T|--testerThr nTesterThread]")
print(" [-r|--rebuild] [-d|--directory dir] [-h|--help]")
print(" [--RDMA]")
print("")
print("This tool is a wrapper over nsdperf.C which helps to "
"automatically build and execute nsdperf tests with given "
"configurations.")
print("All needed files and also test results in json format will be "
"put under %s." % (nsdperfPath))
print("")
print("Node settings:")
print("-s|--server server1,server2,...: server node list "
"saparated by comma")
print("-c|--client client1,client2,...: client node list "
"saparated by comma")
print("")
print("Test settings:")
print("-t|--test test1,test2,...: tests saparated by comma")
print("-l|--testTime testTimeInSec: test time duration in seconds")
print("Accepted tests: write|read|nwrite|swrite|sread|rw, default is "
"\"read,nwrite\"")
print("")
print("Buffer settings:")
print("-b|--buffsize buffsizeInByte: test buffer size in bytes")
print("-k|--socksize sockSizeInByte: socket buffer size in bytes")
print("")
print("Thread settings:")
print("-R|--receiverThr nReceiverThread: receiver thread number")
print("-W|--workerThr nWorkerThread: worker thread number")
print("-T|--testerThr nTesterThread: tester thread number")
print("")
print("Others:")
print("-r|--rebuild: force rebuild the nsdperf executable before tests")
print("-d|--directory dir: absolute path of local directory on "
"each node to save nsdperf executable and output files, "
"default is \"/tmp/nsdperf\"")
print("-p|--rdmaPorts '{\"node1\": \"port11,port12\", "
"\"node2\": \"port21,port22\", ...}': "
"set different RDMA ports for each node and enable RDMA tests. ")
print("OR -p|--rdmaPorts port1,port2: "
"set same RDMA ports for all nodes and enable RDMA tests.")
print("-h|--help: print this help message")
def log(msg):
LOG_LOCK.acquire()
timeStamp = getTimeStamp()
tid = threading.currentThread().name
print("%s: %s: %s" % (timeStamp, tid, msg))
LOG_LOCK.release()
def getTimeStamp():
return time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime())
def halt(msg):
log('\033[91m' + msg + '\033[0m')
sys.exit(1)
def chkcmdLiveOutput(cmd):
cmd = cmd.rstrip()
log("CMD: %s" % (cmd))
p = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
lines = []
rc = p.poll()
while True:
line = p.stdout.readline()
rc = p.poll()
line = line.rstrip()
if (rc is not None and (line == '' or line == b'')):
break
if PYTHON3:
strline = ''.join(chr(x) for x in line)
log(strline)
lines.append(strline)
else:
log(line)
lines.append(line)
if (rc):
halt("Error, command failed with rc = %s" % (rc))
out = '\n'
return out.join(lines)
def chkcmd(cmd):
[rc, out, err] = runcmd(cmd)
out = out.rstrip()
err = err.rstrip()
if (rc):
halt("Error, command <%s> get rc <%s> output <%s> error <%s>"
% (cmd, rc, out, err))
return out
def runcmd(cmd):
cmd = cmd.rstrip()
log("CMD: %s" % (cmd))
if (re.search("2>&1", str(cmd))):
cmd = cmd + " 2>&1"
p = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if PYTHON3:
try:
[out, err] = p.communicate(timeout = int(conf["ttime"]) + timerWindow)
except subprocess.TimeoutExpired:
killProcess(p)
[out, err] = p.communicate()
rc = p.wait()
strout = ''.join(chr(x) for x in out)
return [rc, strout, err]
else:
timer = threading.Timer(int(conf["ttime"]) + timerWindow, killProcess, [p])
try:
timer.start()
[out, err] = p.communicate()
finally:
timer.cancel()
rc = p.wait()
return [rc, out, err]
def killProcess(process):
log("Warning: test command not completing within (ttime + timerWindow), killing the subprocess")
process.kill()
def killer(node, string):
runcmd("%s %s killall -r .*%s.*" % (ssh, node, string))
# ========== main ==========
# Obtain command line options
conf = {'server': '', 'client': '', 'test': '', 'ttime': '', 'buffsize': '',
'socksize': '', 'receiverThr': '', 'workerThr': '', 'testerThr': '',
'rebuild': '', 'directory': '', 'rdmaPorts': '', 'debugLevel': ''}
try:
opts, args = getopt.getopt(
sys.argv[1:], "hs:c:n:t:l:b:k:R:W:T:rd:p:v",
["help", "server=", "client=", "test=", "testTime=", "buffsize=",
"socksize=", "nReciverThr=", "nWorkerThr=", "nTesterThr=", "rebuild",
"directory=", "rdmaPorts=", "debugLevel"])
except getopt.GetoptError:
shortUsage()
sys.exit(1)
for op, value in opts:
if op in ("-h", "--help"):
longUsage()
sys.exit(0)
elif op in ("-s", "--server"):
conf["server"] = value.split(",")
elif op in ("-c", "--client"):
conf["client"] = value.split(",")
elif op in ("-t", "--test"):
conf["test"] = value.split(",")
elif op in ("-l", "--testTime"):
conf["ttime"] = value
elif op in ("-b", "--buffsize"):
conf["buffsize"] = value
elif op in ("-k", "--socksize"):
conf["socksize"] = value
elif op in ("-R", "--nReciverThr"):
conf["receiverThr"] = value
elif op in ("-W", "--nWorkerThr"):
conf["workerThr"] = value
elif op in ("-T", "--nTesterThr"):
conf["testerThr"] = value
elif op in ("-r", "--rebuild"):
conf["rebuild"] = True
elif op in ("-d", "--directory"):
nsdperfPath = value
elif op in ("-p", "--rdmaPorts"):
try:
conf["rdmaPorts"] = json.loads(str(value))
except Exception as e:
log("I get non-json format --rdmaPorts input: <%s>" % value)
log("Set it to be the RDMA ports for all nodes")
rdmaPorts = {}
for node in conf["server"]:
rdmaPorts[node] = value
for node in conf["client"]:
rdmaPorts[node] = value
conf["rdmaPorts"] = rdmaPorts
elif op in ("-v", "--debugLevel"):
conf["debugLevel"] = 3
else:
log("Error: Unknown option %s" % (op))
shortUsage()
sys.exit(1)
# process input arguments
processArgs()
# global variables that needs processing based on input args
nsdperfCmdFile = "%s/nsdperfCmd" % (nsdperfPath)
nsdperfResultFile = "%s/nsdperfResult.json" % (nsdperfPath)
nsdperfexe = "%s/nsdperfexe" % (nsdperfPath)
# allNodes
allNodes = []
allNodes.extend(conf["server"])
allNodes.extend(conf["client"])
# localNode
localNode = getLocalNode(allNodes)
# netDev
netDev = getNodeDev(allNodes)
# create nsdperfexe executable on all nodes if needed
threads = []
for node in allNodes:
thr = threading.Thread(target=createExecutable, args=(node,))
thr.start()
threads.append(thr)
for thr in threads:
thr.join()
# delete old result file before test
runcmd("rm -rf %s" % (nsdperfResultFile))
# run test
runTest(conf["server"], conf["client"])
log("========== All tests completed, congratulations! ==========")
log("========== Test result with json format is in file <%s> ==========" %
(nsdperfResultFile))
|
StarcoderdataPython
|
394051
|
<reponame>Gornak40/sicily<gh_stars>0
from engine import *
from random import choice, randrange
from results import *
take = lambda arr: arr.pop(randrange(len(arr)))
redCards = CARDS.copy()
blackCards = CARDS.copy()
# traitors turn
redTraitor = take(redCards)
blackTraitor = take(blackCards)
redCards.append(blackTraitor)
blackCards.append(redTraitor)
# making hands
redHand = [take(redCards) for _ in range(3)]
blackHand = [take(blackCards) for _ in range(3)]
Game = Game()
# game process
while Game.redScore < 4 and Game.blackScore < 4:
Game.blackCard = take(blackHand)
blackHand.append(take(blackCards)) if blackCards else None
print('Gang Info:', Game.blackCard) if Game.redGangBoost else None
print(redHand)
redNum = int(input())
ind = [i for i in range(len(redHand)) if redHand[i].power == redNum][0]
Game.redCard = redHand.pop(ind)
redHand.append(take(redCards)) if redCards else None
fight(Game)
print('Scores:', Game.redScore, Game.blackScore)
print('HoldScore:', Game.holdScore)
# print('GangBoosts:', Game.redGangBoost, Game.blackGangBoost)
# print('Boosts:', Game.redBoost, Game.blackBoost)
print()
# results
print('{} PLAYER IS THE WINNER'.format('RED' if Game.redScore > Game.blackScore else 'BLACK'))
|
StarcoderdataPython
|
9717752
|
print('-=' * 10, '<NAME>', '-=' * 10)
preco = float(input('Preço das Compras: R$ '))
print('''[1] à vista Dinheiro/cheque
[2] à vista cartão \n[3] 2x no cartão \n[4] 3x no cartão.''')
escolha = int(input('Qual é a opção: '))
if escolha == 1:
valor = preco - (preco * 0.1)
print(f'O valor da sua compra é {preco}R$ mas com desconto de 10%, o valor será {valor}R$.')
elif escolha == 2:
valor = preco - (preco * 0.05)
print(f'O valor da sua compra é {preco}R$ mas com desconto de 5%, o valor será {valor}R$.')
elif escolha == 3:
print(f'O valor da sua compra é {preco}R$.')
elif escolha == 4:
parcela = int(input('Quantas parcelas? '))
valor = preco + (preco * 0.2)
print(f'Sua compra será parcelada em {parcela}x de {valor / parcela:.2f} com Juros.')
print(f'A sua compra de R${preco} vai custar R${valor} no final.')
else:
print('Opção inválida.')
|
StarcoderdataPython
|
5141909
|
'''
An example of solving global optimization problem
with guaranteed precision |f-f*| < fTol
'''
from numpy import zeros
from FuncDesigner import *
from openopt import *
a, b, c = oovars(3) # create 3 variables
d = oovars(4) # create 4 variables in a single vector
# some oofuns
f1 = cos(5*a) + 0.2*(b-0.2)**2 + exp(4*abs(c-0.9))
f2 = 0.05*sum(sin(d-0.1*(a+b+c))) + 3 * abs(d[0] - 0.2)
# objective function:
F = f1 + f2 + 4 * abs(d[2] - 0.2)
startPoint = {a:0.5, b:0.50123, c:0.5, d: zeros(4)}
# set box-bound domain:
constraints = [a>0, a<1, b>0, b<1, c>0, c<1, d>-1, d<1, d[3] < 0.5]
# set some general constraints:
constraints += [
(a*b + sin(c) < 0.5)(tol=1e-5),
d < cos(a) + 0.5, # default tol 10^-6
cos(d[0]) +a < sin(d[3]) + b,
(d[1] + c == 0.7)(tol=1e-3)
]
# choose required objective function tolerance:
# |f-f*| < fTol, where f* is objective function value in optimal point
fTol = 0.0005
solver='interalg'
# another global solver to compare (it cannot handle required tolerance fTol)
#solver=oosolver('de', iprint=10, maxFunEvals = 10000, maxIter = 1500)
# or this solver with some non-default parameters:
#solver=oosolver('interalg', fStart = 5.56, maxIter = 1000,maxNodes = 1000000, maxActiveNodes = 15)
p = GLP(F, startPoint, fTol = fTol, constraints = constraints, dataHandling='raw')
r = p.minimize(solver, iprint = 100)
print(r(a, b, c, d))
'''
------------------------- OpenOpt 0.37 -------------------------
solver: interalg problem: unnamed type: GLP
iter objFunVal log10(MaxResidual/ConTol)
0 5.540e+00 6.00
OpenOpt info: Solution with required tolerance 5.0e-04
is guarantied (obtained precision: 4.9e-04)
100 5.558e+00 -0.12
istop: 1000 (solution has been obtained)
Solver: Time Elapsed = 6.06 CPU Time Elapsed = 6.04
objFunValue: 5.5583354 (feasible, max(residuals/requiredTolerances) = 0.754547)
[7.62939453125e-06, 0.501708984375, 0.52358627319335938, [0.199951171875, 0.1756591796875, 0.199951171875, 0.4989013671875]]
'''
|
StarcoderdataPython
|
8051927
|
<reponame>HKBU-HPML/gtopkssgd
# -*- coding: utf-8 -*-
from __future__ import print_function
def update_fontsize(ax, fontsize=12.):
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(fontsize)
def autolabel(rects, ax, label, rotation=90):
"""
Attach a text label above each bar displaying its height
"""
for rect in rects:
height = rect.get_y() + rect.get_height()
ax.text(rect.get_x() + rect.get_width()/2., 1.03*height,
label,
ha='center', va='bottom', rotation=rotation)
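# Minimal usage sketch (not part of the original module): how these helpers are
# typically wired into a matplotlib bar chart; bar heights and label text are made up.
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    rects = ax.bar(range(3), [1.0, 2.5, 1.7], width=0.5)
    autolabel(rects, ax, label='example', rotation=90)  # annotate each bar
    update_fontsize(ax, fontsize=14.)
    fig.savefig('example_bars.pdf')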
|
StarcoderdataPython
|
5092075
|
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from functools import reduce
from xosconfig import Config
def f7(seq):
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
def elim_dups(backend_str):
strs = backend_str.split(" // ")
strs2 = f7(strs)
return " // ".join(strs2)
def deepgetattr(obj, attr):
return reduce(getattr, attr.split("."), obj)
def obj_class_name(obj):
return getattr(obj, "model_name", obj.__class__.__name__)
class InnocuousException(Exception):
pass
class DeferredException(Exception):
pass
class FailedDependency(Exception):
pass
class SyncStep(object):
""" An XOS Sync step.
Attributes:
psmodel Model name the step synchronizes
dependencies list of names of models that must be synchronized first if the current model depends on them
"""
slow = False
def get_prop(self, prop):
# NOTE config_dir is never defined, is this used?
sync_config_dir = Config.get("config_dir")
prop_config_path = "/".join([sync_config_dir, self.name, prop])
return open(prop_config_path).read().rstrip()
def __init__(self, **args):
"""Initialize a sync step
Keyword arguments:
model_accessor: class used to access models
driver: used by openstack synchronizer (DEPRECATED)
error_map: used by openstack synchronizer (DEPRECATED)
"""
self.model_accessor = args.get("model_accessor")
self.driver = args.get("driver")
self.error_map = args.get("error_map")
assert self.model_accessor is not None
try:
self.soft_deadline = int(self.get_prop("soft_deadline_seconds"))
except BaseException:
self.soft_deadline = 5 # 5 seconds
if "log" in args:
self.log = args.get("log")
return
@property
def observes_classes(self):
""" Return a list of classes that this syncstep observes. The "observes" class member can be either a list of
items or a single item. Those items may be either classes or names of classes. This function always returns
a list of classes.
"""
if not self.observes:
return []
if isinstance(self.observes, list):
observes = self.observes
else:
observes = [self.observes]
result = []
for class_or_name in observes:
if isinstance(class_or_name, str):
result.append(self.model_accessor.get_model_class(class_or_name))
else:
result.append(class_or_name)
return result
def fetch_pending(self, deletion=False):
# This is the most common implementation of fetch_pending
# Steps should override it if they have their own logic
# for figuring out what objects are outstanding.
return self.model_accessor.fetch_pending(self.observes_classes, deletion)
def sync_record(self, o):
self.log.debug("In abstract sync record", **o.tologdict())
# This method should be overridden by the service
def delete_record(self, o):
self.log.debug("In abstract delete record", **o.tologdict())
# This method should be overridden by the service
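# Hedged sketch (not part of the original module): a concrete step would
# subclass SyncStep, declare what it observes, and override the two record
# hooks. "Instance" and do_backend_call() are illustrative placeholders only.
#
# class SyncInstances(SyncStep):
#     observes = "Instance"              # resolved through observes_classes
#
#     def sync_record(self, o):
#         do_backend_call(o)             # push the model object to the backend
#
#     def delete_record(self, o):
#         do_backend_call(o, delete=True)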
|
StarcoderdataPython
|
12810803
|
"""Tests for the once bitten strategy."""
import random
import axelrod
from .test_player import TestPlayer
C, D = axelrod.Action.C, axelrod.Action.D
class TestOnceBitten(TestPlayer):
name = "<NAME>"
player = axelrod.OnceBitten
expected_classifier = {
"memory_depth": 12,
"stochastic": False,
"makes_use_of": set(),
"long_run_time": False,
"inspects_source": False,
"manipulates_source": False,
"manipulates_state": False,
}
def test_strategy(self):
"""If opponent defects at any point then the player will defect
forever."""
# Become grudged if the opponent defects twice in a row
opponent = axelrod.MockPlayer([C, C, C, D])
actions = [(C, C), (C, C), (C, C), (C, D), (C, C)]
self.versus_test(
opponent=opponent,
expected_actions=actions,
attrs={"grudged": False, "grudge_memory": 0},
)
opponent = axelrod.MockPlayer([C, C, C, D, D, D])
actions = [
(C, C),
(C, C),
(C, C),
(C, D),
(C, D),
(D, D),
(D, C),
(D, C),
(D, C),
(D, D),
(D, D),
]
self.versus_test(
opponent=opponent,
expected_actions=actions,
attrs={"grudged": True, "grudge_memory": 5},
)
# After 10 rounds of being grudged: forgives
opponent = axelrod.MockPlayer([C, D, D, C] + [C] * 10)
actions = [(C, C), (C, D), (C, D), (D, C)] + [(D, C)] * 10 + [(C, C)]
self.versus_test(
opponent=opponent,
expected_actions=actions,
attrs={"grudged": False, "grudge_memory": 0},
)
def test_reset(self):
"""Check that grudged gets reset properly"""
p1 = self.player()
p2 = axelrod.Defector()
p1.play(p2)
p1.play(p2)
p1.play(p2)
self.assertTrue(p1.grudged)
p1.reset()
self.assertFalse(p1.grudged)
class TestFoolMeOnce(TestPlayer):
name = "Fool Me Once"
player = axelrod.FoolMeOnce
expected_classifier = {
"memory_depth": float("inf"), # Long memory
"stochastic": False,
"makes_use_of": set(),
"long_run_time": False,
"inspects_source": False,
"manipulates_source": False,
"manipulates_state": False,
}
def test_strategy(self):
# If opponent defects more than once, defect forever
actions = [(C, C)] * 10
self.versus_test(opponent=axelrod.Cooperator(), expected_actions=actions)
opponent = axelrod.MockPlayer([D] + [C] * 9)
actions = [(C, D)] + [(C, C)] * 9
self.versus_test(opponent=opponent, expected_actions=actions)
actions = [(C, D)] * 2 + [(D, D)] * 8
self.versus_test(opponent=axelrod.Defector(), expected_actions=actions)
opponent = axelrod.MockPlayer([D, D] + [C] * 9)
actions = [(C, D)] * 2 + [(D, C)] * 8
self.versus_test(opponent=opponent, expected_actions=actions)
class TestForgetfulFoolMeOnce(TestPlayer):
name = "Forgetful Fool Me Once: 0.05"
player = axelrod.ForgetfulFoolMeOnce
expected_classifier = {
"memory_depth": float("inf"), # Long memory
"stochastic": True,
"makes_use_of": set(),
"long_run_time": False,
"inspects_source": False,
"manipulates_source": False,
"manipulates_state": False,
}
def test_strategy(self):
# Test that will forgive one D but will grudge after 2 Ds, randomly
# forgets count.
actions = [(C, C), (C, D), (C, C), (C, D), (D, C)]
self.versus_test(
opponent=axelrod.Alternator(),
expected_actions=actions,
seed=2,
attrs={"D_count": 2},
)
# Sometimes the count is eventually forgotten:
actions = [(C, D), (C, D)] + [(D, D)] * 18 + [(C, D)]
self.versus_test(
opponent=axelrod.Defector(),
expected_actions=actions,
seed=2,
attrs={"D_count": 0},
)
|
StarcoderdataPython
|
4278
|
<filename>Evaluation/batch_detection.py
'''
Author: <NAME>
Start date: 15th May 2016
Purpose: read frame-level predictions and process them to produce a label per video
'''
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pickle
import os
import time,json
import pylab as plt
from eval_detection import ANETdetection
import scipy.io as sio
#######baseDir = "/mnt/sun-alpha/actnet/";
baseDir = "/data/shared/solar-machines/actnet/";
#baseDir = "/mnt/solar-machines/actnet/";
########imgDir = "/mnt/sun-alpha/actnet/rgb-images/";
######## imgDir = "/mnt/DATADISK2/ss-workspace/actnet/rgb-images/";
annotPklFile = "../Evaluation/data/actNet200-V1-3.pkl"
def getscore(ground_truth_filename, prediction_filename,
tiou_thr=0.5,subset='validation', verbose=True, check_status=True):
anet_detection = ANETdetection(ground_truth_filename, prediction_filename,
subset=subset, tiou_thr=tiou_thr,
verbose=verbose, check_status=True)
ap = anet_detection.evaluate()
return ap
def saveAPs():
K = 5;
subset = 'validation';#,'testing']:
featType = 'IMS-MBH'
# savename = '{}data/predictions-{}-{}.pkl'.format(baseDir,subset,featType)
# with open(savename,'r') as f:
# data = pickle.load(f)
outfilename = '{}results/classification/{}-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3))
gtfiile = 'data/activity_net.v1-3.min.json'
ap = getscore(gtfiile, outfilename)
print ap
print np.mean(ap)
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Results saved in ',savename
with open(savename,'w') as f:
pickle.dump(ap,f)
def plotAPs():
K = 1;
subset = 'validation';#,'testing']:
aps = [];
count = 0;
colors = ['red','green','blue']
for featType in ['IMS-MBH','IMS','MBH']:
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Loading results from ', savename
with open(savename,'r') as f:
ap = pickle.load(f)
ind = np.arange(count,600+count,3)
plt.bar(ind,ap,width=0.5,color=colors[count])
count += 1
plt.show()
def evalAll():
K = 10;
subset = 'validation';#,'testing']:
gtfiile = 'data/activity_net.v1-3.min.json'
result = []; count = 0;
featType = 'C3D-BIN-BOOST-LONG'
# outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-001')
for alpha in [1,3,5,]:
outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-{}'.format(str(int(alpha*10)).zfill(3)))
print 'Evaluating results from ',outfilename
for tioth in [0.5,0.4,0.3,0.2,0.1]:
ap = getscore(gtfiile,outfilename,tiou_thr=tioth)
result.append([alpha,tioth,np.mean(ap)])
result = np.array(result)
sio.savemat('result-{}.mat'.format(featType),mdict={'ap':ap})
if __name__=="__main__":
#processOnePredictions()
# saveAPs()
# plotAPs()
evalAll()
|
StarcoderdataPython
|
6509040
|
<reponame>tencentcloudapi-test/tencentcloud-cli
# -*- coding: utf-8 -*-
import os
import sys
import six
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.dbbrain.v20210527 import dbbrain_client as dbbrain_client_v20210527
from tencentcloud.dbbrain.v20210527 import models as models_v20210527
from tencentcloud.dbbrain.v20191016 import dbbrain_client as dbbrain_client_v20191016
from tencentcloud.dbbrain.v20191016 import models as models_v20191016
from jmespath import search
import time
def doDescribeSecurityAuditLogDownloadUrls(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSecurityAuditLogDownloadUrlsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeSecurityAuditLogDownloadUrls(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMailProfile(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeMailProfileRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeMailProfile(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSchedulerMailProfile(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSchedulerMailProfileRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateSchedulerMailProfile(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTopSpaceSchemas(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTopSpaceSchemasRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeTopSpaceSchemas(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMySqlProcessList(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeMySqlProcessListRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeMySqlProcessList(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMailProfile(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateMailProfileRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateMailProfile(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBDiagReportTasks(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBDiagReportTasksRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDBDiagReportTasks(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAllUserGroup(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAllUserGroupRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeAllUserGroup(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBDiagEvents(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBDiagEventsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDBDiagEvents(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSlowLogTimeSeriesStats(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSlowLogTimeSeriesStatsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeSlowLogTimeSeriesStats(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSlowLogUserHostStats(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSlowLogUserHostStatsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeSlowLogUserHostStats(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTopSpaceTables(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTopSpaceTablesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeTopSpaceTables(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBSpaceStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBSpaceStatusRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDBSpaceStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxySessionKillTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProxySessionKillTaskRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateProxySessionKillTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHealthScore(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeHealthScoreRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeHealthScore(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityAuditLogExportTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSecurityAuditLogExportTaskRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateSecurityAuditLogExportTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityAuditLogExportTasks(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteSecurityAuditLogExportTasksRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteSecurityAuditLogExportTasks(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSlowLogTopSqls(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSlowLogTopSqlsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeSlowLogTopSqls(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityAuditLogExportTasks(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSecurityAuditLogExportTasksRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeSecurityAuditLogExportTasks(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAllUserContact(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAllUserContactRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeAllUserContact(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBDiagEvent(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBDiagEventRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDBDiagEvent(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBDiagHistory(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDBDiagHistoryRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDBDiagHistory(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDBDiagReportTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateDBDiagReportTaskRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateDBDiagReportTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDiagDBInstances(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDiagDBInstancesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeDiagDBInstances(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddUserContact(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.AddUserContactRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.AddUserContact(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTopSpaceSchemaTimeSeries(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTopSpaceSchemaTimeSeriesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeTopSpaceSchemaTimeSeries(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateKillTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateKillTaskRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateKillTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUserSqlAdvice(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeUserSqlAdviceRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeUserSqlAdvice(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTopSpaceTableTimeSeries(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTopSpaceTableTimeSeriesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeTopSpaceTableTimeSeries(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDiagDBInstanceConf(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyDiagDBInstanceConfRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ModifyDiagDBInstanceConf(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doKillMySqlThreads(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.KillMySqlThreadsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.KillMySqlThreads(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDBDiagReportUrl(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateDBDiagReportUrlRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateDBDiagReportUrl(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxySessionKillTasks(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.DbbrainClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProxySessionKillTasksRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeProxySessionKillTasks(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
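# Map each supported API version to its generated client module (CLIENT_MAP)
# and to its request/response models (MODELS_MAP).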
CLIENT_MAP = {
"v20210527": dbbrain_client_v20210527,
"v20191016": dbbrain_client_v20191016,
}
MODELS_MAP = {
"v20210527": models_v20210527,
"v20191016": models_v20191016,
}
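# Map CLI action names to the handler functions defined above.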
ACTION_MAP = {
"DescribeSecurityAuditLogDownloadUrls": doDescribeSecurityAuditLogDownloadUrls,
"DescribeMailProfile": doDescribeMailProfile,
"CreateSchedulerMailProfile": doCreateSchedulerMailProfile,
"DescribeTopSpaceSchemas": doDescribeTopSpaceSchemas,
"DescribeMySqlProcessList": doDescribeMySqlProcessList,
"CreateMailProfile": doCreateMailProfile,
"DescribeDBDiagReportTasks": doDescribeDBDiagReportTasks,
"DescribeAllUserGroup": doDescribeAllUserGroup,
"DescribeDBDiagEvents": doDescribeDBDiagEvents,
"DescribeSlowLogTimeSeriesStats": doDescribeSlowLogTimeSeriesStats,
"DescribeSlowLogUserHostStats": doDescribeSlowLogUserHostStats,
"DescribeTopSpaceTables": doDescribeTopSpaceTables,
"DescribeDBSpaceStatus": doDescribeDBSpaceStatus,
"CreateProxySessionKillTask": doCreateProxySessionKillTask,
"DescribeHealthScore": doDescribeHealthScore,
"CreateSecurityAuditLogExportTask": doCreateSecurityAuditLogExportTask,
"DeleteSecurityAuditLogExportTasks": doDeleteSecurityAuditLogExportTasks,
"DescribeSlowLogTopSqls": doDescribeSlowLogTopSqls,
"DescribeSecurityAuditLogExportTasks": doDescribeSecurityAuditLogExportTasks,
"DescribeAllUserContact": doDescribeAllUserContact,
"DescribeDBDiagEvent": doDescribeDBDiagEvent,
"DescribeDBDiagHistory": doDescribeDBDiagHistory,
"CreateDBDiagReportTask": doCreateDBDiagReportTask,
"DescribeDiagDBInstances": doDescribeDiagDBInstances,
"AddUserContact": doAddUserContact,
"DescribeTopSpaceSchemaTimeSeries": doDescribeTopSpaceSchemaTimeSeries,
"CreateKillTask": doCreateKillTask,
"DescribeUserSqlAdvice": doDescribeUserSqlAdvice,
"DescribeTopSpaceTableTimeSeries": doDescribeTopSpaceTableTimeSeries,
"ModifyDiagDBInstanceConf": doModifyDiagDBInstanceConf,
"KillMySqlThreads": doKillMySqlThreads,
"CreateDBDiagReportUrl": doCreateDBDiagReportUrl,
"DescribeProxySessionKillTasks": doDescribeProxySessionKillTasks,
}
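# API versions this command module accepts; validated in parse_global_arg below.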
AVAILABLE_VERSION_LIST = [
"v20210527",
"v20191016",
]
def action_caller():
return ACTION_MAP
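# Merge command-line options with the selected profile's configure/credential
# files and environment variables, then validate the API version and --waiter settings.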
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
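    # Without an explicit profile, fall back to credentials, region and role
    # information from the environment variables.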
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)
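    # Backfill any option still missing on the command line from the credential
    # or configure file; secret id/key are mandatory unless a CVM role is used.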
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
if param.replace('_', '-') in cred:
g_param[param] = cred[param.replace('_', '-')]
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["dbbrain"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["dbbrain"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
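    # Parse the --waiter option (a dict literal) and apply timeout/interval
    # defaults from the profile configuration when they are not given.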
if g_param[OptionsDefine.Waiter]:
param = eval(g_param[OptionsDefine.Waiter])
if 'expr' not in param:
raise Exception('`expr` in `--waiter` must be defined')
if 'to' not in param:
raise Exception('`to` in `--waiter` must be defined')
if 'timeout' not in param:
if 'waiter' in conf and 'timeout' in conf['waiter']:
param['timeout'] = conf['waiter']['timeout']
else:
param['timeout'] = 180
if 'interval' not in param:
if 'waiter' in conf and 'interval' in conf['waiter']:
param['interval'] = conf['waiter']['interval']
else:
                    param['interval'] = 5
param['interval'] = min(param['interval'], param['timeout'])
g_param['OptionsDefine.WaiterInfo'] = param
    # When values are read from the config file, json.load in Python 2 returns unicode strings, so convert them to utf-8 here
if six.PY2:
for key, value in g_param.items():
if isinstance(value, six.text_type):
g_param[key] = value.encode('utf-8')
return g_param
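# Hedged usage sketch (not part of the generated tccli module above): per the
# validation in parse_global_arg, a --waiter value is eval'ed into a dict that
# must define 'expr' and 'to'; 'timeout' and 'interval' fall back to the
# profile's waiter section or to 180s/5s. The action name and the 'expr'/'to'
# values below are illustrative assumptions, not taken from this file.
#
#   tccli dbbrain DescribeUserSqlAdvice ... --waiter '{"expr": "Status", "to": "finished", "timeout": 600, "interval": 10}'
#
# which parse_global_arg would store as:
#   g_param['OptionsDefine.WaiterInfo'] = {'expr': 'Status', 'to': 'finished', 'timeout': 600, 'interval': 10}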
|
StarcoderdataPython
|
4864285
|
<gh_stars>1-10
"""This file contains functionality for sending Halo data into SQS."""
from haloevents import HaloEvents
from haloscans import HaloScans
from utility import Utility
import boto3
import json
class Sender(object):
"""Initialize with an instance of ``halosqs.ConfigHelper``."""
def __init__(self, config):
self.config = config
self.halo_stream = self.build_halo_stream()
self.sqs = boto3.client('sqs')
return
def build_halo_stream(self):
"""Create Halo data streamer based on settings in ``self.config``."""
if self.config.halo_module == "events":
stream = HaloEvents(self.config.halo_key,
self.config.halo_secret,
api_host=self.config.halo_api_hostname,
start_timestamp=self.config.start_time,
integration_name=self.config.integration_name)
else:
search_params = {"since": self.config.start_time,
"sort_by": "created_at.asc"}
search_params.update(self.config.search_params)
stream = HaloScans(self.config.halo_key,
self.config.halo_secret,
api_host=self.config.halo_api_hostname,
search_params=search_params,
start_timestamp=self.config.start_time,
integration_name=self.config.integration_name,
report_performance=True,
scan_timeout=self.config.scan_timeout)
return stream
def print_start_message(self):
print("Starting sender.\n Queue: %s\n Start time: %s\n Module: %s" %
(self.config.sqs_queue_url, self.config.start_time,
self.config.halo_module))
def run(self):
"""Send every item produced by the configured Halo stream to SQS."""
self.print_start_message()
for item in self.halo_stream:
self.send_item_to_sqs(item)
return
def send_item_to_sqs(self, item):
"""Send one item to SQS.
Args:
item(dict): Halo event or scan.
"""
message_attributes = {"HaloObjectType":
{"DataType": "String",
"StringValue": self.config.halo_module}}
self.sqs.send_message(QueueUrl=self.config.sqs_queue_url,
MessageAttributes=message_attributes,
MessageBody=Utility.pack_message(
json.dumps(item))
)
return
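# Hedged usage sketch (assumption, not part of the original halosqs module):
# Sender only requires a config object exposing the attributes referenced
# above (halo_key, halo_secret, halo_api_hostname, halo_module, start_time,
# integration_name, search_params, scan_timeout, sqs_queue_url). The
# ConfigHelper import path is taken from the class docstring; everything else
# below is illustrative.
#
# from halosqs import ConfigHelper
#
# if __name__ == "__main__":
#     config = ConfigHelper()   # loads Halo API and SQS settings (e.g. from env vars)
#     Sender(config).run()      # streams every Halo event or scan into the SQS queue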
|
StarcoderdataPython
|
3573492
|
<filename>MaoCuHuiSpider/bsdemo.py
# coding=utf-8
"""
bs4 test: check whether BeautifulSoup can parse multiple HTML documents at once
Result: it only builds a single HTML DOM tree
"""
from bs4 import BeautifulSoup
html = """
<html>
<body>
<div class="title">标题1</div>
<div class="title">标题2</div>
</body>
</html>
<html>
<body>
<div class="title">标题3</div>
<div class="title">标题4</div>
</body>
</html>
"""
def main():
soup = BeautifulSoup(html,"lxml")
titles = soup.select(".title")
for title in titles:
print(title.string)
if __name__ == '__main__':
main()
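# Hedged follow-up sketch (assumption, not part of the original test): since
# BeautifulSoup keeps only a single DOM tree, one workaround is to split the
# input on the closing </html> tag and parse each document separately.
#
# def parse_each_document(raw_html):
#     docs = [part + "</html>" for part in raw_html.split("</html>") if part.strip()]
#     for doc in docs:
#         soup = BeautifulSoup(doc, "lxml")
#         for title in soup.select(".title"):
#             print(title.string)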
|
StarcoderdataPython
|
1682556
|
print("Hello, Python!!")
A = 23
B = "Python Bangladesh!!"
print(A)
print(B)
print(A,B)
# this is a Python single-line comment
"""
This is a comment
written in
more than just one line
"""
# to run this file from the command prompt, type "python Syntax.py"
|
StarcoderdataPython
|
4884900
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'MiracleWong'
try:
f = open('/Users/miraclewong/test.txt', 'r')
print(f.read())
finally:
if f:
f.close()
with open('/Users/miraclewong/test.txt', 'r') as f:
for line in f.readlines():
print(line.strip())
# Read a non-UTF-8 encoded file
# f = open('/Users/michael/gbk.txt', 'r', encoding='gbk', errors='ignore')
# Write to a file
with open('/Users/miraclewong/writes.txt', 'w') as f:
f.write('Hello, world!')
|
StarcoderdataPython
|