Dataset schema (per-row fields, with dtype and observed range):

  hexsha                                     stringlengths   40 .. 40
  size                                       int64           5 .. 2.06M
  ext                                        stringclasses   10 values
  lang                                       stringclasses   1 value
  max_stars_repo_path                        stringlengths   3 .. 248
  max_stars_repo_name                        stringlengths   5 .. 125
  max_stars_repo_head_hexsha                 stringlengths   40 .. 78
  max_stars_repo_licenses                    listlengths     1 .. 10
  max_stars_count                            int64           1 .. 191k
  max_stars_repo_stars_event_min_datetime    stringlengths   24 .. 24
  max_stars_repo_stars_event_max_datetime    stringlengths   24 .. 24
  max_issues_repo_path                       stringlengths   3 .. 248
  max_issues_repo_name                       stringlengths   5 .. 125
  max_issues_repo_head_hexsha                stringlengths   40 .. 78
  max_issues_repo_licenses                   listlengths     1 .. 10
  max_issues_count                           int64           1 .. 67k
  max_issues_repo_issues_event_min_datetime  stringlengths   24 .. 24
  max_issues_repo_issues_event_max_datetime  stringlengths   24 .. 24
  max_forks_repo_path                        stringlengths   3 .. 248
  max_forks_repo_name                        stringlengths   5 .. 125
  max_forks_repo_head_hexsha                 stringlengths   40 .. 78
  max_forks_repo_licenses                    listlengths     1 .. 10
  max_forks_count                            int64           1 .. 105k
  max_forks_repo_forks_event_min_datetime    stringlengths   24 .. 24
  max_forks_repo_forks_event_max_datetime    stringlengths   24 .. 24
  content                                    stringlengths   5 .. 2.06M
  avg_line_length                            float64         1 .. 1.02M
  max_line_length                            int64           3 .. 1.03M
  alphanum_fraction                          float64         0 .. 1
  count_classes                              int64           0 .. 1.6M
  score_classes                              float64         0 .. 1
  count_generators                           int64           0 .. 651k
  score_generators                           float64         0 .. 1
  count_decorators                           int64           0 .. 990k
  score_decorators                           float64         0 .. 1
  count_async_functions                      int64           0 .. 235k
  score_async_functions                      float64         0 .. 1
  count_documentation                        int64           0 .. 1.04M
  score_documentation                        float64         0 .. 1
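As a quick orientation before the rows themselves, here is a minimal sketch of how an export of these rows might be loaded and filtered on the score columns above. It is not part of the original dump: the file name `rows.parquet` and the threshold values are assumptions; only the column names come from the schema.

```python
# Minimal sketch (assumed file name and thresholds): load an export of this
# dataset with pandas and keep well-documented, class-bearing Python files.
import pandas as pd

df = pd.read_parquet("rows.parquet")  # hypothetical export of the rows below
subset = df[(df["lang"] == "Python")
            & (df["score_documentation"] > 0.3)
            & (df["count_classes"] > 0)]
print(subset[["max_stars_repo_path", "size", "alphanum_fraction"]].head())
```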
a3c289b2ddb7ec4ef9412f5ae94e7553200e0202
4,668
py
Python
mojoco trivial/mujocoSim/UR5/simple_example/Mujoco_py_example.py
garlicbutter/Jonathan-Tom
c1696f0a94da46911b3566a3d4f49791e877373f
[ "MIT" ]
2
2021-10-05T04:31:19.000Z
2021-10-05T04:31:26.000Z
mojoco trivial/mujocoSim/UR5/simple_example/Mujoco_py_example.py
garlicbutter/Tom-Jonathan
c1696f0a94da46911b3566a3d4f49791e877373f
[ "MIT" ]
null
null
null
mojoco trivial/mujocoSim/UR5/simple_example/Mujoco_py_example.py
garlicbutter/Tom-Jonathan
c1696f0a94da46911b3566a3d4f49791e877373f
[ "MIT" ]
null
null
null
import numpy as np
import mujoco_py as mj
from mujoco_py_renderer import SimulationError, XMLError, MujocoPyRenderer
from mujoco_py import (MjSim, load_model_from_xml, functions,
                       load_model_from_path, MjSimState,
                       ignore_mujoco_warnings, load_model_from_mjb)
from matplotlib import pyplot as plt
import time

xml = """
<mujoco model="example">
    <compiler coordinate="global"/>
    <default>
        <geom rgba=".8 .6 .4 1"/>
    </default>
    <asset>
        <texture type="skybox" builtin="gradient" rgb1="1 1 1" rgb2=".6 .8 1" width="256" height="256"/>
    </asset>
    <worldbody>
        <light pos="0 1 1" dir="0 -1 -1" diffuse="1 1 1"/>
        <geom name="floor" pos="0 0 0" rgba="0.8 0.9 0.8 1" size="10 10 10" type="plane"/>
        <body>
            <site name="world" size="0.1" pos="0 0 0" />
            <geom name="first_pole" type="capsule" fromto="0 0 0 0 0 0.5" size="0.04"/>
            <joint name='a' type="hinge" pos="0 0 0" axis="0 0 1" />
            <body name="second_pole">
                <inertial pos="0 0 0" mass="0.00000001" diaginertia="1e-008 1e-008 1e-008" />
                <geom type="capsule" fromto="0 0 0.5 0.5 0 0.5" size="0.04" name="second_pole"/>
                <joint name='b' type="hinge" pos="0 0 0.5" axis="0 1 0"/>
                <body name='third_pole'>
                    <inertial pos="0 0 0" mass="0.00000001" diaginertia="1e-008 1e-008 1e-008" />
                    <geom type="capsule" fromto="0.5 0 0.5 1 0 0.5" size="0.04" name="third_pole"/>
                    <joint name='c' type="hinge" pos="0.5 0 0.5" axis="0 1 0"/>
                    <site name="target" size="0.1" pos="1 0 0.5" />
                    <body name="mass">
                        <inertial pos="1 0 0.5" mass="1e-2" diaginertia="1e-008 1e-008 1e-008" />
                        <geom type="sphere" pos="1 0 0.5" size="0.2" name="mass"/>
                    </body>
                </body>
            </body>
        </body>
    </worldbody>
    <actuator>
        <motor joint="a"/>
        <motor joint="b"/>
        <motor joint="c"/>
    </actuator>
</mujoco>
"""

model = load_model_from_xml(xml)
sim = MjSim(model)
viewer = MujocoPyRenderer(sim)

sim.reset()
# After reset jacobians are all zeros
sim.forward()
target_jacp = np.zeros(3 * sim.model.nv)
target_jacr = np.zeros(3 * sim.model.nv)

F = np.array([0, 0, -9.81*1e-2, 0, 0, 0]).T
#np.testing.assert_allclose(target_jacp, np.zeros(3 * sim.model.nv))

# After first forward, jacobians are real
#sim.forward()
K_diag = 2000
C_diag = 100
A_diag = 1e-3

K = np.identity(3)*K_diag
C = np.identity(3)*C_diag
A = np.identity(3)*A_diag
#K_diag=0.3
#C_diag=0.05

for i in range(3):
    K[i, i] = K_diag
    C[i, i] = C_diag
    A[i, i] = A_diag

x_intial = sim.data.site_xpos[1]
print(x_intial)
x_desired = np.array([0, 1, 0.3])

v_intial = sim.data.site_xvelp[1]
v_desired = np.array([0, 0, 0])

a_desired = np.array([0, 0, 0])
a_intial = np.array([0, 0, 0])

dt = sim.model.opt.timestep

#sim.data.get_site_jacp('target', jacp=target_jacp)
# Should be unchanged after steps (zero action)
graph = []
for _ in range(100000):
    F[:3] = np.dot(K, x_desired - x_intial) + np.dot(C, v_desired - v_intial) + np.dot(A, a_desired - a_intial)
    H = np.zeros(sim.model.nv * sim.model.nv)
    functions.mj_fullM(sim.model, H, sim.data.qM)
    sim.data.get_site_jacp('target', jacp=target_jacp)
    sim.data.get_site_jacr('target', jacr=target_jacr)
    J_L = target_jacp.reshape((3, sim.model.nv))
    J_A = target_jacr.reshape((3, sim.model.nv))
    J = np.concatenate((J_L, J_A), axis=0)
    H_L = np.dot(np.linalg.pinv(J_L.T), np.dot(H.reshape(sim.model.nv, sim.model.nv), np.linalg.pinv(J_L)))
    H_all = np.dot(np.linalg.pinv(J.T), np.dot(H.reshape(sim.model.nv, sim.model.nv), np.linalg.pinv(J)))
    #F_a=np.dot(A,0.3-sim.data.qacc)
    #action = np.dot(J_L.T, np.dot(H_L, F[:3]))+sim.data.qfrc_bias
    action = sim.data.qfrc_bias + np.dot(H.reshape(3, 3), np.dot(J_L.T, F[:3]))
    #print(action)
    #action = np.dot(J.T, F)
    sim.data.ctrl[:] = action
    sim.step()
    sim.forward()
    #print(np.max(action))
    #print(sim.data.qacc)
    viewer.render()
    x_intial = sim.data.site_xpos[1]
    a_intial = (v_intial - sim.data.site_xvelp[1]) / dt
    print(a_intial)
    v_intial = sim.data.site_xvelp[1]
    normal = np.linalg.norm(x_intial - x_desired)
    #print(normal)
    if normal < 0.1:
        print("in")
        if x_desired[0] == 0:
            x_desired = np.array([-1, 0, 0.5])
        elif x_desired[0] == 1:
            x_desired = np.array([0, 1, 0.3])
        elif x_desired[0] == -1:
            x_desired = np.array([1, 0, 0.5])
    graph.append(np.abs(x_intial - x_desired))
    # sim.forward()
    print("the desired is {} and the intial is{}".format(x_desired, x_intial))

plt.plot(graph)
plt.show()
29.923077
105
0.610111
0
0
0
0
0
0
0
0
2,184
0.467866
a3c2ca7e8eeb8a5b7daf690508f0da4c87ebd47d
3,323
py
Python
evaluation/wordpress/pull_docker_images_from_private_registry.py
seveirbian/gear-old
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
[ "Apache-2.0" ]
null
null
null
evaluation/wordpress/pull_docker_images_from_private_registry.py
seveirbian/gear-old
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
[ "Apache-2.0" ]
null
null
null
evaluation/wordpress/pull_docker_images_from_private_registry.py
seveirbian/gear-old
8d3529a9bf42e652a9d7475c9d14e9a6afc69a76
[ "Apache-2.0" ]
null
null
null
import sys
# package need to be installed, pip install docker
import docker
import time
import yaml
import os
import xlwt

auto = False

private_registry = "202.114.10.146:9999/"

# result
result = [["tag", "finishTime", "size", "data"], ]


class Puller:

    def __init__(self, images):
        self.images_to_pull = images

    def check(self):
        # detect whether the file exists, if true, delete it
        if os.path.exists("./images_pulled.txt"):
            os.remove("./images_pulled.txt")

    def pull(self):
        self.check()
        client = docker.from_env()
        # if don't give a tag, then all image under this registry will be pulled
        repos = self.images_to_pull[0]["repo"]
        for repo in repos:
            tags = self.images_to_pull[1][repo]
            for tag in tags:
                print "start pulling: ", private_registry+repo, ":", tag
                # get present time
                startTime = time.time()
                # get present net data
                cnetdata = get_net_data()
                # pull images
                try:
                    image_pulled = client.images.pull(repository=private_registry+repo, tag=str(tag))
                    # print pull time
                    finishTime = time.time() - startTime
                    print "finished in ", finishTime, "s"
                    # get image's size
                    size = image_pulled.attrs[u'Size'] / 1000000.0
                    print "image size: ", size
                    data = get_net_data() - cnetdata
                    print "pull data: ", data
                    print "\n"
                    # record the image and its pulling time
                    result.append([tag, finishTime, size, data])
                except docker.errors.NotFound:
                    print private_registry+repo + " not found...\n\n"
                except docker.errors.ImageNotFound:
                    print private_registry+repo + " image not fount...\n\n"
                if auto != True:
                    raw_input("Next?")


class Generator:

    def __init__(self, profilePath=""):
        self.profilePath = profilePath

    def generateFromProfile(self):
        if self.profilePath == "":
            print "Error: profile path is null"
        with open(self.profilePath, 'r') as f:
            self.images = yaml.load(f, Loader=yaml.FullLoader)
        return self.images


def get_net_data():
    netCard = "/proc/net/dev"
    fd = open(netCard, "r")
    for line in fd.readlines():
        if line.find("enp0s3") >= 0:
            field = line.split()
            data = float(field[1]) / 1024.0 / 1024.0
    fd.close()
    return data


if __name__ == "__main__":
    if len(sys.argv) == 2:
        auto = True

    generator = Generator(os.path.split(os.path.realpath(__file__))[0]+"/image_versions.yaml")
    images = generator.generateFromProfile()
    puller = Puller(images)
    puller.pull()

    # create a workbook sheet
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet("run_time")

    for row in range(len(result)):
        for column in range(len(result[row])):
            sheet.write(row, column, result[row][column])

    workbook.save(os.path.split(os.path.realpath(__file__))[0]+"/pull.xls")
27.46281
101
0.550707
2,223
0.668974
0
0
0
0
0
0
674
0.202829
a3c4634520b2ba72e01bed684e08b442a5657f9b
385
py
Python
jiminy/envs/vnc_wog.py
sibeshkar/jiminy
7754f86fb0f246e7d039ea0cbfd9950fcae4adfb
[ "MIT" ]
3
2020-03-16T13:50:40.000Z
2021-06-09T05:26:13.000Z
jiminy/envs/vnc_wog.py
sibeshkar/jiminy
7754f86fb0f246e7d039ea0cbfd9950fcae4adfb
[ "MIT" ]
null
null
null
jiminy/envs/vnc_wog.py
sibeshkar/jiminy
7754f86fb0f246e7d039ea0cbfd9950fcae4adfb
[ "MIT" ]
null
null
null
from jiminy.envs import vnc_env
from jiminy.spaces import VNCActionSpace


class WorldOfGooEnv(vnc_env.VNCEnv):
    def __init__(self):
        super(WorldOfGooEnv, self).__init__()
        # TODO: set action space screen shape to match
        # HACK: empty keys list fails for some weird reason, give it an 'a'
        self.action_space = VNCActionSpace(keys=['a'], buttonmasks=[1])
35
75
0.703896
309
0.802597
0
0
0
0
0
0
116
0.301299
a3c726cfaf4ab3b53d1df8bd6d6c24aef693e3ab
5,066
py
Python
fedml_api/standalone/federated_sgan/fedssgan_api.py
arj119/FedML
5b7c098659f3e61f9e44583965300d8d0829f7a8
[ "Apache-2.0" ]
null
null
null
fedml_api/standalone/federated_sgan/fedssgan_api.py
arj119/FedML
5b7c098659f3e61f9e44583965300d8d0829f7a8
[ "Apache-2.0" ]
null
null
null
fedml_api/standalone/federated_sgan/fedssgan_api.py
arj119/FedML
5b7c098659f3e61f9e44583965300d8d0829f7a8
[ "Apache-2.0" ]
null
null
null
import copy
import logging
import random
from typing import List, Tuple

import numpy as np
import torch
import wandb
from torch.utils.data import ConcatDataset

from fedml_api.standalone.fedavg.my_model_trainer import MyModelTrainer
from fedml_api.standalone.federated_sgan.ac_gan_model_trainer import ACGANModelTrainer
from fedml_api.standalone.federated_sgan.client import FedSSGANClient
from fedml_api.standalone.federated_sgan.model_trainer import FedSSGANModelTrainer
from fedml_api.standalone.utils.HeterogeneousModelBaseTrainerAPI import HeterogeneousModelBaseTrainerAPI


class FedSSGANAPI(HeterogeneousModelBaseTrainerAPI):
    def __init__(self, dataset, device, args, adapter_model, client_models: List[Tuple[torch.nn.Module, int]]):
        """
        Args:
            dataset: Dataset presplit into data loaders
            device: Device to run training on
            args: Additional args
            client_models: List of client models and their frequency participating (assuming a stateful algorithm
                for simplicity)
        """
        super().__init__(dataset, device, args)

        self.global_model = MyModelTrainer(adapter_model)

        self._setup_clients(self.train_data_local_num_dict, self.train_data_local_dict, self.test_data_local_dict,
                            client_models)
        self._plot_client_training_data_distribution()

    def _setup_clients(self, train_data_local_num_dict, train_data_local_dict, test_data_local_dict, client_models):
        logging.info("############setup_clients (START)#############")

        c_idx = 0
        for local_model, freq in client_models:
            for i in range(freq):
                model_trainer = ACGANModelTrainer(
                    copy.deepcopy(self.global_model.model),
                    copy.deepcopy(local_model)
                )
                c = FedSSGANClient(c_idx, train_data_local_dict[c_idx], test_data_local_dict[c_idx],
                                   train_data_local_num_dict[c_idx], self.test_global, self.args, self.device,
                                   model_trainer)
                c_idx += 1
                self.client_list.append(c)

        logging.info("############setup_clients (END)#############")

    def train(self):
        logging.info('\n###############Pre-Training clients#############\n')
        for i, c in enumerate(self.client_list):
            logging.info(f'Pre=training client: {i}')
            c.pre_train()
        logging.info('###############Pre-Training clients (END)###########\n')

        unlabelled_synthesised_data = None
        w_global = self.global_model.get_model_params()
        for round_idx in range(self.args.comm_round):
            logging.info("################Communication round : {}".format(round_idx))

            w_locals = []
            synthesised_data_locals = []
            client_synthesised_data_lens = {'round': round_idx}

            client: FedSSGANClient
            for idx, client in enumerate(self.client_list):
                # Update client synthetic datasets
                # client.set_synthetic_dataset(unlabelled_synthesised_data)

                # Local round
                w = client.train(copy.deepcopy(w_global), round_idx)
                # self.logger.info("local weights = " + str(w))
                w_locals.append((client.get_sample_number(), copy.deepcopy(w)))

                # synthetic_data = client.generate_synthetic_dataset()
                # if synthetic_data is not None:
                #     synthesised_data_locals.append(synthetic_data)
                #     client_synthesised_data_lens[f'Client_{idx}: Synthetic Dataset Size'] = len(synthetic_data)
                # else:
                #     client_synthesised_data_lens[f'Client_{idx}: Synthetic Dataset Size'] = 0
            #
            # if len(synthesised_data_locals) > 0:
            #     unlabelled_synthesised_data = ConcatDataset(synthesised_data_locals)
            #     logging.info(f'\n Synthetic Unlabelled Dataset Size: {len(unlabelled_synthesised_data)}\n')
            #     client_synthesised_data_lens['Total Synthetic Dataset Size'] = len(unlabelled_synthesised_data)
            # else:
            #     unlabelled_synthesised_data = None
            #     client_synthesised_data_lens['Total Synthetic Dataset Size'] = 0
            #
            # wandb.log(client_synthesised_data_lens)

            # update global weights
            w_global = self._aggregate(w_locals)
            self.global_model.set_model_params(w_global)

            # test results
            # at last round
            if round_idx == self.args.comm_round - 1:
                self._local_test_on_all_clients(round_idx)
            # per {frequency_of_the_test} round
            elif round_idx % self.args.frequency_of_the_test == 0:
                if self.args.dataset.startswith("stackoverflow"):
                    self._local_test_on_validation_set(round_idx)
                else:
                    self._local_test_on_all_clients(round_idx)
44.831858
129
0.627319
4,484
0.885116
0
0
0
0
0
0
1,644
0.324516
a3c78b4ed55d10de069695bce6f3d899ee02cc99
20,932
py
Python
pytorch-word2vec-master/csv.py
arjun-sai-krishnan/tamil-morpho-embeddings
a33bcb427d635dba3b1857f26ea7ab287e1a44c5
[ "MIT" ]
2
2021-04-11T18:25:16.000Z
2022-03-16T03:48:52.000Z
pytorch-word2vec-master/csv.py
arjun-sai-krishnan/tamil-morpho-embeddings
a33bcb427d635dba3b1857f26ea7ab287e1a44c5
[ "MIT" ]
null
null
null
pytorch-word2vec-master/csv.py
arjun-sai-krishnan/tamil-morpho-embeddings
a33bcb427d635dba3b1857f26ea7ab287e1a44c5
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import argparse from collections import Counter import pdb import pickle import re import sys import time import numpy as np import torch import torch.nn as nn from torch.autograd import Variable from torch import optim import torch.nn.functional as F import torch.multiprocessing as mp import data_producer from multiprocessing import set_start_method parser = argparse.ArgumentParser() parser.add_argument("--train", type=str, default="", help="training file") parser.add_argument("--vocab", type=str, default="", help="vocab pickle file") parser.add_argument("--save", type=str, default="csv.pth.tar", help="saved model filename") parser.add_argument("--size", type=int, default=300, help="word embedding dimension") parser.add_argument("--window", type=int, default=5, help="context window size") parser.add_argument("--sample", type=float, default=1e-5, help="subsample threshold") parser.add_argument("--negative", type=int, default=10, help="number of negative samples") parser.add_argument("--delta", type=float, default=0.15, help="create new sense for a type if similarity lower than this value.") parser.add_argument("--min_count", type=int, default=5, help="minimum frequency of a word") parser.add_argument("--processes", type=int, default=4, help="number of processes") parser.add_argument("--num_workers", type=int, default=6, help="number of workers for data processsing") parser.add_argument("--iter", type=int, default=3, help="number of iterations") parser.add_argument("--lr", type=float, default=-1.0, help="initial learning rate") parser.add_argument("--batch_size", type=int, default=100, help="(max) batch size") parser.add_argument("--cuda", action='store_true', default=False, help="enable cuda") parser.add_argument("--multi_proto", action='store_true', default=False, help="True: multi-prototype, False:single-prototype") MAX_SENT_LEN = 1000 # Build the vocabulary. 
def file_split(f, delim=' \t\n', bufsize=1024): prev = '' while True: s = f.read(bufsize) if not s: break tokens = re.split('['+delim+']{1,}', s) if len(tokens) > 1: yield prev + tokens[0] prev = tokens[-1] for x in tokens[1:-1]: yield x else: prev += s if prev: yield prev def build_vocab(args): vocab = Counter() word_count = 0 for word in file_split(open(args.train)): vocab[word] += 1 word_count += 1 if word_count % 10000 == 0: sys.stdout.write('%d\r' % len(vocab)) freq = {k:v for k,v in vocab.items() if v >= args.min_count} word_count = sum([freq[k] for k in freq]) word_list = sorted(freq, key=freq.get, reverse=True) word2idx = {} for i,w in enumerate(word_list): word2idx[w] = i print("Vocab size: %ld" % len(word2idx)) print("Words in train file: %ld" % word_count) vars(args)['vocab_size'] = len(word2idx) vars(args)['train_words'] = word_count return word2idx, word_list, freq class CSV(nn.Module): def __init__(self, args): super(CSV, self).__init__() self.global_embs = nn.Embedding(args.vocab_size+1, args.size, padding_idx=args.vocab_size, sparse=True) self.sense_embs = nn.Embedding(args.vocab_size*5, args.size, sparse=True) self.ctx_weight = torch.nn.Parameter(torch.ones(2*args.window, args.size)) self.word2sense = [ [i] for i in range(args.vocab_size) ] ''' word2sense = np.zeros((args.vocab_size, 5), dtype='int32') for i in range(args.vocab_size): word2sense[i, 0] = i self.word2sense = torch.nn.Parameter(torch.from_numpy(word2sense).int()) self.word_sense_cnts = torch.nn.Parameter(torch.ones((args.vocab_size,)).int()) ''' self.global_embs.weight.data.uniform_(-0.5/args.size, 0.5/args.size) self.sense_embs.weight.data.uniform_(-0.5/args.size, 0.5/args.size) self.n_senses = args.vocab_size self.sense_capacity = args.vocab_size*5 self.batch_size = args.batch_size self.size = args.size self.window = args.window self.negative = args.negative self.pad_idx = args.vocab_size def get_context_feats(self, ctx_type_indices): ctx_type_embs = self.global_embs(ctx_type_indices) return torch.sum(ctx_type_embs * self.ctx_weight, 1).cpu().data.numpy() def get_possible_sense_embs(self, type_indices, cuda=True): sense_indices = [] sense2idx = {} for type_id in type_indices: for s_id in self.word2sense[type_id]: if s_id not in sense2idx: sense2idx[s_id] = len(sense_indices) sense_indices.append( s_id ) sense_indices = np.array(sense_indices) if cuda: sense_embs = self.sense_embs(Variable(torch.LongTensor(sense_indices).cuda())) return sense2idx, sense_embs.cpu().data.numpy() else: sense_embs = self.sense_embs(Variable(torch.LongTensor(sense_indices))) return sense2idx, sense_embs.data.numpy() def forward(self, data): ctx_type_indices = data[:, 0:2*self.window] pos_sense_idx = data[:, 2*self.window+1] neg_sense_indices = data[:, 2*self.window+2:2*self.window+2+self.negative] neg_mask = data[:, 2*self.window+2+self.negative:].float() ctx_type_embs = self.global_embs(ctx_type_indices) pos_sense_embs = self.sense_embs(pos_sense_idx) neg_sense_embs = self.sense_embs(neg_sense_indices) ctx_feats = torch.sum(ctx_type_embs * self.ctx_weight, 1, keepdim=True) # Neg Log Likelihood pos_ips = torch.sum(ctx_feats[:,0,:] * pos_sense_embs, 1) pos_loss = torch.sum( -F.logsigmoid(torch.clamp(pos_ips,max=10,min=-10))) neg_ips = torch.bmm(neg_sense_embs, ctx_feats.permute(0,2,1))[:,:,0] neg_loss = torch.sum( -F.logsigmoid(torch.clamp(-neg_ips,max=10,min=-10)) * neg_mask ) return pos_loss + neg_loss # Initialize model. 
def init_net(args): if args.lr == -1.0: vars(args)['lr'] = 0.05 return CSV(args) def save_model(filename, model, args, word2idx): torch.save({ 'word2idx':word2idx, 'args':args, #'word2sense': model.word2sense, 'n_senses': model.n_senses, 'params': model.state_dict() }, filename) def load_model(filename): checkpoint = torch.load(filename) word2idx = checkpoint['word2idx'] args = checkpoint['args'] model = CSV(args) if args.cuda: model.cuda() model.global_embs.weight.data = checkpoint['params']['global_embs.weight'] model.sense_embs.weight.data = checkpoint['params']['sense_embs.weight'] model.ctx_weight.data = checkpoint['params']['ctx_weight'] model.word2sense = checkpoint['word2sense'] #model.word2sense.data = checkpoint['params']['word2sense'] #model.word_sense_cnts.data = checkpoint['params']['word_sense_cnts'] model.n_senses = checkpoint['n_senses'] return model, word2idx # Training def train_process_sent_producer(p_id, data_queue, word_count_actual, word_list, word2idx, freq, args): n_proc = 1 if args.stage == 2 else args.processes N = 1 if args.stage == 2 else args.iter neg = 0 if args.stage == 2 else args.negative if args.negative > 0: table_ptr_val = data_producer.init_unigram_table(word_list, freq, args.train_words) train_file = open(args.train) file_pos = args.file_size * p_id // n_proc train_file.seek(file_pos, 0) while True: try: train_file.read(1) except UnicodeDecodeError: file_pos -= 1 train_file.seek(file_pos, 0) else: train_file.seek(file_pos, 0) break batch_count = 0 batch_placeholder = np.zeros((args.batch_size, 2*args.window+2+2*neg), 'int64') for it in range(N): train_file.seek(file_pos, 0) last_word_cnt = 0 word_cnt = 0 sentence = [] prev = '' eof = False while True: if eof or train_file.tell() > file_pos + args.file_size / n_proc: break while True: s = train_file.read(1) if not s: eof = True break elif s == ' ' or s == '\t': if prev in word2idx: sentence.append(prev) prev = '' if len(sentence) >= MAX_SENT_LEN: break elif s == '\n': if prev in word2idx: sentence.append(prev) prev = '' break else: prev += s if len(sentence) > 0: # subsampling sent_id = [] if args.sample != 0: sent_len = len(sentence) i = 0 while i < sent_len: word = sentence[i] f = freq[word] / args.train_words pb = (np.sqrt(f / args.sample) + 1) * args.sample / f; if pb > np.random.random_sample(): sent_id.append( word2idx[word] ) i += 1 if len(sent_id) < 2: word_cnt += len(sentence) sentence.clear() continue next_random = (2**24) * np.random.randint(0, 2**24) + np.random.randint(0, 2**24) chunk = data_producer.cbow_producer(sent_id, len(sent_id), table_ptr_val, args.window, neg, args.vocab_size, args.batch_size, next_random) chunk_pos = 0 while chunk_pos < chunk.shape[0]: remain_space = args.batch_size - batch_count remain_chunk = chunk.shape[0] - chunk_pos if remain_chunk < remain_space: take_from_chunk = remain_chunk else: take_from_chunk = remain_space batch_placeholder[batch_count:batch_count+take_from_chunk, :] = chunk[chunk_pos:chunk_pos+take_from_chunk, :] batch_count += take_from_chunk if batch_count == args.batch_size: data_queue.put(batch_placeholder) batch_count = 0 chunk_pos += take_from_chunk word_cnt += len(sentence) if word_cnt - last_word_cnt > 10000: with word_count_actual.get_lock(): word_count_actual.value += word_cnt - last_word_cnt last_word_cnt = word_cnt sentence.clear() with word_count_actual.get_lock(): word_count_actual.value += word_cnt - last_word_cnt print(p_id, it, file_pos, train_file.tell(), args.file_size) if batch_count > 0: 
data_queue.put(batch_placeholder[:batch_count,:]) data_queue.put(None) print(p_id, file_pos, train_file.tell(), args.file_size) def train_process(p_id, word_count_actual, word2idx, word_list, freq, args, model): data_queue = mp.SimpleQueue() lr = args.lr #optimizer = optim.SGD(filter(lambda p: p.requires_grad, model.parameters()), lr=lr) optimizer = optim.Adagrad(filter(lambda p: p.requires_grad, model.parameters()), lr=lr) t = mp.Process(target=train_process_sent_producer, args=(p_id, data_queue, word_count_actual, word_list, word2idx, freq, args)) t.start() #n_iter = 1 if args.stage == 2 else args.iter n_iter = args.iter # get from data_queue and feed to model prev_word_cnt = 0 while True: chunk = data_queue.get() if chunk is None: break else: # lr anneal & output if word_count_actual.value - prev_word_cnt > 10000: #if args.lr_anneal: # lr = args.lr * (1 - word_count_actual.value / (n_iter * args.train_words)) # if lr < 0.0001 * args.lr: # lr = 0.0001 * args.lr # for param_group in optimizer.param_groups: # param_group['lr'] = lr #sys.stdout.write("\rAlpha: %0.8f, Progess: %0.2f, Words/sec: %f, word_cnt: %d" % (lr, word_count_actual.value / (n_iter * args.train_words) * 100, word_count_actual.value / (time.monotonic() - args.t_start), word_count_actual.value)) sys.stdout.write("\rProgess: %0.2f, Words/sec: %f, word_cnt: %d" % (word_count_actual.value / (n_iter * args.train_words) * 100, word_count_actual.value / (time.monotonic() - args.t_start), word_count_actual.value)) sys.stdout.flush() prev_word_cnt = word_count_actual.value if args.stage == 1: if args.cuda: data = Variable(torch.LongTensor(chunk).cuda(), requires_grad=False) else: data = Variable(torch.LongTensor(chunk), requires_grad=False) optimizer.zero_grad() loss = model(data) loss.backward() optimizer.step() model.global_embs.weight.data[args.vocab_size].fill_(0) elif args.stage == 3: if args.cuda: data = Variable(torch.LongTensor(chunk).cuda(), requires_grad=False) else: data = Variable(torch.LongTensor(chunk), requires_grad=False) #type_ids = chunk[:, 2*args.window+1:2*args.window+2+2*args.negative] type_ids = chunk[:, 2*args.window+1:2*args.window+2+args.negative] type_ids = np.reshape(type_ids, (type_ids.shape[0] * type_ids.shape[1])) sense2idx, sense_embs = model.get_possible_sense_embs(type_ids.tolist()) # get type_idx from chunk, and do sense selection here. 
context_feats = model.get_context_feats(data[:, :2*args.window]) chunk = data_producer.select_sense(chunk, context_feats, sense2idx, sense_embs, model.word2sense, chunk.shape[0], args.size, args.window, args.negative) if args.cuda: data = Variable(torch.LongTensor(chunk).cuda(), requires_grad=False) else: data = Variable(torch.LongTensor(chunk), requires_grad=False) optimizer.zero_grad() loss = model(data) loss.backward() optimizer.step() model.global_embs.weight.data[args.vocab_size].fill_(0) t.join() def train_process_stage2(p_id, word_count_actual, word2idx, word_list, freq, args, model): data_queue = mp.SimpleQueue() sense_embs = model.sense_embs.weight.data.numpy() counter_list = np.zeros((model.sense_capacity), dtype='float32') t = mp.Process(target=train_process_sent_producer, args=(p_id, data_queue, word_count_actual, word_list, word2idx, freq, args)) t.start() n_iter = 1 # get from data_queue and feed to model prev_word_cnt = 0 while True: chunk = data_queue.get() if chunk is None: break else: if word_count_actual.value - prev_word_cnt > 10000: sys.stdout.write("\rProgess: %0.2f, Words/sec: %f, word_cnt: %d" % (word_count_actual.value / (n_iter * args.train_words) * 100, word_count_actual.value / (time.monotonic() - args.t_start), word_count_actual.value)) sys.stdout.flush() prev_word_cnt = word_count_actual.value if args.cuda: data = Variable(torch.LongTensor(chunk).cuda(), requires_grad=False) else: data = Variable(torch.LongTensor(chunk), requires_grad=False) context_feats = model.get_context_feats(data[:, :2*args.window]) # update sense_embs create_cnt = data_producer.create_n_update_sense(chunk[:, 2*args.window+1], context_feats, sense_embs, model.word2sense, counter_list, chunk.shape[0], args.size, args.delta, model.n_senses) model.n_senses += create_cnt #if model.n_senses + args.batch_size > model.sense_capacity: # new_capacity = model.sense_capacity * 3 // 2 # counter_list = np.concatenate( (counter_list, np.ones((new_capacity - model.sense_capacity),dtype='float32')), axis=0) # zero = np.zeros((new_capacity - model.sense_capacity, args.size), 'float32') # sense_embs = np.concatenate((sense_embs, zero), 0) # model.sense_capacity = new_capacity # print("\nexapnded sense_embs: %d" % model.n_senses) t.join() sense_embs[:model.n_senses, :] = sense_embs[:model.n_senses, :] / counter_list[:model.n_senses, None] if __name__ == '__main__': set_start_method('forkserver') args = parser.parse_args() print("Starting training using file %s" % args.train) train_file = open(args.train) train_file.seek(0, 2) vars(args)['file_size'] = train_file.tell() word_count_actual = mp.Value('L', 0) if args.vocab == '': word2idx, word_list, freq = build_vocab(args) else: with open(args.vocab, 'rb') as f: word2idx, word_list, freq, pos2idx, dep2id = pickle.load(f) word_count = sum([freq[k] for k in freq]) vars(args)['vocab_size'] = len(word2idx) vars(args)['train_words'] = word_count print("Vocab size: %ld" % len(word2idx)) print("Words in train file: %ld" % word_count) model = init_net(args) model.share_memory() if args.cuda: model.cuda() # stage 1, learn robust context representation. 
vars(args)['stage'] = 1 print("Stage 1") vars(args)['lr_anneal'] = True vars(args)['t_start'] = time.monotonic() processes = [] for p_id in range(args.processes): p = mp.Process(target=train_process, args=(p_id, word_count_actual, word2idx, word_list, freq, args, model)) p.start() processes.append(p) for p in processes: p.join() del processes print("\nStage 1, ", time.monotonic() - args.t_start, " secs ", word_count_actual.value) filename = args.save if not filename.endswith('.pth.tar'): filename += '.stage1.pth.tar' save_model(filename, model, args, word2idx) if args.multi_proto: # stage 2, create new sense in a non-parametric way. # Freeze model paramters except sense_embs, and use only 1 process to prevent race condition old_batch_size = vars(args)['batch_size'] model.global_embs.requires_grad = False model.ctx_weight.requires_grad = False model.sense_embs = model.sense_embs.cpu() vars(args)['stage'] = 2 vars(args)['batch_size'] = 5000 print("\nStage 2") word_count_actual.value = 0 vars(args)['t_start'] = time.monotonic() train_process_stage2(0, word_count_actual, word2idx, word_list, freq, args, model) if args.cuda: model.cuda() print("\nStage 2, ", time.monotonic() - args.t_start, " secs") print("Current # of senses: %d" % model.n_senses) pdb.set_trace() filename = args.save if not filename.endswith('.pth.tar'): filename += '.stage2.pth.tar' save_model(filename, model, args, word2idx) # stage 3, no more sense creation. vars(args)['lr'] = args.lr * 0.01 vars(args)['batch_size'] = old_batch_size model.global_embs.requires_grad = True model.ctx_weight.requires_grad = True vars(args)['stage'] = 3 print("\nBegin stage 3") word_count_actual.value = 0 vars(args)['t_start'] = time.monotonic() processes = [] for p_id in range(args.processes): p = mp.Process(target=train_process, args=(p_id, word_count_actual, word2idx, word_list, freq, args, model)) p.start() processes.append(p) for p in processes: p.join() print("\nStage 3, ", time.monotonic() - args.t_start, " secs") # save model filename = args.save if not filename.endswith('.pth.tar'): filename += '.stage3.pth.tar' save_model(filename, model, args, word2idx) print("")
40.487427
250
0.591821
3,156
0.150774
409
0.019539
0
0
0
0
3,814
0.182209
a3c8721ad82d9b0c4f4bbb5e4ea027824401f22d
339
py
Python
Ogrenciler/Varol/buyuksayi.py
ProEgitim/Python-Dersleri-BEM
b25e9fdb1fa3026925a46b2fcbcba348726b775c
[ "MIT" ]
1
2021-04-18T17:35:22.000Z
2021-04-18T17:35:22.000Z
Ogrenciler/Varol/buyuksayi.py
waroi/Python-Dersleri-BEM
b25e9fdb1fa3026925a46b2fcbcba348726b775c
[ "MIT" ]
null
null
null
Ogrenciler/Varol/buyuksayi.py
waroi/Python-Dersleri-BEM
b25e9fdb1fa3026925a46b2fcbcba348726b775c
[ "MIT" ]
2
2021-04-18T18:22:26.000Z
2021-04-24T17:16:19.000Z
sayi1 = int(input("1. Sayı: "))
sayi2 = int(input("2. Sayı: "))
sayi3 = int(input("3. Sayı: "))
sayi4 = int(input("4. Sayı: "))
sayi5 = int(input("5. Sayı: "))

sayilar = []
sayilar.append(sayi1)
sayilar.append(sayi2)
sayilar.append(sayi3)
sayilar.append(sayi4)
sayilar.append(sayi5)

sayilar.sort()

print("En büyük sayimiz..", sayilar[-1])
21.1875
39
0.663717
0
0
0
0
0
0
0
0
82
0.236994
a3c959da81854ccd184aefdeb715f7df8413b8b8
8,899
py
Python
baselines/deepq/build_graph_mfec.py
MouseHu/emdqn
ba907e959f21dd0b5a17117accccae9c82a79a3b
[ "MIT" ]
null
null
null
baselines/deepq/build_graph_mfec.py
MouseHu/emdqn
ba907e959f21dd0b5a17117accccae9c82a79a3b
[ "MIT" ]
null
null
null
baselines/deepq/build_graph_mfec.py
MouseHu/emdqn
ba907e959f21dd0b5a17117accccae9c82a79a3b
[ "MIT" ]
1
2021-04-26T13:55:47.000Z
2021-04-26T13:55:47.000Z
"""Deep Q learning graph The functions in this file can are used to create the following functions: ======= act ======== Function to chose an action given an observation Parameters ---------- observation: object Observation that can be feed into the output of make_obs_ph stochastic: bool if set to False all the actions are always deterministic (default False) update_eps_ph: float update epsilon a new value, if negative not update happens (default: no update) Returns ------- Tensor of dtype tf.int64 and shape (BATCH_SIZE,) with an action to be performed for every element of the batch. ======= train ======= Function that takes a transition (s,a,r,s') and optimizes Bellman equation's error: td_error = Q(s,a) - (r + gamma * max_a' Q(s', a')) loss = huber_loss[td_error] Parameters ---------- obs_t: object a batch of observations action: np.array actions that were selected upon seeing obs_t. dtype must be int32 and shape must be (batch_size,) reward: np.array immediate reward attained after executing those actions dtype must be float32 and shape must be (batch_size,) obs_tp1: object observations that followed obs_t done: np.array 1 if obs_t was the last observation in the episode and 0 otherwise obs_tp1 gets ignored, but must be of the valid shape. dtype must be float32 and shape must be (batch_size,) weight: np.array imporance weights for every element of the batch (gradient is multiplied by the importance weight) dtype must be float32 and shape must be (batch_size,) Returns ------- td_error: np.array a list of differences between Q(s,a) and the target in Bellman's equation. dtype is float32 and shape is (batch_size,) ======= update_target ======== copy the parameters from optimized Q function to the target Q function. In Q learning we actually optimize the following error: Q(s,a) - (r + gamma * max_a' Q'(s', a')) Where Q' is lagging behind Q to stablize the learning. For example for Atari Q' is set to Q once every 10000 updates training steps. """ import tensorflow as tf import baselines.common.tf_util as U import numpy as np def build_act_mf(make_obs_ph, q_func, z_noise, num_actions, scope="deepq", reuse=None): with tf.variable_scope(scope, reuse=reuse): observations_ph = U.ensure_tf_input(make_obs_ph("observation")) q, q_deterministic, v_mean, v_logvar, z_mean, z_logvar, recon_obs = q_func(observations_ph.get(), z_noise, num_actions, scope="q_func", reuse=tf.AUTO_REUSE) act = U.function(inputs=[observations_ph,z_noise], outputs=[z_mean, z_logvar]) return act def build_train_mf(make_obs_ph, q_func, num_actions, optimizer, grad_norm_clipping=None, gamma=1.0, scope="mfec", alpha=1.0, beta=1.0, theta=1.0, latent_dim=32, ib=True, reuse=None): """Creates the train function: Parameters ---------- make_obs_ph: str -> tf.placeholder or TfInput a function that takes a name and creates a placeholder of input with that name q_func: (tf.Variable, int, str, bool) -> tf.Variable the model that takes the following inputs: observation_in: object the output of observation placeholder num_actions: int number of actions scope: str reuse: bool should be passed to outer variable scope and returns a tensor of shape (batch_size, num_actions) with values of every action. num_actions: int number of actions reuse: bool whether or not to reuse the graph variables optimizer: tf.train.Optimizer optimizer to use for the Q-learning objective. grad_norm_clipping: float or None clip gradient norms to this value. If None no clipping is performed. gamma: float discount rate. 
double_q: bool if true will use Double Q Learning (https://arxiv.org/abs/1509.06461). In general it is a good idea to keep it enabled. scope: str or VariableScope optional scope for variable_scope. reuse: bool or None whether or not the variables should be reused. To be able to reuse the scope must be given. Returns ------- act: (tf.Variable, bool, float) -> tf.Variable function to select and action given observation. ` See the top of the file for details. train: (object, np.array, np.array, object, np.array, np.array) -> np.array optimize the error in Bellman's equation. ` See the top of the file for details. update_target: () -> () copy the parameters from optimized Q function to the target Q function. ` See the top of the file for details. debug: {str: function} a bunch of functions to print debug data like q_values. """ act_noise = tf.placeholder(tf.float32, [None, latent_dim], name="act_noise") act_f = build_act_mf(make_obs_ph, q_func, act_noise, num_actions, scope=scope, reuse=reuse) with tf.variable_scope(scope, reuse=reuse): # set up placeholders # EMDQN obs_vae_input = U.ensure_tf_input(make_obs_ph("obs_vae")) z_noise_vae = tf.placeholder(tf.float32, [None, latent_dim], name="z_noise_vae") inputs = [obs_vae_input,z_noise_vae] if ib: qec_input = tf.placeholder(tf.float32, [None], name='qec') inputs.append(qec_input) outputs = [] q_vae, q_deterministic_vae, v_mean_vae, v_logvar_vae, z_mean_vae, z_logvar_vae, recon_obs = q_func(obs_vae_input.get(), z_noise_vae, num_actions, scope="q_func", reuse=True) q_func_vars = U.scope_vars(U.absolute_scope_name("q_func")) encoder_loss = -1 + z_mean_vae ** 2 + tf.exp(z_logvar_vae) - z_logvar_vae total_loss = tf.reduce_mean(beta * encoder_loss) decoder_loss = tf.keras.losses.binary_crossentropy(tf.reshape(recon_obs, [-1]), tf.reshape( tf.dtypes.cast(obs_vae_input._placeholder, tf.float32), [-1])) print("here", z_mean_vae.shape, z_logvar_vae.shape, encoder_loss.shape, decoder_loss.shape) vae_loss = beta * encoder_loss + theta * decoder_loss outputs.append(encoder_loss) outputs.append(decoder_loss) outputs.append(vae_loss) total_loss += tf.reduce_mean(theta * decoder_loss) if ib: ib_loss = (v_mean_vae - tf.stop_gradient(tf.expand_dims(qec_input, 1))) ** 2 / tf.exp( v_logvar_vae) + v_logvar_vae print("here2", v_mean_vae.shape, tf.expand_dims(qec_input, 1).shape, v_logvar_vae.shape, ib_loss.shape) total_ib_loss = alpha * ib_loss + beta * encoder_loss outputs.append(total_ib_loss) total_loss += tf.reduce_mean(alpha * ib_loss) if grad_norm_clipping is not None: optimize_expr = U.minimize_and_clip(optimizer, total_loss, var_list=q_func_vars, clip_val=grad_norm_clipping) else: optimize_expr = optimizer.minimize(total_loss, var_list=q_func_vars) # Create callable functions # EMDQN total_loss_summary = tf.summary.scalar("total loss", total_loss) z_var_summary = tf.summary.scalar("z_var", tf.reduce_mean(tf.exp(z_logvar_vae))) encoder_loss_summary = tf.summary.scalar("encoder loss", tf.reduce_mean(encoder_loss)) decoder_loss_summary = tf.summary.scalar("decoder loss", tf.reduce_mean(decoder_loss)) summaries = [total_loss_summary, z_var_summary, encoder_loss_summary, decoder_loss_summary] if ib: ib_loss_summary = tf.summary.scalar("ib loss", tf.reduce_mean(ib_loss)) total_ib_loss_summary = tf.summary.scalar("total ib loss", tf.reduce_mean(total_ib_loss)) summaries.append(ib_loss_summary) summaries.append(total_ib_loss_summary) summary = tf.summary.merge(summaries) outputs.append(summary) train = U.function( inputs=inputs, outputs=[total_loss,summary], 
updates=[optimize_expr] ) return act_f, train
42.37619
127
0.618047
0
0
0
0
0
0
0
0
4,465
0.501742
a3c978469e28670107c4646aa77b54f6269dda05
2,244
py
Python
tests/test_prior.py
frodre/LMR
4c00d3f9db96447e69bd3f426d59524f7b5f3ef5
[ "BSD-3-Clause" ]
17
2018-08-27T18:50:36.000Z
2021-03-17T22:48:55.000Z
tests/test_prior.py
mingsongli/LMR
4c00d3f9db96447e69bd3f426d59524f7b5f3ef5
[ "BSD-3-Clause" ]
5
2018-10-15T22:13:27.000Z
2019-04-26T11:45:58.000Z
tests/test_prior.py
mingsongli/LMR
4c00d3f9db96447e69bd3f426d59524f7b5f3ef5
[ "BSD-3-Clause" ]
11
2018-10-11T19:35:34.000Z
2021-08-17T12:08:11.000Z
import sys

sys.path.append('../')

import LMR_config as cfg
import LMR_prior
import numpy as np
import pytest


def test_prior_seed():
    cfg_obj = cfg.Config(**{'core': {'seed': 2}})
    prior_cfg = cfg_obj.prior
    prior_source = '20cr'
    datadir_prior = 'data'
    datafile_prior = '[vardef_template]_gridded_dat.nc'
    state_variables = {'air': 'anom'}
    state_kind = 'anom'

    X = LMR_prior.prior_assignment(prior_source)

    X.prior_datadir = datadir_prior
    X.prior_datafile = datafile_prior
    X.statevars = state_variables
    X.Nens = 1
    X.detrend = False
    X.kind = state_kind
    X.avgInterval = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    X.populate_ensemble(prior_source, prior_cfg)

    X2 = LMR_prior.prior_assignment(prior_source)

    X2.prior_datadir = datadir_prior
    X2.prior_datafile = datafile_prior
    X2.statevars = state_variables
    X2.Nens = 1
    X2.detrend = False
    X2.kind = state_kind
    X2.avgInterval = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    X2.populate_ensemble(prior_source, prior_cfg)

    np.testing.assert_equal(X2.ens, X.ens)


def test_prior_use_full_prior():
    cfg_obj = cfg.Config(**{'core': {'seed': None}})
    prior_cfg = cfg_obj.prior
    prior_source = '20cr'
    datadir_prior = 'data'
    datafile_prior = '[vardef_template]_gridded_dat.nc'
    state_variables = {'air': 'anom'}
    state_kind = 'anom'
    avgInterval = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    X = LMR_prior.prior_assignment(prior_source)

    X.prior_datadir = datadir_prior
    X.prior_datafile = datafile_prior
    X.statevars = state_variables
    X.Nens = None
    X.detrend = False
    X.kind = state_kind
    X.avgInterval = avgInterval

    X.populate_ensemble(prior_source, prior_cfg)

    X2 = LMR_prior.prior_assignment(prior_source)

    X2.prior_datadir = datadir_prior
    X2.prior_datafile = datafile_prior
    X2.statevars = state_variables
    X2.Nens = None
    X2.detrend = False
    X2.kind = state_kind
    X2.avgInterval = avgInterval

    X2.read_prior()

    # Transform full prior into ensemble-like shape
    prior_vals = X2.prior_dict['air']['value']
    prior_vals = prior_vals.reshape(prior_vals.shape[0], -1)
    prior_vals = prior_vals.T

    np.testing.assert_equal(X.ens, prior_vals)
24.933333
60
0.685829
0
0
0
0
0
0
0
0
214
0.095365
a3cadf1c1469dc28d63f965c32ff3b98b7eb9d52
8,719
py
Python
src/salgan_dhf1k/train_bce.py
juanjo3ns/SalGAN2
ac52af743b94961cdb44c5d89774b72fc8acfd3e
[ "MIT" ]
null
null
null
src/salgan_dhf1k/train_bce.py
juanjo3ns/SalGAN2
ac52af743b94961cdb44c5d89774b72fc8acfd3e
[ "MIT" ]
null
null
null
src/salgan_dhf1k/train_bce.py
juanjo3ns/SalGAN2
ac52af743b94961cdb44c5d89774b72fc8acfd3e
[ "MIT" ]
null
null
null
import os from dataloader.datasetDHF1K import DHF1K from torch.utils.data import DataLoader from utils.salgan_utils import save_model, get_lr_optimizer from utils.sendTelegram import send from utils.printer import param_print from utils.salgan_generator import create_model, add_bn from evaluation.fast_evaluation import compute_metrics import numpy as np import torch from torch.nn import AvgPool2d from torch.nn.modules.loss import BCELoss import torch.backends.cudnn as cudnn from torch.optim import SGD, Adam from torch.optim.lr_scheduler import ReduceLROnPlateau, StepLR from time import time from IPython import embed from tensorboard_logger import configure, log_value, log_histogram TRAIN = 'train' VAL = 'val' TEST = 'test' def add_layer_weights(vgg_weights): # Mean of RGB weights of first layer with size [64,1,3,3] layer1 = vgg_weights['0.weight'] mean_rgb = layer1.mean(dim=1,keepdim=True) vgg_weights['0.weight'] = torch.cat([layer1.cuda(),mean_rgb.cuda()],1) # We could do it easily accessing to the weights trought model[0].weight and change dimension 1, but as we # already have the 4th channel we'd be doing the mean of all of the channels, inicializing it in the wrong way. return vgg_weights def train_eval(mode, model, optimizer, dataloader): if mode == TRAIN: N = len(ds_train)/batch_size model.train() else: N = len(ds_validate)/batch_size model.eval() total_loss = [] #iterate epoch... #iterate epoch... for i, X in enumerate(dataloader[mode]): inputs = X[0].cuda() # noramlize saliency maps values between [0,1] gt_maps = X[1].cuda()/255 embed() predictions = model.forward(inputs).squeeze() # reduce size for loss reduce_size = AvgPool2d((4,4)) pred_ = reduce_size(predictions) gt_maps_ = reduce_size(gt_maps) pred_ = pred_.view(pred_.size()[0], -1) gt_maps_ = gt_maps_.view(gt_maps_.size()[0], -1) loss = bce_loss(pred_, gt_maps_) # make actual step update if mode==TRAIN: # compute gradients loss.backward() # step optimizer optimizer.step() # reset grads for next step optimizer.zero_grad() print("\t{}/{} loss:{}".format(i, int(N), loss.item()), end="\r") total_loss.append(loss.item()) total_loss=np.mean(total_loss) return total_loss if __name__ == '__main__': import argparse parser = argparse.ArgumentParser() parser.add_argument("--path_out", default='sal_dhf1k_adamdepthcoordaugm2_frombestsaldepth', type=str, help="""set output path for the trained model""") parser.add_argument("--batch_size", default=12, type=int, help="""Set batch size""") parser.add_argument("--n_epochs", default=10, type=int, help="""Set total number of epochs""") parser.add_argument("--depth", default=False, type=bool, help="""Enable 4th channel with depth""") parser.add_argument("--augment", default=False, type=bool, help="""Enable data augmentation""") parser.add_argument("--coord", default=False, type=bool, help="""Enable coordconv""") parser.add_argument("--flow", default=False, type=bool, help="""Enable opticalflow""") parser.add_argument("--lr", type=float, default=0.00001, help="""Learning rate for training""") parser.add_argument("--patience", type=int, default=3, help="""Patience for learning rate scheduler (default 10)""") args = parser.parse_args() # set output path ========================================================== path_out = '../trained_models/batch12_/' + args.path_out if not os.path.exists(path_out): # create output path os.makedirs(path_out) # create output for models path_models = os.path.join(path_out, 'models') if not os.path.exists(path_models): os.makedirs(path_models) # tensorboard 
configure("{}".format(path_out), flush_secs=5) # data ===================================================================== batch_size = args.batch_size n_epochs = args.n_epochs lr = args.lr DEPTH = args.depth AUGMENT = args.augment COORD = args.coord FLOW = args.flow # Datasets for DHF1K ds_train = DHF1K(mode=TRAIN, transformation=True, depth=DEPTH, d_augm=AUGMENT, coord=COORD) ds_validate = DHF1K(mode=VAL, transformation=False, depth=DEPTH, d_augm=False, coord=COORD) # Dataloaders dataloader = { TRAIN: DataLoader(ds_train, batch_size=batch_size, shuffle=True, num_workers=2), VAL: DataLoader(ds_validate, batch_size=batch_size, shuffle=False, num_workers=2) } # POSSIBILITY OF CHOOSING GPU torch.cuda.set_device(1) # MODEL INITIALIZATION print("Init model...") vgg_weights = torch.load('../trained_models/salgan_baseline.pt')['state_dict'] model = create_model(3) # if DEPTH and COORD: # model = create_model(6) # for i in range(0,3): # vgg_weights = add_layer_weights(vgg_weights) # elif DEPTH: # model = create_model(4) # add_layer_weights(vgg_weights) # elif COORD: # model = create_model(5) # for i in range(0,2): # vgg_weights = add_layer_weights(vgg_weights) # else: model = create_model(3) # Instead of adding manually the layer of new weights, we could use strict=False model.load_state_dict(vgg_weights) # Add batch normalization to current model if needed model = add_bn(model) model.train() model.cuda() cudnn.benchmark = True # NOT WORKING UNMOUNTED DISK # If we have the two GPU's available we are going to use both # if torch.cuda.device_count() > 1: # print("Using ", torch.cuda.device_count(), "GPUs!") # model = torch.nn.DataParallel(model) # LOSS FUNCTION bce_loss = BCELoss() # FINE-TUNE WHOLE NETWORK OR JUST DECODER => uncomment / or different lr for each part # decoder_parameters = [] # base_params = [] # for i, (a, p) in enumerate(model.named_parameters()): # embed() # if i>25: # # print(i, a, p.shape) # decoder_parameters.append(p) # else: # base_params.append(p) # If you wanna train just the decoder put this # p.requires_grad = False # ADAM OPTIMIZER optimizer = Adam(model.parameters(), lr = lr, weight_decay=0.000001) # STOCHASTIC GRADIENT DESCENT OPTIMIZER # optimizer = SGD(model.parameters(), # lr = 0.00001, # momentum=0.9, # weight_decay=0.00001, # nesterov=True) # NUMBER OF TOTAL PARAMETERS # pytorch_total_params = sum(p.numel() for p in model.parameters()) # NUMBER OF TRAINABLE PARAMETERS trainable_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad) print("Trainable parameters: ", trainable_parameters) send("Trainable parameters: " + str(trainable_parameters)) send("Experiment: " + args.path_out) # PRINT TABLE OF PARAMETERS param_print([path_out,"",DEPTH,AUGMENT,COORD,FLOW,batch_size,lr,n_epochs, trainable_parameters]) # set learning rate scheduler # ReduceLROnPlateau( # optimizer, # mode (str) 'min':lr es reduira quan la metrica no es redueixi mes, 'max' al contrari, # factor (float) factor de reduccio de la lr, # patience (int) num epochs sense millora a partir dels quals es redueix lr, # verbose (bool), # ) # scheduler = ReduceLROnPlateau(optimizer, # 'min', # patience=args.patience, # verbose=True) scheduler = StepLR(optimizer, step_size=3, gamma=0.1) best_loss=9999999 # main loop training ======================================================= for id_epoch in range(n_epochs): for mode in [VAL, TRAIN]: # select dataloader data_iterator = dataloader[mode] # # # saliency metrics # if mode ==VAL: # print("Evaluating metrics....") # # only do 100 images from 
validation # metrics = compute_metrics(model, 100, DEPTH, COORD) # # # log metric values # for metric in metrics.keys(): # log_value("Metrics/{}".format(metric), # metrics[metric], id_epoch) # # # get epoch loss # print("--> {} epoch {}".format(mode, id_epoch)) epoch_loss = train_eval(mode, model, optimizer, dataloader) lr = list(get_lr_optimizer(optimizer))[0] print("-----------") print("Done! {} epoch {} loss {} lr {}".format(mode, id_epoch, epoch_loss, lr)) send("{} epoch {}/{} loss {}".format(mode, id_epoch, n_epochs, epoch_loss)) print("\n") # record loss log_value("loss/{}".format(mode), epoch_loss, id_epoch) log_value("lr/{}".format(mode), lr, id_epoch) # for v in model.state_dict(): # log_histogram("Layer {}".format(v), model.state_dict()[v], id_epoch) if (id_epoch%2)==0: save_model(model, optimizer, id_epoch, path_out, name_model='{:03d}'.format(id_epoch)) # store model if val loss improves if mode==VAL: if best_loss > epoch_loss: # update loss best_loss = epoch_loss save_model(model, optimizer, id_epoch, path_out, name_model='best') # scheduler.step(epoch_loss) scheduler.step()
31.139286
112
0.686661
0
0
0
0
0
0
0
0
3,975
0.455901
a3cae716974e2bebe27ab17e3253013ab6b42f7b
782
py
Python
dragontail/content/models/basicpage.py
tracon/dragontail
aae860acb5fe400015557f659b6d4221b939747a
[ "MIT" ]
null
null
null
dragontail/content/models/basicpage.py
tracon/dragontail
aae860acb5fe400015557f659b6d4221b939747a
[ "MIT" ]
null
null
null
dragontail/content/models/basicpage.py
tracon/dragontail
aae860acb5fe400015557f659b6d4221b939747a
[ "MIT" ]
null
null
null
# encoding: utf-8

from django.db import models

from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.fields import StreamField
from wagtail.wagtailcore import blocks
from wagtail.wagtailadmin.edit_handlers import FieldPanel, StreamFieldPanel
from wagtail.wagtailimages.blocks import ImageChooserBlock


class BasicPage(Page):
    body = StreamField([
        ('paragraph', blocks.RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])

    content_panels = Page.content_panels + [
        StreamFieldPanel('body'),
    ]

    def get_template(self, request, *args, **kwargs):
        from .templatesettings import TemplateSettings

        template_settings = TemplateSettings.for_site(request.site)

        return template_settings.basic_page_template
28.962963
75
0.742967
460
0.588235
0
0
0
0
0
0
41
0.05243
a3cc11867421204e587bf63f6a7dd58a6716ea01
2,030
py
Python
infapy/v3/agentService.py
infapy/infapy
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
[ "Apache-2.0" ]
null
null
null
infapy/v3/agentService.py
infapy/infapy
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
[ "Apache-2.0" ]
null
null
null
infapy/v3/agentService.py
infapy/infapy
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
[ "Apache-2.0" ]
1
2021-09-23T10:31:56.000Z
2021-09-23T10:31:56.000Z
# Copyright (c) 2021-Present (Prashanth Pradeep)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import requests as re
import infapy
from infapy.exceptions import InvalidDetailsProvided


class AgentService():
    def __init__(self, v3, v3BaseURL, v3SessionID):
        self._v3 = v3
        self._v3BaseURL = v3BaseURL
        self._v3SessionID = v3SessionID

    def updateAgentService(self, serviceName, serviceAction, agentId):
        url = self._v3BaseURL + "/public/core/v3/agent/service"
        headers = {'Content-Type': "application/json", 'Accept': "application/json",
                   "INFA-SESSION-ID": self._v3SessionID}
        body = {
            'serviceName': serviceName,
            'serviceAction': serviceAction,
            'agentId': agentId}

        infapy.log.info("agentService API URL - " + url)
        infapy.log.info("API Headers: " + str(headers))
        infapy.log.info("Body: " + str(body))

        try:
            response = re.post(url=url, json=body, headers=headers)
            data = response.json()
            infapy.log.debug(str(data))
            try:
                if ("error" in data):
                    infapy.log.error("Please validate the details passed")
                    infapy.log.error(str(data))
                    raise InvalidDetailsProvided
            except Exception as e:
                infapy.log.exception(e)
                raise
        except Exception as e:
            infapy.log.exception(e)
            raise
        infapy.log.info(data["message"])
        return data
38.301887
120
0.634975
1,347
0.663547
0
0
0
0
0
0
822
0.404926
a3cd937793e2d0c588285b6a5f1e77f851ebcc85
5,703
py
Python
home_application/views.py
pengwow/test-demo
9d5c460b534d93d84f39ae24db82aa101027d199
[ "Apache-2.0" ]
null
null
null
home_application/views.py
pengwow/test-demo
9d5c460b534d93d84f39ae24db82aa101027d199
[ "Apache-2.0" ]
4
2020-02-12T01:47:04.000Z
2021-06-10T21:34:36.000Z
home_application/views.py
pengwow/test-demo
9d5c460b534d93d84f39ae24db82aa101027d199
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from common.mymako import render_mako_context, render_json
from blueking.component.shortcuts import get_client_by_request
from django.views.decorators.csrf import csrf_exempt
from models import TEST, HostDisk, ScriptExecInfo
import json
import base64


def home(request):
    """
    Home page
    """
    # yewu = [
    #     {'id': 1, "name": u"业务1"},
    #     {'id': 2, "name": u"业务2"},
    #     {'id': 3, "name": u"业务3"},
    # ]
    # Get the app info from the environment config and the current user from the request
    client = get_client_by_request(request)
    kwargs = {}
    result = client.cc.search_business(kwargs)
    print(result)
    yewu = result['data']['info']
    return render_mako_context(request, '/home_application/home.html', {
        "yewu": yewu,
        "AAA": u"业务列表"
    })


def submit_template(request):
    """
    Submit a template
    """
    print(request.body)
    return render_json({"1111111": "dddddddddd"})


def dev_guide(request):
    """
    Development guide
    """
    return render_mako_context(request, '/home_application/dev_guide.html')


def contactus(request):
    """
    Contact us
    """
    return render_mako_context(request, '/home_application/contact.html')


def tijiao(request):
    data = json.loads(request.body)
    print(type(data))
    sss = TEST(**data)
    sss.save()
    return render_json({"DATA": "AAAAAAAA"})


def host_disk(request):
    host_list = HostDisk.objects.all()
    re_list = list()
    for item in host_list:
        temp_dict = dict()
        temp_dict['os'] = item.os
        temp_dict['host_ip'] = item.host_ip
        temp_dict['host_name'] = item.host_name
        temp_dict['host_path'] = item.host_path
        temp_dict['create_time'] = item.create_time
        re_list.append(temp_dict)
    print(re_list)
    return render_mako_context(request, '/home_application/host_disk.html',
                               {'host_all': re_list}
                               )


def host_tijiao(request):
    data = request.body
    print(type(data))
    data = json.loads(data)
    host = HostDisk(**data)
    host.save()
    return render_json({"status": "OK"})


def host_script(request):
    # Query the execution log for each job by its job instance id
    data = ScriptExecInfo.objects.all()
    client = get_client_by_request(request)
    script_all = list()
    for item in data:
        temp_dict = dict()
        kwargs = {}
        kwargs['bk_biz_id'] = item.bk_biz_id
        kwargs['job_instance_id'] = item.job_instance_id
        result = client.job.get_job_instance_log(kwargs)
        log_content = result['data'][0]['step_results'][0]['ip_logs'][0]['log_content']
        temp_dict['host_ip'] = item.host_ip
        temp_dict['log_content'] = log_content
        temp_dict['script_content'] = item.script_content
        temp_dict['create_time'] = item.create_time
        script_all.append(temp_dict)
    return render_mako_context(request, '/home_application/host_script.html',
                               {'script_all': script_all},
                               )


def script_tijiao(request):
    try:
        print(request.user.username)
    except Exception as e:
        print(str(e))
    data = json.loads(request.body)
    client = get_client_by_request(request)
    kwargs = {}
    result = client.cc.search_business(kwargs)
    bk_biz_id = result['data']['info'][0]['bk_biz_id']
    script_content = base64.b64encode(data['script_content'])
    kwargs = dict()
    kwargs['bk_biz_id'] = bk_biz_id
    kwargs['script_content'] = script_content
    kwargs["account"] = "root"
    kwargs['ip_list'] = [{'bk_cloud_id': 0, "ip": data['host_ip']}]
    result = client.job.fast_execute_script(kwargs)
    script_dict = dict()
    script_dict["host_ip"] = data['host_ip']
    script_dict["script_content"] = data['script_content']
    script_dict["job_instance_id"] = result['data']['job_instance_id']
    script_dict['bk_biz_id'] = bk_biz_id
    scriptexecinfo = ScriptExecInfo(**script_dict)
    scriptexecinfo.save()
    return render_json({"status": "OK"})


# #################### Other ####################
def other(request):
    return render_mako_context(request, '/home_application/other.html')


@csrf_exempt  # Note: this decorator is required
def upload_file(request):
    # The request carries a list of files, so iterate over it
    files = request.FILES
    for item in files:
        _file = files.get(item)
        print(_file.name)
        print(_file.size)
        with open('./' + str(_file.name), 'wb') as fd:
            fd.write(_file.file.read())
    return render_json({"status": "OK"})


def download_file(request):
    """
    File download
    :param request:
    :return: file response
    """
    from django.http import FileResponse
    # Read the requested file name
    file_name = request.GET.get('filename')
    fd = open('./' + file_name, 'rb')
    response = FileResponse(fd)
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="%s"' % file_name
    return response
30.015789
115
0.627389
0
0
0
0
396
0.067221
0
0
2,054
0.348667
a3cdf292bfc1d114fbf7d5d60cd7d8fcf12221e7
455
py
Python
Chapter 6/09 - The built-in multiprocessing module/basic_multiprocessing.py
moseskim/Expert-Python-Programming-Fourth-Edition
5160f974deb2365597b7be9cc032f24bfa13471a
[ "MIT" ]
null
null
null
Chapter 6/09 - The built-in multiprocessing module/basic_multiprocessing.py
moseskim/Expert-Python-Programming-Fourth-Edition
5160f974deb2365597b7be9cc032f24bfa13471a
[ "MIT" ]
null
null
null
Chapter 6/09 - The built-in multiprocessing module/basic_multiprocessing.py
moseskim/Expert-Python-Programming-Fourth-Edition
5160f974deb2365597b7be9cc032f24bfa13471a
[ "MIT" ]
null
null
null
""" "멀티프로세싱"절 예시 `multiprocessing` 모듈을 이용해 새로운 프로세스들을 생성하는 방법을 설명한다. """ from multiprocessing import Process import os def work(identifier): print(f'Hey, I am the process ' f'{identifier}, pid: {os.getpid()}') def main(): processes = [Process(target=work, args=(number,)) for number in range(5)] for process in processes: process.start() while processes: processes.pop().join() if __name__ == "__main__": main()
18.2
77
0.650549
0
0
0
0
0
0
0
0
212
0.40381
a3ce427e7608fff21718948d99c9396b801b2425
670
py
Python
sweeper/cloud/localhost/manager.py
dominoFire/sweeper
26c5497b81c8d0c50671f8ab75c1cf5c4c8191c9
[ "MIT" ]
null
null
null
sweeper/cloud/localhost/manager.py
dominoFire/sweeper
26c5497b81c8d0c50671f8ab75c1cf5c4c8191c9
[ "MIT" ]
null
null
null
sweeper/cloud/localhost/manager.py
dominoFire/sweeper
26c5497b81c8d0c50671f8ab75c1cf5c4c8191c9
[ "MIT" ]
null
null
null
__author__ = '@dominofire'

import os

from sweeper.cloud import resource_config_combinations
from sweeper.cloud.localhost import resource_config_factory as config_factory
from sweeper.resource import Resource


def possible_configs(num):
    configs = config_factory.list_configs()
    combs = resource_config_combinations(num, configs)
    return combs


def create_resource(name, config_object):
    res = Resource(config_object, name, 'localhost', None, None)
    return res


def mount_distributed_file_system(name, vm_resources):
    vm_first = vm_resources[0]
    vm_first.execute_command('mkdir ./fileshare')
    return os.path.join(os.getcwd(), 'fileshare')
23.928571
77
0.773134
0
0
0
0
0
0
0
0
54
0.080597
a3ced405166d997be98745f69fe1f51cd0fcd9c9
3,193
py
Python
tfx/orchestration/experimental/core/service_jobs_test.py
BACtaki/tfx
29db845200beccbb0ffa1e1e1a091e314a3a470f
[ "Apache-2.0" ]
1,813
2019-02-04T17:17:30.000Z
2022-03-29T13:39:30.000Z
tfx/orchestration/experimental/core/service_jobs_test.py
BACtaki/tfx
29db845200beccbb0ffa1e1e1a091e314a3a470f
[ "Apache-2.0" ]
2,710
2019-02-14T00:41:00.000Z
2022-03-31T07:23:00.000Z
tfx/orchestration/experimental/core/service_jobs_test.py
BACtaki/tfx
29db845200beccbb0ffa1e1e1a091e314a3a470f
[ "Apache-2.0" ]
731
2019-02-04T17:59:18.000Z
2022-03-31T06:45:51.000Z
# Copyright 2021 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for tfx.orchestration.experimental.core.service_jobs.""" from absl.testing.absltest import mock import tensorflow as tf from tfx.orchestration.experimental.core import service_jobs from tfx.orchestration.experimental.core import test_utils class ExceptionHandlingServiceJobManagerWrapperTest(test_utils.TfxTest): def setUp(self): super().setUp() self._mock_service_job_manager = mock.create_autospec( service_jobs.ServiceJobManager, instance=True) self._mock_service_job_manager.ensure_node_services.return_value = ( service_jobs.ServiceStatus.SUCCESS) self._mock_service_job_manager.stop_node_services.return_value = True self._mock_service_job_manager.is_pure_service_node.return_value = True self._mock_service_job_manager.is_mixed_service_node.return_value = False self._wrapper = service_jobs.ExceptionHandlingServiceJobManagerWrapper( self._mock_service_job_manager) def test_calls_forwarded_to_underlying_instance(self): self.assertEqual(service_jobs.ServiceStatus.SUCCESS, self._wrapper.ensure_node_services(mock.Mock(), 'node1')) self.assertTrue(self._wrapper.stop_node_services(mock.Mock(), 'node2')) self.assertTrue(self._wrapper.is_pure_service_node(mock.Mock(), 'node3')) self.assertFalse(self._wrapper.is_mixed_service_node(mock.Mock(), 'node4')) self._mock_service_job_manager.ensure_node_services.assert_called_once_with( mock.ANY, 'node1') self._mock_service_job_manager.stop_node_services.assert_called_once_with( mock.ANY, 'node2') self._mock_service_job_manager.is_pure_service_node.assert_called_once_with( mock.ANY, 'node3') self._mock_service_job_manager.is_mixed_service_node.assert_called_once_with( mock.ANY, 'node4') def test_ensure_node_services_exception_handling(self): self._mock_service_job_manager.ensure_node_services.side_effect = RuntimeError( 'test error') self.assertEqual(service_jobs.ServiceStatus.FAILED, self._wrapper.ensure_node_services(mock.Mock(), 'node1')) self._mock_service_job_manager.ensure_node_services.assert_called_once_with( mock.ANY, 'node1') def test_stop_node_services_exception_handling(self): self._mock_service_job_manager.stop_node_services.side_effect = RuntimeError( 'test error') self.assertFalse(self._wrapper.stop_node_services(mock.Mock(), 'node2')) self._mock_service_job_manager.stop_node_services.assert_called_once_with( mock.ANY, 'node2') if __name__ == '__main__': tf.test.main()
46.275362
83
0.777639
2,298
0.719699
0
0
0
0
0
0
766
0.2399
a3d03f04854e2e542f97a3c9c4b2caeaa5e05045
17,041
py
Python
dragonn/models.py
kundajelab/dragonn
431e7c6b94a82972ac0fc3ef76d76e9ce8ba67fc
[ "MIT" ]
251
2016-06-20T20:18:27.000Z
2022-03-03T23:31:38.000Z
dragonn/models.py
kundajelab/dragonn
431e7c6b94a82972ac0fc3ef76d76e9ce8ba67fc
[ "MIT" ]
39
2016-07-01T20:40:59.000Z
2022-02-09T23:30:24.000Z
dragonn/models.py
kundajelab/dragonn
431e7c6b94a82972ac0fc3ef76d76e9ce8ba67fc
[ "MIT" ]
89
2016-06-09T17:59:21.000Z
2021-12-20T03:00:09.000Z
from __future__ import absolute_import, division, print_function import matplotlib import numpy as np import os import subprocess import sys import tempfile matplotlib.use('pdf') import matplotlib.pyplot as plt from abc import abstractmethod, ABCMeta from dragonn.metrics import ClassificationResult from sklearn.svm import SVC as scikit_SVC from sklearn.tree import DecisionTreeClassifier as scikit_DecisionTree from sklearn.ensemble import RandomForestClassifier from keras.models import load_model from dragonn.runtime_metrics import * from dragonn.custom_losses import * import warnings warnings.filterwarnings('ignore') def load_dragonn_model(model_string): custom_objects={"recall":recall, "sensitivity":recall, "specificity":specificity, "fpr":fpr, "fnr":fnr, "fdr":fdr, "precision":precision, "f1":f1, "spearman_corr":spearman_corr, "ambig_binary_crossentropy":ambig_binary_crossentropy, "ambig_mean_squared_error":ambig_mean_squared_error} model=load_model(model_string,custom_objects=custom_objects) return model class Model(object): __metaclass__ = ABCMeta @abstractmethod def __init__(self, **hyperparameters): pass @abstractmethod def train(self, X, y, validation_data): pass @abstractmethod def predict(self, X): pass def test(self, X, y): return ClassificationResult(y, self.predict(X)) def score(self, X, y, metric): return self.test(X, y)[metric] class SequenceDNN(Model): """ Sequence DNN models. Parameters ---------- seq_length : int, optional length of input sequence. keras_model : instance of keras.models.Sequential, optional seq_length or keras_model must be specified. num_tasks : int, optional number of tasks. Default: 1. num_filters : list[int] | tuple[int] number of convolutional filters in each layer. Default: (15,). conv_width : list[int] | tuple[int] width of each layer's convolutional filters. Default: (15,). pool_width : int width of max pooling after the last layer. Default: 35. L1 : float strength of L1 penalty. dropout : float dropout probability in every convolutional layer. Default: 0. verbose: int Verbosity level during training. Valida values: 0, 1, 2. Returns ------- Compiled DNN model. 
""" def __init__(self, seq_length=None, keras_model=None, use_RNN=False, num_tasks=1, num_filters=(15, 15, 15), conv_width=(15, 15, 15), pool_width=35, GRU_size=35, TDD_size=15, L1=0, dropout=0.0, num_epochs=100, verbose=1): from keras.models import Sequential from keras.layers.core import ( Activation, Dense, Dropout, Flatten, Permute, Reshape ) from keras.layers.convolutional import Convolution2D, MaxPooling2D from keras.layers.recurrent import GRU from keras.regularizers import l1 self.num_tasks = num_tasks self.num_epochs = num_epochs self.verbose = verbose self.train_metrics = [] self.valid_metrics = [] if keras_model is not None and seq_length is None: self.model = keras_model self.num_tasks = keras_model.layers[-1].output_shape[-1] elif seq_length is not None and keras_model is None: self.model = Sequential() assert len(num_filters) == len(conv_width) for i, (nb_filter, nb_col) in enumerate(zip(num_filters, conv_width)): conv_height = 4 if i == 0 else 1 self.model.add(Convolution2D( nb_filter=nb_filter, nb_row=conv_height, nb_col=nb_col, activation='linear', init='he_normal', input_shape=(1, 4, seq_length), W_regularizer=l1(L1), b_regularizer=l1(L1))) self.model.add(Activation('relu')) self.model.add(Dropout(dropout)) self.model.add(MaxPooling2D(pool_size=(1, pool_width))) if use_RNN: num_max_pool_outputs = self.model.layers[-1].output_shape[-1] self.model.add(Reshape((num_filters[-1], num_max_pool_outputs))) self.model.add(Permute((2, 1))) self.model.add(GRU(GRU_size, return_sequences=True)) self.model.add(TimeDistributedDense(TDD_size, activation='relu')) self.model.add(Flatten()) self.model.add(Dense(output_dim=self.num_tasks)) self.model.add(Activation('sigmoid')) self.model.compile(optimizer='adam', loss='binary_crossentropy') else: raise ValueError("Exactly one of seq_length or keras_model must be specified!") def train(self, X, y, validation_data, early_stopping_metric='Loss', early_stopping_patience=5, save_best_model_to_prefix=None): if y.dtype != bool: assert set(np.unique(y)) == {0, 1} y = y.astype(bool) multitask = y.shape[1] > 1 if not multitask: num_positives = y.sum() num_sequences = len(y) num_negatives = num_sequences - num_positives if self.verbose >= 1: print('Training model (* indicates new best result)...') X_valid, y_valid = validation_data early_stopping_wait = 0 best_metric = np.inf if early_stopping_metric == 'Loss' else -np.inf for epoch in range(1, self.num_epochs + 1): self.model.fit(X, y, batch_size=128, nb_epoch=1, class_weight={True: num_sequences / num_positives, False: num_sequences / num_negatives} if not multitask else None, verbose=self.verbose >= 2) epoch_train_metrics = self.test(X, y) epoch_valid_metrics = self.test(X_valid, y_valid) self.train_metrics.append(epoch_train_metrics) self.valid_metrics.append(epoch_valid_metrics) if self.verbose >= 1: print('Epoch {}:'.format(epoch)) print('Train {}'.format(epoch_train_metrics)) print('Valid {}'.format(epoch_valid_metrics), end='') current_metric = epoch_valid_metrics[early_stopping_metric].mean() if (early_stopping_metric == 'Loss') == (current_metric <= best_metric): if self.verbose >= 1: print(' *') best_metric = current_metric best_epoch = epoch early_stopping_wait = 0 if save_best_model_to_prefix is not None: self.save(save_best_model_to_prefix) else: if self.verbose >= 1: print() if early_stopping_wait >= early_stopping_patience: break early_stopping_wait += 1 if self.verbose >= 1: print('Finished training after {} epochs.'.format(epoch)) if save_best_model_to_prefix is not None: print("The 
best model's architecture and weights (from epoch {0}) " 'were saved to {1}.arch.json and {1}.weights.h5'.format( best_epoch, save_best_model_to_prefix)) def predict(self, X): return self.model.predict(X, batch_size=128, verbose=False) def get_sequence_filters(self): """ Returns 3D array of 2D sequence filters. """ return self.model.layers[0].get_weights()[0].squeeze(axis=1) @staticmethod def _plot_scores(X, output_directory, peak_width, score_func, score_name): from dragonn.plot import plot_bases_on_ax scores = score_func(X).squeeze(axis=2) # (num_task, num_samples, num_bases, sequence_length) try: os.makedirs(output_directory) except OSError: pass num_tasks = len(scores) for task_index, task_scores in enumerate(scores): for sequence_index, sequence_scores in enumerate(task_scores): # sequence_scores is num_bases x sequence_length basewise_max_sequence_scores = sequence_scores.max(axis=0) plt.clf() figure, (top_axis, bottom_axis) = plt.subplots(2) top_axis.plot(range(1, len(basewise_max_sequence_scores) + 1), basewise_max_sequence_scores) top_axis.set_title('{} scores (motif highlighted)'.format(score_name)) peak_position = basewise_max_sequence_scores.argmax() top_axis.axvspan(peak_position - peak_width, peak_position + peak_width, color='grey', alpha=0.1) peak_sequence_scores = sequence_scores[:, peak_position - peak_width : peak_position + peak_width].T # Set non-max letter_heights to zero letter_heights = np.zeros_like(peak_sequence_scores) letter_heights[np.arange(len(letter_heights)), peak_sequence_scores.argmax(axis=1)] = \ basewise_max_sequence_scores[peak_position - peak_width : peak_position + peak_width] plot_bases_on_ax(letter_heights, bottom_axis) bottom_axis.set_xticklabels(tuple(map( str, np.arange(peak_position - peak_width, peak_position + peak_width + 1)))) bottom_axis.tick_params(axis='x', labelsize='small') plt.xlabel('Position') plt.ylabel('Score') plt.savefig(os.path.join(output_directory, 'sequence_{}{}'.format( sequence_index, '_task_{}'.format(task_index) if num_tasks > 1 else ''))) plt.close() def plot_deeplift(self, X, output_directory, peak_width=10): self._plot_scores(X, output_directory, peak_width, score_func=self.deeplift, score_name='DeepLift') def plot_in_silico_mutagenesis(self, X, output_directory, peak_width=10): self._plot_scores(X, output_directory, peak_width, score_func=self.in_silico_mutagenesis, score_name='ISM') def plot_architecture(self, output_file): from dragonn.visualize_util import plot as plot_keras_model plot_keras_model(self.model, output_file, show_shape=True) def save(self, save_best_model_to_prefix): arch_fname = save_best_model_to_prefix + '.arch.json' weights_fname = save_best_model_to_prefix + '.weights.h5' open(arch_fname, 'w').write(self.model.to_json()) self.model.save_weights(weights_fname, overwrite=True) @staticmethod def load(model_hdf5_fname=None, arch_fname=None, weights_fname=None): if model_hdf5_fname!=None: from keras.models import load_model sequence_dnn=SequenceDNN(keras_model=load_model(model_hdf5_fname)) else: from keras.models import model_from_json model_json_string = open(arch_fname).read() sequence_dnn = SequenceDNN(keras_model=model_from_json(model_json_string)) if weights_fname is not None: sequence_dnn.model.load_weights(weights_fname) return sequence_dnn class MotifScoreRNN(Model): def __init__(self, input_shape, gru_size=10, tdd_size=4): from keras.models import Sequential from keras.layers.core import ( Activation, Dense, Flatten, TimeDistributedDense ) from keras.layers.recurrent import GRU self.model = 
Sequential() self.model.add(GRU(gru_size, return_sequences=True, input_shape=input_shape)) if tdd_size is not None: self.model.add(TimeDistributedDense(tdd_size)) self.model.add(Flatten()) self.model.add(Dense(1)) self.model.add(Activation('sigmoid')) print('Compiling model...') self.model.compile(optimizer='adam', loss='binary_crossentropy') def train(self, X, y, validation_data): from keras.callbacks import EarlyStopping print('Training model...') multitask = y.shape[1] > 1 if not multitask: num_positives = y.sum() num_sequences = len(y) num_negatives = num_sequences - num_positives self.model.fit( X, y, batch_size=128, nb_epoch=100, validation_data=validation_data, class_weight={True: num_sequences / num_positives, False: num_sequences / num_negatives} if not multitask else None, callbacks=[EarlyStopping(monitor='val_loss', patience=10)], verbose=True) def predict(self, X): return self.model.predict(X, batch_size=128, verbose=False) class gkmSVM(Model): def __init__(self, prefix='./gkmSVM', word_length=11, mismatches=3, C=1, threads=1, cache_memory=100, verbosity=4): self.word_length = word_length self.mismatches = mismatches self.C = C self.threads = threads self.prefix = '_'.join(map(str, (prefix, word_length, mismatches, C))) options_list = zip( ['-l', '-d', '-c', '-T', '-m', '-v'], map(str, (word_length, mismatches, C, threads, cache_memory, verbosity))) self.options = ' '.join([' '.join(option) for option in options_list]) @property def model_file(self): model_fname = '{}.model.txt'.format(self.prefix) return model_fname if os.path.isfile(model_fname) else None @staticmethod def encode_sequence_into_fasta_file(sequence_iterator, ofname): """writes sequences into fasta file """ with open(ofname, "w") as wf: for i, seq in enumerate(sequence_iterator): print('>{}'.format(i), file=wf) print(seq, file=wf) def train(self, X, y, validation_data=None): """ Trains gkm-svm, saves model file. 
""" y = y.squeeze() pos_sequence = X[y] neg_sequence = X[~y] pos_fname = "%s.pos_seq.fa" % self.prefix neg_fname = "%s.neg_seq.fa" % self.prefix # create temporary fasta files self.encode_sequence_into_fasta_file(pos_sequence, pos_fname) self.encode_sequence_into_fasta_file(neg_sequence, neg_fname) # run command command = ' '.join( ('gkmtrain', self.options, pos_fname, neg_fname, self.prefix)) process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) process.wait() # wait for it to finish # remove fasta files os.system("rm %s" % pos_fname) os.system("rm %s" % neg_fname) def predict(self, X): if self.model_file is None: raise RuntimeError("GkmSvm hasn't been trained!") # write test fasta file test_fname = "%s.test.fa" % self.prefix self.encode_sequence_into_fasta_file(X, test_fname) # test gkmsvm temp_ofp = tempfile.NamedTemporaryFile() threads_option = '-T %s' % (str(self.threads)) command = ' '.join(['gkmpredict', test_fname, self.model_file, temp_ofp.name, threads_option]) process = subprocess.Popen(command, shell=True) process.wait() # wait for it to finish os.system("rm %s" % test_fname) # remove fasta file # get classification results temp_ofp.seek(0) y = np.array([line.split()[-1] for line in temp_ofp], dtype=float) temp_ofp.close() return np.expand_dims(y, 1) class SVC(Model): def __init__(self): self.classifier = scikit_SVC(probability=True, kernel='linear') def train(self, X, y, validation_data=None): self.classifier.fit(X, y) def predict(self, X): return self.classifier.predict_proba(X)[:, 1:] class DecisionTree(Model): def __init__(self): self.classifier = scikit_DecisionTree() def train(self, X, y, validation_data=None): self.classifier.fit(X, y) def predict(self, X): predictions = np.asarray(self.classifier.predict_proba(X))[..., 1] if len(predictions.shape) == 2: # multitask predictions = predictions.T else: # single-task predictions = np.expand_dims(predictions, 1) return predictions class RandomForest(DecisionTree): def __init__(self): self.classifier = RandomForestClassifier(n_estimators=100)
41.261501
101
0.607007
15,780
0.926002
0
0
3,595
0.210962
0
0
2,352
0.13802
a3d04895a38a041247e2747afe97c42331c17ee1
3,866
py
Python
src/mpass/mpass/migrations/0001_initial.py
haltu/velmu-mpass-demo
19eb0e14fa6710e4aee5d47c898cf570bf7621e5
[ "MIT" ]
null
null
null
src/mpass/mpass/migrations/0001_initial.py
haltu/velmu-mpass-demo
19eb0e14fa6710e4aee5d47c898cf570bf7621e5
[ "MIT" ]
11
2018-08-16T12:09:57.000Z
2018-08-22T14:26:15.000Z
src/mpass/mpass/migrations/0001_initial.py
haltu/velmu-mpass-demosp
31b609d1413ab1bd9f833f42eac30366a6d3e6d0
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.10 on 2018-03-20 08:34 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import parler.models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='AuthenticationSource', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('modified_at', models.DateTimeField(auto_now=True)), ('auth_id', models.CharField(max_length=128)), ('icon_url', models.CharField(blank=True, max_length=2048, null=True)), ], options={ 'abstract': False, }, bases=(parler.models.TranslatableModelMixin, models.Model), ), migrations.CreateModel( name='AuthenticationSourceTranslation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('language_code', models.CharField(db_index=True, max_length=15, verbose_name='Language')), ('title', models.CharField(max_length=2048)), ('master', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='translations', to='mpass.AuthenticationSource')), ], options={ 'managed': True, 'db_table': 'mpass_authenticationsource_translation', 'db_tablespace': '', 'default_permissions': (), 'verbose_name': 'authentication source Translation', }, ), migrations.CreateModel( name='AuthenticationTag', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('modified_at', models.DateTimeField(auto_now=True)), ('tag_id', models.CharField(max_length=128)), ], options={ 'abstract': False, }, bases=(parler.models.TranslatableModelMixin, models.Model), ), migrations.CreateModel( name='AuthenticationTagTranslation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('language_code', models.CharField(db_index=True, max_length=15, verbose_name='Language')), ('title', models.CharField(max_length=2048)), ('master', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='translations', to='mpass.AuthenticationTag')), ], options={ 'managed': True, 'db_table': 'mpass_authenticationtag_translation', 'db_tablespace': '', 'default_permissions': (), 'verbose_name': 'authentication tag Translation', }, ), migrations.AddField( model_name='authenticationsource', name='tags', field=models.ManyToManyField(blank=True, to='mpass.AuthenticationTag'), ), migrations.AlterUniqueTogether( name='authenticationtagtranslation', unique_together=set([('language_code', 'master')]), ), migrations.AlterUniqueTogether( name='authenticationsourcetranslation', unique_together=set([('language_code', 'master')]), ), ]
42.483516
180
0.58148
3,653
0.944904
0
0
0
0
0
0
914
0.23642
a3d07703df62a187a4037e7b46931b65c218c987
3,921
py
Python
dgt/inference/forward_inference.py
fractalego/dgt
6781b9445d93c4a1680ab3d5636803c81062cc67
[ "MIT" ]
3
2021-07-26T02:07:15.000Z
2021-12-21T22:36:15.000Z
dgt/inference/forward_inference.py
fractalego/dgt
6781b9445d93c4a1680ab3d5636803c81062cc67
[ "MIT" ]
null
null
null
dgt/inference/forward_inference.py
fractalego/dgt
6781b9445d93c4a1680ab3d5636803c81062cc67
[ "MIT" ]
null
null
null
import logging import random from dgt.graph.graph_matcher import GraphWeightedMatch from dgt.utils import graph_iterations _logger = logging.getLogger(__name__) def find_weight_between(s, first, last): try: start = s.index(first) + len(first) end = s.index(last, start) return s[start:end] except ValueError: return 1 def clean_between(s, first, last): try: start = s.index(first) + len(first) end = s.index(last, start) new_s = s[:start - 1] + s[end + 1:] return new_s except ValueError: return s def eliminate_spaces(line): line = line.replace(' ', '') line = line.replace('\t', '') line = line.replace('\n', '') return line class UniqueNamesModifier: def apply(self, g): from ..auxiliary import get_random_name substitution_dict = {} for v in g.vs: random_name = get_random_name() old_name = v['name'] new_name = old_name + random_name v['name'] = new_name substitution_dict[old_name] = new_name try: for v in g.vs: referring_name = v['refers_to'] if referring_name: v['refers_to'] = substitution_dict[referring_name] except Exception as e: _logger.warning("Exception while substituting refers_to ID: " + str(e)) for e in g.es: e['name'] += get_random_name() class BaseForwardInference: def compute(self): return None class ForwardInference(BaseForwardInference): _unique = UniqueNamesModifier() def __init__(self, data, knowledge, permutation_shift, max_depth=1): self.permutations = permutation_shift self.data = data self.knowledge = knowledge self._max_depth = max_depth self.permutation_shift = permutation_shift def __apply_clause_to_graph(self, rule, data, i): drs = data.copy() drs.visit(self._unique) w = 1 iterations = graph_iterations(drs._g) if not iterations: return drs, 0 drs._g = iterations[self.permutations[i] % len(iterations)] if not rule.gradient: weighted_match = GraphWeightedMatch(rule.get_hypothesis(), self.knowledge._metric, self.knowledge._relations_metric) w = drs.visit(weighted_match) is_match = drs.visit(rule) if not is_match: return drs, 0 return drs, w def _compute_step(self, data_tuple, i): """ Applies all the rules to a drs :return: all the variants of the drs after a rule match as a pair (<NEW_DRS>, <WEIGHT>) """ data = data_tuple[0] prior_w = data_tuple[1] clauses = self.knowledge.ask_rule(data) results = [] for clause_tuple in clauses: rule = clause_tuple[0] rule_weight = rule.weight prior_rules = list(data_tuple[2]) if rule in prior_rules: # A rule can be used only once per path continue drs, w = self.__apply_clause_to_graph(rule, data, i) if w > 0: prior_rules.append(rule) prior_rules.append(drs) results.append((drs, prior_w * w * rule_weight, prior_rules)) return results def compute(self): results = [] to_process = [(self.data, 1, [self.data])] for i in range(self._max_depth): new_results = [] for data_tuple in to_process: new_results += self._compute_step(data_tuple, i) if not new_results: break to_process = sorted(new_results, key=lambda x: -x[1]) results += to_process results = sorted(results, key=lambda x: -x[1]) return results
30.632813
95
0.579444
3,173
0.809232
0
0
0
0
0
0
291
0.074216
a3d0a689ffb0010c1b8ab3fafb0b2e1dd2c2562d
1,528
py
Python
serverPythonClient/client.py
ikekilinc/dnnSuperBinoculars
b0fc584b1d449961bdbab37cf9d72c0b466f197f
[ "MIT" ]
null
null
null
serverPythonClient/client.py
ikekilinc/dnnSuperBinoculars
b0fc584b1d449961bdbab37cf9d72c0b466f197f
[ "MIT" ]
null
null
null
serverPythonClient/client.py
ikekilinc/dnnSuperBinoculars
b0fc584b1d449961bdbab37cf9d72c0b466f197f
[ "MIT" ]
null
null
null
import argparse import cv2 import common # from .utils.cropAtCenter import cropImageCenter # from cropAtCenter import cropImageCenter from gabriel_client.websocket_client import WebsocketClient from gabriel_client.opencv_adapter import OpencvAdapter DEFAULT_SERVER_HOST = '128.2.212.50' DEFAULT_ZOOM_FACTOR = 10 def preprocess(frame): # return frame print(type(frame), frame.shape) width, height = frame.shape[1], frame.shape[0] left = int(width/2 * (1 - 1/DEFAULT_ZOOM_FACTOR)) top = int(height/2 * (1 - 1/DEFAULT_ZOOM_FACTOR)) right = int(width/2 * (1 + 1/DEFAULT_ZOOM_FACTOR)) bottom = int(height/2 * (1 + 1/DEFAULT_ZOOM_FACTOR)) cropped_frame = frame[top:bottom, left:right] return cropped_frame def produce_extras(): return None def consume_frame(frame, _): cv2.imshow('Image from server', frame) cv2.waitKey(1) def main(): common.configure_logging() parser = argparse.ArgumentParser() parser.add_argument( 'source_name', nargs='?', default=common.DEFAULT_SOURCE_NAME) parser.add_argument('server_host', nargs='?', default=DEFAULT_SERVER_HOST) args = parser.parse_args() capture = cv2.VideoCapture(0) opencv_adapter = OpencvAdapter( preprocess, produce_extras, consume_frame, capture, args.source_name) client = WebsocketClient( args.server_host, common.WEBSOCKET_PORT, opencv_adapter.get_producer_wrappers(), opencv_adapter.consumer) client.launch() if __name__ == '__main__': main()
25.898305
78
0.719241
0
0
0
0
0
0
0
0
180
0.117801
a3d10c9654ae4266e8db0dc3b63e312a5537bc75
719
py
Python
src/DeepCard.API/batch.py
SharsDela/BankCardRecognize
ce80589bc5a5afaba2b97b1ccab35354fb99b548
[ "Apache-2.0" ]
7
2019-09-01T13:36:52.000Z
2021-05-20T19:38:40.000Z
src/DeepCard.API/batch.py
SharsDela/BankCardRecognize
ce80589bc5a5afaba2b97b1ccab35354fb99b548
[ "Apache-2.0" ]
1
2019-09-01T13:37:50.000Z
2020-09-18T10:35:20.000Z
src/DeepCard.API/batch.py
SharsDela/BankCardRecognize
ce80589bc5a5afaba2b97b1ccab35354fb99b548
[ "Apache-2.0" ]
2
2020-02-03T01:57:36.000Z
2020-03-05T11:19:14.000Z
from api import get_result
import os
import shutil
from glob import glob
from PIL import Image

if __name__ == '__main__':
    image_files = glob('./test_images/*.*')
    result_dir = './test_results'
    if os.path.exists(result_dir):
        shutil.rmtree(result_dir)
    os.mkdir(result_dir)

    txt_file = os.path.join(result_dir, 'result.txt')
    txt_f = open(txt_file, 'w')

    for image_file in sorted(image_files):
        # Skip placeholder files such as .gitkeep
        if ".gitkeep" in image_file:
            continue
        print("Found file", image_file, end=" ")
        result = get_result(Image.open(image_file))
        print(":", result)
        txt_f.write(image_file.split('/')[-1].split('.')[0] + ':' + result + '\n')
    txt_f.close()
28.76
82
0.623088
0
0
0
0
0
0
0
0
102
0.141864
a3d15d48b5db9739108b6ecc4d1923cf6d0d654b
4,106
py
Python
CIM14/ENTSOE/Equipment/Core/Curve.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
58
2015-04-22T10:41:03.000Z
2022-03-29T16:04:34.000Z
CIM14/ENTSOE/Equipment/Core/Curve.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
12
2015-08-26T03:57:23.000Z
2020-12-11T20:14:42.000Z
CIM14/ENTSOE/Equipment/Core/Curve.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
35
2015-01-10T12:21:03.000Z
2020-09-09T08:18:16.000Z
# Copyright (C) 2010-2011 Richard Lincoln # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from CIM14.ENTSOE.Equipment.Core.IdentifiedObject import IdentifiedObject class Curve(IdentifiedObject): """A multi-purpose curve or functional relationship between an independent variable (X-axis) and dependent (Y-axis) variables. """ def __init__(self, y1Unit="A", curveStyle="straightLineYValues", xUnit="A", CurveDatas=None, *args, **kw_args): """Initialises a new 'Curve' instance. @param y1Unit: The Y1-axis units of measure. Values are: "A", "rad", "none", "g", "W/Hz", "V", "m2", "VA", "VArh", "N", "Pa", "VAh", "F", "H", "Hz-1", "W/s", "J", "m", "S", "min", "deg", "J/s", "s", "Wh", "m3", "oC", "V/VAr", "s-1", "h", "W", "ohm", "Hz", "VAr", "kg/J" @param curveStyle: The style or shape of the curve. Values are: "straightLineYValues", "rampYValue", "constantYValue", "formula" @param xUnit: The X-axis units of measure. Values are: "A", "rad", "none", "g", "W/Hz", "V", "m2", "VA", "VArh", "N", "Pa", "VAh", "F", "H", "Hz-1", "W/s", "J", "m", "S", "min", "deg", "J/s", "s", "Wh", "m3", "oC", "V/VAr", "s-1", "h", "W", "ohm", "Hz", "VAr", "kg/J" @param CurveDatas: The point data values that define a curve """ #: The Y1-axis units of measure. Values are: "A", "rad", "none", "g", "W/Hz", "V", "m2", "VA", "VArh", "N", "Pa", "VAh", "F", "H", "Hz-1", "W/s", "J", "m", "S", "min", "deg", "J/s", "s", "Wh", "m3", "oC", "V/VAr", "s-1", "h", "W", "ohm", "Hz", "VAr", "kg/J" self.y1Unit = y1Unit #: The style or shape of the curve. Values are: "straightLineYValues", "rampYValue", "constantYValue", "formula" self.curveStyle = curveStyle #: The X-axis units of measure. 
Values are: "A", "rad", "none", "g", "W/Hz", "V", "m2", "VA", "VArh", "N", "Pa", "VAh", "F", "H", "Hz-1", "W/s", "J", "m", "S", "min", "deg", "J/s", "s", "Wh", "m3", "oC", "V/VAr", "s-1", "h", "W", "ohm", "Hz", "VAr", "kg/J" self.xUnit = xUnit self._CurveDatas = [] self.CurveDatas = [] if CurveDatas is None else CurveDatas super(Curve, self).__init__(*args, **kw_args) _attrs = ["y1Unit", "curveStyle", "xUnit"] _attr_types = {"y1Unit": str, "curveStyle": str, "xUnit": str} _defaults = {"y1Unit": "A", "curveStyle": "straightLineYValues", "xUnit": "A"} _enums = {"y1Unit": "UnitSymbol", "curveStyle": "CurveStyle", "xUnit": "UnitSymbol"} _refs = ["CurveDatas"] _many_refs = ["CurveDatas"] def getCurveDatas(self): """The point data values that define a curve """ return self._CurveDatas def setCurveDatas(self, value): for x in self._CurveDatas: x.Curve = None for y in value: y._Curve = self self._CurveDatas = value CurveDatas = property(getCurveDatas, setCurveDatas) def addCurveDatas(self, *CurveDatas): for obj in CurveDatas: obj.Curve = self def removeCurveDatas(self, *CurveDatas): for obj in CurveDatas: obj.Curve = None
52.641026
277
0.614467
2,929
0.713346
0
0
0
0
0
0
2,928
0.713103
a3d2324b7f134c8871f8f82a96cc6abc0a30b3ea
2,432
py
Python
fluent/syntax/errors.py
unclenachoduh/python-fluent
1d15bdc94a37ecb488a80aefcdd37b8cb5535f73
[ "Apache-2.0" ]
null
null
null
fluent/syntax/errors.py
unclenachoduh/python-fluent
1d15bdc94a37ecb488a80aefcdd37b8cb5535f73
[ "Apache-2.0" ]
null
null
null
fluent/syntax/errors.py
unclenachoduh/python-fluent
1d15bdc94a37ecb488a80aefcdd37b8cb5535f73
[ "Apache-2.0" ]
null
null
null
from __future__ import unicode_literals class ParseError(Exception): def __init__(self, code, *args): self.code = code self.args = args self.message = get_error_message(code, args) def get_error_message(code, args): if code == 'E00001': return 'Generic error' if code == 'E0002': return 'Expected an entry start' if code == 'E0003': return 'Expected token: "{}"'.format(args[0]) if code == 'E0004': return 'Expected a character from range: "{}"'.format(args[0]) if code == 'E0005': msg = 'Expected message "{}" to have a value or attributes' return msg.format(args[0]) if code == 'E0006': msg = 'Expected term "{}" to have a value' return msg.format(args[0]) if code == 'E0007': return 'Keyword cannot end with a whitespace' if code == 'E0008': return 'The callee has to be a simple, upper-case identifier' if code == 'E0009': return 'The key has to be a simple identifier' if code == 'E0010': return 'Expected one of the variants to be marked as default (*)' if code == 'E0011': return 'Expected at least one variant after "->"' if code == 'E0012': return 'Expected value' if code == 'E0013': return 'Expected variant key' if code == 'E0014': return 'Expected literal' if code == 'E0015': return 'Only one variant can be marked as default (*)' if code == 'E0016': return 'Message references cannot be used as selectors' if code == 'E0017': return 'Variants cannot be used as selectors' if code == 'E0018': return 'Attributes of messages cannot be used as selectors' if code == 'E0019': return 'Attributes of terms cannot be used as placeables' if code == 'E0020': return 'Unterminated string expression' if code == 'E0021': return 'Positional arguments must not follow named arguments' if code == 'E0022': return 'Named arguments must be unique' if code == 'E0023': return 'VariantLists are only allowed inside of other VariantLists.' if code == 'E0024': return 'Cannot access variants of a message.' if code == 'E0025': return 'Unknown escape sequence: {}'.format(args[0]) if code == 'E0026': return 'Invalid Unicode escape sequence: {}'.format(args[0]) return code
36.298507
76
0.60773
168
0.069079
0
0
0
0
0
0
1,178
0.484375
a3d28839e9a9ab62ac7936ca858e4cb438e092b3
16,562
py
Python
tests/test_mag.py
jdddog/mag-archiver
079e735e610d6b81b3ac8dc479d4f93bb0aacb11
[ "Apache-2.0" ]
null
null
null
tests/test_mag.py
jdddog/mag-archiver
079e735e610d6b81b3ac8dc479d4f93bb0aacb11
[ "Apache-2.0" ]
null
null
null
tests/test_mag.py
jdddog/mag-archiver
079e735e610d6b81b3ac8dc479d4f93bb0aacb11
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Curtin University # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Author: James Diprose import os import unittest from unittest.mock import patch import pendulum from azure.common import AzureMissingResourceHttpError from azure.cosmosdb.table.tableservice import TableService from azure.storage.blob import ContainerProperties from mag_archiver.azure import create_table from mag_archiver.mag import make_mag_query, MagState, MagDateType, MagRelease, MagTask, MagArchiverClient, \ hide_if_not_none class TestMag(unittest.TestCase): def test_hide_if_not_none(self): # Test that None is returned for None value = hide_if_not_none(None) self.assertEqual(value, None) # Test that 'hidden' is returned: string value = hide_if_not_none('hello world') self.assertEqual(value, 'hidden') # Test that 'hidden' is returned: integer value = hide_if_not_none(123) self.assertEqual(value, 'hidden') def test_make_mag_query(self): start_date = pendulum.datetime(year=2020, month=4, day=1) end_date = pendulum.datetime(year=2020, month=5, day=1) # No parameters query = make_mag_query() self.assertEqual(query, '') # State parameter query = make_mag_query(state=MagState.discovered) self.assertEqual(query, "State eq 'discovered'") query = make_mag_query(state=MagState.archived) self.assertEqual(query, "State eq 'archived'") query = make_mag_query(state=MagState.done) self.assertEqual(query, "State eq 'done'") # Start date parameter query = make_mag_query(start_date=start_date, date_type=MagDateType.release) self.assertEqual(query, "ReleaseDate ge datetime'2020-04-01T00:00Z'") query = make_mag_query(start_date=start_date, date_type=MagDateType.discovered) self.assertEqual(query, "DiscoveredDate ge datetime'2020-04-01T00:00Z'") query = make_mag_query(start_date=start_date, date_type=MagDateType.archived) self.assertEqual(query, "ArchivedDate ge datetime'2020-04-01T00:00Z'") query = make_mag_query(start_date=start_date, date_type=MagDateType.done) self.assertEqual(query, "DoneDate ge datetime'2020-04-01T00:00Z'") # End date parameter query = make_mag_query(end_date=end_date, date_type=MagDateType.release) self.assertEqual(query, "ReleaseDate lt datetime'2020-05-01T00:00Z'") query = make_mag_query(end_date=end_date, date_type=MagDateType.discovered) self.assertEqual(query, "DiscoveredDate lt datetime'2020-05-01T00:00Z'") query = make_mag_query(end_date=end_date, date_type=MagDateType.archived) self.assertEqual(query, "ArchivedDate lt datetime'2020-05-01T00:00Z'") query = make_mag_query(end_date=end_date, date_type=MagDateType.done) self.assertEqual(query, "DoneDate lt datetime'2020-05-01T00:00Z'") # Start date, end date and date type query = make_mag_query(start_date=start_date, end_date=end_date, date_type=MagDateType.release) self.assertEqual(query, "ReleaseDate ge datetime'2020-04-01T00:00Z' and ReleaseDate lt " "datetime'2020-05-01T00:00Z'") query = make_mag_query(start_date=start_date, end_date=end_date, date_type=MagDateType.discovered) self.assertEqual(query, "DiscoveredDate ge datetime'2020-04-01T00:00Z' and 
DiscoveredDate lt " "datetime'2020-05-01T00:00Z'") query = make_mag_query(start_date=start_date, end_date=end_date, date_type=MagDateType.archived) self.assertEqual(query, "ArchivedDate ge datetime'2020-04-01T00:00Z' and ArchivedDate lt " "datetime'2020-05-01T00:00Z'") query = make_mag_query(start_date=start_date, end_date=end_date, date_type=MagDateType.done) self.assertEqual(query, "DoneDate ge datetime'2020-04-01T00:00Z' and DoneDate lt " "datetime'2020-05-01T00:00Z'") # State, start date, end date and date type query = make_mag_query(state=MagState.discovered, start_date=start_date, end_date=end_date, date_type=MagDateType.discovered) self.assertEqual(query, "State eq 'discovered' and DiscoveredDate ge datetime'2020-04-01T00:00Z' " "and DiscoveredDate lt datetime'2020-05-01T00:00Z'") query = make_mag_query(state=MagState.archived, start_date=start_date, end_date=end_date, date_type=MagDateType.archived) self.assertEqual(query, "State eq 'archived' and ArchivedDate ge datetime'2020-04-01T00:00Z' " "and ArchivedDate lt datetime'2020-05-01T00:00Z'") query = make_mag_query(state=MagState.done, start_date=start_date, end_date=end_date, date_type=MagDateType.done) self.assertEqual(query, "State eq 'done' and DoneDate ge datetime'2020-04-01T00:00Z' " "and DoneDate lt datetime'2020-05-01T00:00Z'") def make_mag_release(account_name: str, account_key: str, year: int, month: int, day: int): min_date = pendulum.datetime(1601, 1, 1) partition_key_ = 'mag' row_key_ = f'mag-{year:0>4d}-{month:0>2d}-{day:0>2d}' state_ = MagState.discovered task_ = MagTask.not_started release_date_ = pendulum.datetime(year=year, month=month, day=day) source_container_ = row_key_ source_container_last_modified_ = pendulum.datetime(year=year, month=month, day=day, hour=1) release_container_ = '' release_path_ = '' discovered_date_ = pendulum.datetime(year=year, month=month, day=day, hour=2) archived_date_ = min_date done_date_ = min_date return MagRelease(partition_key_, row_key_, state_, task_, release_date_, source_container_, source_container_last_modified_, release_container_, release_path_, discovered_date_, archived_date_, done_date_, account_name=account_name, account_key=account_key) class TestMagRelease(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestMagRelease, self).__init__(*args, **kwargs) self.account_name = os.getenv('STORAGE_ACCOUNT_NAME') self.account_key = os.getenv('STORAGE_ACCOUNT_KEY') create_table(self.account_name, self.account_key, MagRelease.TABLE_NAME) def test_secrets_hidden(self): # Check that account key is hidden account_name = 'myaccountname' secret = 'secret' # Check that account_key and sas_token are hidden release = make_mag_release(account_name, secret, 2020, 1, 1) self.assertIn('account_key=hidden', release.__repr__()) self.assertNotIn(secret, release.__str__()) self.assertNotIn(secret, release.__repr__()) # Check that account_key is None release = make_mag_release(account_name, None, 2020, 1, 1) self.assertIn('account_key=None', release.__repr__()) def test_create(self): release = make_mag_release(self.account_name, self.account_key, 2019, 6, 1) try: success = release.create() self.assertTrue(success) finally: release.delete() def test_delete(self): release = make_mag_release(self.account_name, self.account_key, 2019, 6, 1) # Check that we can create and then delete release.create() release.delete() # Check that second delete fails with self.assertRaises(AzureMissingResourceHttpError): release.delete() def test_update(self): release = 
make_mag_release(self.account_name, self.account_key, 2019, 6, 1) try: release.create() # Update release release.state = MagState.archived release.archived_date = pendulum.utcnow().microsecond_(0) release.update() # Verify that release is updated service = TableService(account_name=self.account_name, account_key=self.account_key) entity = service.get_entity(MagRelease.TABLE_NAME, release.partition_key, release.row_key) updated_release = MagRelease.from_entity(entity) self.assertEqual(release.state, updated_release.state) self.assertEqual(release.archived_date, updated_release.archived_date) finally: release.delete() def make_containers(): containers = [] cp1 = ContainerProperties() cp1.name = 'mag-2020-04-17' cp1.last_modified = pendulum.datetime(year=2020, month=4, day=18) containers.append(cp1) cp3 = ContainerProperties() cp3.name = 'mag-2020-05-01' cp3.last_modified = pendulum.datetime(year=2020, month=5, day=1) containers.append(cp3) cp2 = ContainerProperties() cp2.name = 'mag-2020-04-24' cp2.last_modified = pendulum.datetime(year=2020, month=4, day=25) containers.append(cp2) return containers class TestMagArchiverClient(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestMagArchiverClient, self).__init__(*args, **kwargs) self.account_name = os.getenv('STORAGE_ACCOUNT_NAME') self.account_key = os.getenv('STORAGE_ACCOUNT_KEY') create_table(self.account_name, self.account_key, MagRelease.TABLE_NAME) def test_secrets_hidden(self): # Check that account key is hidden account_name = 'myaccountname' secret = 'secret' # Check that account_key and sas_token are hidden client = MagArchiverClient(account_name=account_name, account_key=secret, sas_token=secret) expected = f'MagArchiverClient(account_name={account_name}, account_key=hidden, sas_token=hidden)' self.assertEqual(client.__str__(), expected) self.assertEqual(client.__repr__(), expected) self.assertNotIn(secret, client.__str__()) self.assertNotIn(secret, client.__repr__()) # Check that account_key and sas_token are None client = MagArchiverClient(account_name=account_name) expected = f'MagArchiverClient(account_name={account_name}, account_key=None, sas_token=None)' self.assertEqual(client.__str__(), expected) self.assertEqual(client.__repr__(), expected) @patch('mag_archiver.mag.list_containers') @patch('pendulum.datetime.now') def test_list_containers(self, mock_now, mock_list_containers): # Mock time mock_now.return_value = pendulum.datetime(year=2020, month=5, day=1, minute=10) # Mock containers containers_in = make_containers() mock_list_containers.return_value = containers_in # Test that 2 containers are returned when last_modified_thresh=1 client = MagArchiverClient(account_name=self.account_name, account_key=self.account_key) containers_out = client.list_containers(last_modified_thresh=1) self.assertEqual(len(containers_out), 2) # Test that 3 containers are returned when last_modified_thresh=0 containers_out = client.list_containers(last_modified_thresh=0) self.assertEqual(len(containers_out), 3) # Test sort order reverse=False self.assertEqual(containers_in[0].name, containers_out[0].name) self.assertEqual(containers_in[2].name, containers_out[1].name) self.assertEqual(containers_in[1].name, containers_out[2].name) # Test sort order reverse=True containers_out = client.list_containers(last_modified_thresh=0, reverse=True) self.assertEqual(len(containers_out), 3) self.assertEqual(containers_in[1].name, containers_out[0].name) self.assertEqual(containers_in[2].name, containers_out[1].name) 
self.assertEqual(containers_in[0].name, containers_out[2].name) @patch('mag_archiver.mag.list_containers') @patch('pendulum.datetime.now') def test_update_releases(self, mock_now, mock_list_containers): # Mock time mock_now.return_value = pendulum.datetime(year=2020, month=5, day=1, minute=10) # Mock containers containers_in = make_containers() mock_list_containers.return_value = containers_in # Mock fetching of containers client = MagArchiverClient(account_name=self.account_name, account_key=self.account_key) containers = client.list_containers(last_modified_thresh=1) try: # Update releases based on containers num_updated, num_errors = client.update_releases(containers) self.assertEqual(num_updated, 2) self.assertEqual(num_errors, 0) finally: # Clean up service = TableService(account_name=self.account_name, account_key=self.account_key) for container in containers: service.delete_entity(MagRelease.TABLE_NAME, 'mag', container.name.replace("mag-", "")) @patch('mag_archiver.mag.list_containers') @patch('pendulum.datetime.now') def test_list_releases(self, mock_now, mock_list_containers): # Mock time mock_now.return_value = pendulum.datetime(year=2020, month=5, day=1, hour=1) # Mock containers containers_in = make_containers() mock_list_containers.return_value = containers_in # Mock fetching of containers client = MagArchiverClient(account_name=self.account_name, account_key=self.account_key) containers = client.list_containers(last_modified_thresh=1) try: # Update releases based on containers num_updated, num_errors = client.update_releases(containers) self.assertEqual(num_updated, 3) self.assertEqual(num_errors, 0) # Two releases start_date = pendulum.datetime(year=2020, month=4, day=17) end_date = pendulum.datetime(year=2020, month=5, day=1) releases = client.list_releases(start_date=start_date, end_date=end_date, state=MagState.discovered, date_type=MagDateType.release) self.assertEqual(len(releases), 2) # 1 release start_date = pendulum.datetime(year=2020, month=4, day=17, minute=1) end_date = pendulum.datetime(year=2020, month=5, day=1) releases = client.list_releases(start_date=start_date, end_date=end_date, state=MagState.discovered, date_type=MagDateType.release) self.assertEqual(len(releases), 1) # Three releases start_date = pendulum.datetime(year=2020, month=4, day=17) end_date = pendulum.datetime(year=2020, month=5, day=1, minute=1) releases = client.list_releases(start_date=start_date, end_date=end_date, state=MagState.discovered, date_type=MagDateType.release, reverse=False) self.assertEqual(len(releases), 3) # Sorting reverse=False self.assertEqual(releases[0].row_key, '2020-04-17') self.assertEqual(releases[1].row_key, '2020-04-24') self.assertEqual(releases[2].row_key, '2020-05-01') # Sorting reverse=True releases = client.list_releases(start_date=start_date, end_date=end_date, state=MagState.discovered, date_type=MagDateType.release, reverse=True) self.assertEqual(releases[0].row_key, '2020-05-01') self.assertEqual(releases[1].row_key, '2020-04-24') self.assertEqual(releases[2].row_key, '2020-04-17') finally: # Clean up service = TableService(account_name=self.account_name, account_key=self.account_key) for container in containers: service.delete_entity(MagRelease.TABLE_NAME, 'mag', container.name.replace("mag-", ""))
45.128065
112
0.676368
13,993
0.844886
0
0
5,643
0.34072
0
0
3,619
0.218512
a3d2e1e0e46f7f6e0817c75f138edaf65c103137
14,084
py
Python
twitterinfrastructure/CH-Data-Public.py
jacob-heglund/socialsensing-jh
fd6d2d749f40fee46bee749ff868212bf117a747
[ "BSD-2-Clause", "MIT" ]
null
null
null
twitterinfrastructure/CH-Data-Public.py
jacob-heglund/socialsensing-jh
fd6d2d749f40fee46bee749ff868212bf117a747
[ "BSD-2-Clause", "MIT" ]
null
null
null
twitterinfrastructure/CH-Data-Public.py
jacob-heglund/socialsensing-jh
fd6d2d749f40fee46bee749ff868212bf117a747
[ "BSD-2-Clause", "MIT" ]
null
null
null
''' Created on Mar 22, 2018 Edited on Jan 11, 2019 @author: npvance2 @author: curtisd2 Variables that will need to be edited/personalized: monitorID in Variables() (line 27) projectStartDate in Variables() (line 28) projectEndDate in Variables() (line 29) authToken in getAuthToken() (line 49) consumer_key in twitterAPI() (line 62) consumer_secret in twitterAPI() (line 63) access_token in twitterAPI() (line 64) access_secret in twitterAPI() (line 65) ''' from datetime import date, timedelta import urllib.request import json import csv import tweepy from tweepy import OAuthHandler def Variables(): monitorID = "9926183772" # The numerical ID for your Crimson Hexagon monitor startDate = "yyyy-mm-dd" # Date must be in yyyy-mm-dd format endDate = "yyyy-mm-dd" # Date must be in yyyy-mm-dd format variableMap = {} variableMap['monitorID'] = monitorID variableMap['startDate'] = startDate variableMap['endDate'] = endDate return variableMap def getURL(): #provides URL for Crimson API urlStart = "https://api.crimsonhexagon.com/api" return urlStart ########### # # You'll need to generate your own Crimson API key/token from here: # https://apidocs.crimsonhexagon.com/reference # ########### def getAuthToken(): #provides auth token needed to access Crimson API authToken = '' authToken = "&auth="+authToken return authToken ########### # # You'll need to add your own Twitter API keys here. # Instructions on generating API keys: https://developer.twitter.com/en/docs/basics/authentication/guides/access-tokens.html # API reference guide: https://developer.twitter.com/en/docs/api-reference-index.html # ########### def twitterAPI(): #Provides access keys for Twitter API consumer_key = '2S1Z7Giq0oOf3w0R0sJUPnLFx' consumer_secret = '9IPOE8dqWzUPseAPHeNxTTv1jAr9BNj8mF2ryw8DIud8Ot8VCe' access_token = '998275516892409858-hQ1pk5wKg1YyxUrbiFkuFHKHqztPMNE' access_secret = 'gsXqGx1gU93HkKNDupTPt56ZnAmmalsaSNBUuoBToraBw' if (consumer_key == '') or (consumer_secret =='') or (access_token =='') or (access_secret ==''): print("Not all Twitter keys have been entered, please add them to the script and try again") auth = OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_secret) api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True) return api def getTwitterURL(): #provides URL for Twitter api urlStart = "https://api.twitter.com/1.1/statuses/lookup.json?id=" return urlStart def DatePull(startdate, enddate): listArray = [] startdate = date(int(startdate[0:4]), int(startdate[5:7]), int(startdate[8:10])) enddate = date(int(enddate[0:4]), int(enddate[5:7]), int(enddate[8:10])) while startdate <= enddate: listArray.append(str(startdate)) startdate += timedelta(days=1) return listArray def main(): monitorID = Variables()['monitorID'] projectStartDate = Variables()['startDate'] projectEndDate = Variables()['endDate'] fPath = "Monitor-"+monitorID+'-from-'+projectStartDate+'-to-'+projectEndDate+'.csv' lineArray = DatePull(projectStartDate, projectEndDate) print("------------------------------") print("MonitorID is "+monitorID) print(lineArray[0],lineArray[-1]) with open(fPath, 'w', newline = '', encoding = 'utf-8') as f: writer = csv.writer(f) header = ["PostType","PostDate","PostTime","URL","TweetID","Contents","RetweetCount","FavoriteCount","Location","Language","Sentiment","NeutralScore","PositiveScore","NegativeScore","Followers","Friends","Author","AuthorGender","AuthorTweets"] writer.writerow(header) for i in range(len(lineArray)-1): print(lineArray[i]) startDate = 
lineArray[i] endDate = lineArray[i+1] dates = "&start="+startDate+"&end="+endDate #Combines start and end date into format needed for API call urlStart = getURL() #Gets URL authToken = getAuthToken() #Gets auth token endpoint = "/monitor/posts?id="; #endpoint needed for this query extendLimit = "&extendLimit=true" #extends call number from 500 to 10,000 fullContents = "&fullContents=true" #Brings back full contents for Blog and Tumblr posts which are usually truncated around search keywords. This can occasionally disrupt CSV formatting. urlData = urlStart+endpoint+monitorID+authToken+dates+extendLimit+fullContents #Combines all API calls parts into full URL webURL = urllib.request.urlopen(urlData) if (webURL.getcode() == 200): with open(fPath, 'a', newline='', encoding='utf-8') as f: writer = csv.writer(f) data = webURL.read().decode('utf8') theJSON = json.loads(data) postDates = [] #These initialize the attributes of the final output postTimes = [] urls = [] contents = [] authors = [] authorGenders = [] locations = [] languages = [] postTypes = [] sentiments = [] neutralScore = [] positiveScore = [] negativeScore = [] tweetIDs = [] followers = [] friends = [] retweetCounts = [] favoritesCount = [] statusesCount = [] tweetCount = 0 tempTweetIDs = [] api = twitterAPI() c = 0 for i in theJSON["posts"]: postDates.append("") postTimes.append("") if ('date' in i): #identifies date posted tempDate = str(i["date"]) dateTime = tempDate.split("T") postDates[c] = dateTime[0] postTimes[c] = dateTime[1] urls.append(i["url"]) contents.append("") if ('contents' in i): #identifies post contents contents[c] = i["contents"].replace(",","").replace("\n"," ") #replaces commas and new lines to facilitate CSV formatting, this occasionally missed new lines in some blog posts which I'm working to fix authors.append("") if ('author' in i): #identifies author authors[c] = i["author"].replace(",","") authorGenders.append("") if ('authorGender' in i): #identifies author gender authorGenders[c] = i["authorGender"] locations.append("") if ('location' in i): #identifies location locations[c] = i["location"].replace(",","") languages.append("") if ('language' in i): #identifies language specified in the author's profile languages[c] = i["language"] postTypes.append(i["type"]) #identifies the type of post, i.e. 
Twitter, Tumblr, Blog tweetIDs.append("") followers.append("") friends.append("") retweetCounts.append("") favoritesCount.append("") statusesCount.append("") if postTypes[c] == "Twitter": #if the post type is Twitter it goes through more processing tweetCount = tweetCount + 1 #counts number of tweets tweetSplit = urls[c].split("status/") #splits URL to get tweetID tweetIDs[c] = tweetSplit[1] tempTweetIDs.append(tweetIDs[c]) if tweetCount == 100: #the max number of TweetIDs in one API call is 100 so a call is run every 100 tweets identified tweepys = api.statuses_lookup(id_=tempTweetIDs) #call to Twitter API for tweet in tweepys: tempID = tweet.id_str #finds tweetsID postMatch = 0 for idMatch in tweetIDs: if idMatch==tempID: #matches tweetID in Twitter API call to tweetID stored from Crimson API tempDate = str(tweet.created_at).replace(" "," ") #These all fill the matching Crimson attributes to those found in the Twitter API dateTime = tempDate.split(" ") postDates[postMatch] = dateTime[0] postTimes[postMatch] = dateTime[1] contents[postMatch] = tweet.text.replace(",","") authors[postMatch] = tweet.author.screen_name followers[postMatch] = str(tweet.author.followers_count) friends[postMatch] = str(tweet.author.friends_count) retweetCounts[postMatch] = str(tweet.retweet_count) favoritesCount[postMatch] = str(tweet.favorite_count) statusesCount[postMatch] = str(tweet.author.statuses_count) postMatch = postMatch + 1 tweetCount = 0 #clears tweet count for a new 100 tempTweetIDs = [] #clears tweetIDs for next call sentiments.append("") neutralScore.append("") positiveScore.append("") negativeScore.append("") if ('categoryScores' in i): #finds sentiment value and matching attribute for l in i["categoryScores"]: catName = l["categoryName"] if catName == "Basic Neutral": neutralScore[c] = l["score"] elif catName =="Basic Positive": positiveScore[c] = l["score"] elif catName == "Basic Negative": negativeScore[c] = l["score"] if neutralScore[c] > positiveScore[c] and neutralScore[c] > negativeScore[c]: sentiments[c] = "Basic Neutral" if positiveScore[c] > neutralScore[c] and positiveScore[c] > negativeScore[c]: sentiments[c] = "Basic Positive" if negativeScore[c] > positiveScore[c] and negativeScore[c] > neutralScore[c]: sentiments[c] = "Basic Negative" c = c + 1 if len(tempTweetIDs) != 0: #after loop the Twitter API call must run one more time to clean up all the tweets since the last 100 try: tweepys = api.statuses_lookup(id_=tempTweetIDs) for tweet in tweepys: tempID = tweet.id_str postMatch = 0 for idMatch in tweetIDs: if idMatch==tempID: tempDate = str(tweet.created_at).replace(" "," ") dateTime = tempDate.split(" ") postDates[postMatch] = dateTime[0] postTimes[postMatch] = dateTime[1] contents[postMatch] = tweet.text.replace(",","") authors[postMatch] = tweet.author.screen_name followers[postMatch] = str(tweet.author.followers_count) friends[postMatch] = str(tweet.author.friends_count) retweetCounts[postMatch] = str(tweet.retweet_count) favoritesCount[postMatch] = str(tweet.favorite_count) statusesCount[postMatch] = str(tweet.author.statuses_count) postMatch = postMatch + 1 tweetCount = 0 except: print("Tweepy error: skipping cleanup") pC = 0 for pDate in postDates: #iterates through the word lists and prints matching posts to CSV csvRow=[postTypes[pC], pDate, postTimes[pC], urls[pC], str(tweetIDs[pC]), contents[pC].replace("\n"," "), retweetCounts[pC], favoritesCount[pC], locations[pC], languages[pC], sentiments[pC], str(neutralScore[pC]), str(positiveScore[pC]), str(negativeScore[pC]), 
followers[pC], friends[pC], authors[pC], authorGenders[pC], statusesCount[pC]] writer.writerow(csvRow) pC = pC + 1 else: print("Server Error, No Data" + str(webURL.getcode())) #displays error if Crimson URL fails if __name__ == '__main__': main()
47.103679
360
0.510934
0
0
0
0
0
0
0
0
4,016
0.285146
a3d3652391aca6bc7ecc488069329c58736eb71f
1,286
py
Python
roles/slurm/files/startnode.py
danhnguyen48/slurm-elastic-computing
0793cf23677169a6d9dceea0793118bc00c0913e
[ "MIT" ]
null
null
null
roles/slurm/files/startnode.py
danhnguyen48/slurm-elastic-computing
0793cf23677169a6d9dceea0793118bc00c0913e
[ "MIT" ]
null
null
null
roles/slurm/files/startnode.py
danhnguyen48/slurm-elastic-computing
0793cf23677169a6d9dceea0793118bc00c0913e
[ "MIT" ]
null
null
null
#! /opt/cloud_sdk/bin/python

import asyncio
import logging
import subprocess
import sys

import citc_cloud


def handle_exception(exc_type, exc_value, exc_traceback):
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    log.critical("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))


async def main() -> None:
    nodespace = citc_cloud.get_nodespace()

    keys_file = "/home/slurm/opc_authorized_keys"

    with open(keys_file) as kf:
        ssh_keys = kf.read()

    hosts = subprocess.run(["scontrol", "show", "hostnames", sys.argv[1]], stdout=subprocess.PIPE).stdout.decode().split()

    await asyncio.gather(*(
        citc_cloud.start_node(log, host, nodespace, ssh_keys) for host in hosts
    ))


sys.excepthook = handle_exception

if __name__ == "__main__":
    log = logging.getLogger("startnode")
    log.setLevel(logging.INFO)
    handler = logging.FileHandler('/var/log/slurm/elastic.log')
    formatter = logging.Formatter('%(asctime)s %(name)-10s %(levelname)-8s %(message)s')
    handler.setFormatter(formatter)
    log.addHandler(handler)

    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(main())
    finally:
        loop.close()
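# --- Usage sketch (editor's addition, not part of startnode.py) ---------------
# Slurm's ResumeProgram calls this script with a hostlist expression in argv[1]
# (e.g. "compute-[0-2]"); the script expands it via `scontrol show hostnames`.
# The standalone snippet below reproduces just that expansion step; the node
# range is hypothetical and a host with Slurm's scontrol is assumed.
import subprocess

example_hostlist = "compute-[0-2]"  # hypothetical node range
expanded = subprocess.run(
    ["scontrol", "show", "hostnames", example_hostlist],
    stdout=subprocess.PIPE,
).stdout.decode().split()
print(expanded)  # expected: ['compute-0', 'compute-1', 'compute-2']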
26.244898
122
0.694401
0
0
0
0
0
0
430
0.33437
210
0.163297
a3d474d2b653dcd5a9578ce3979ff7a04e191213
2,300
py
Python
tests/pyre/components/component_class_registration_model.py
BryanRiel/pyre
179359634a7091979cced427b6133dd0ec4726ea
[ "BSD-3-Clause" ]
null
null
null
tests/pyre/components/component_class_registration_model.py
BryanRiel/pyre
179359634a7091979cced427b6133dd0ec4726ea
[ "BSD-3-Clause" ]
null
null
null
tests/pyre/components/component_class_registration_model.py
BryanRiel/pyre
179359634a7091979cced427b6133dd0ec4726ea
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2018 all rights reserved
#


"""
Verify that component registration interacts correctly with the pyre configurator model
"""

# access
# print(" -- importing pyre")
import pyre
# print(" -- done")


def declare():

    # declare a protocol
    class protocol(pyre.protocol):
        """a protocol"""
        # properties
        p1 = pyre.properties.str()
        p2 = pyre.properties.str()
        # behavior
        @pyre.provides
        def do(self):
            """behave"""

    # declare a component
    class component(pyre.component, family="test", implements=protocol):
        """a component"""
        # traits
        p1 = pyre.properties.str(default="p1")
        p2 = pyre.properties.str(default="p2")

        @pyre.export
        def do(self):
            """behave"""
            return "component"

    return component


def test():
    # and the model
    model = pyre.executive.nameserver
    # model.dump(pattern='test')

    # print(" -- making some configuration changes")
    # add an assignment
    model['test.p1'] = 'step 1'
    # an alias
    model.alias(alias='p1', target='test.p1')
    # and a reference to the alias
    model['ref'] = '{p1}'
    # check that they point to the same slot
    assert model.retrieve(name='p1') == model.retrieve(name='test.p1')
    # save the nodes
    ref = model.retrieve(name='ref')
    step_0 = model.retrieve(name='test.p1')

    # now declare the component and its protocol
    # print(" -- declaring components")
    component = declare()
    # print(" -- done")
    # model.dump(pattern='')

    assert component.p1 == 'step 1'
    assert component.p2 == 'p2'

    # check that the model is as we expect
    # model.dump()
    assert model['test.p1'] == component.p1
    assert model['test.p2'] == component.p2
    # how about the alias and the reference?
    assert model['ref'] == component.p1
    assert model['p1'] == component.p1

    # make a late registration to what is now the component trait
    model['test.p2'] = 'step 2'
    # model.dump(pattern='test')
    # and check
    assert component.p1 == 'step 1'
    assert component.p2 == 'step 2'

    return


# main
if __name__ == "__main__":
    test()


# end of file
22.772277
87
0.59913
540
0.234579
0
0
151
0.065595
0
0
1,111
0.482624
a3d5083187f3606549524985d8222291ba30b943
4,199
py
Python
tests/unit/transport/plugins/asyncssh/test_asyncssh_transport.py
carlmontanari/nssh
fa2277ea0b8fdb81de3064e1d48bad9264f0cd64
[ "MIT" ]
1
2020-02-09T17:43:43.000Z
2020-02-09T17:43:43.000Z
tests/unit/transport/plugins/asyncssh/test_asyncssh_transport.py
carlmontanari/nssh
fa2277ea0b8fdb81de3064e1d48bad9264f0cd64
[ "MIT" ]
null
null
null
tests/unit/transport/plugins/asyncssh/test_asyncssh_transport.py
carlmontanari/nssh
fa2277ea0b8fdb81de3064e1d48bad9264f0cd64
[ "MIT" ]
null
null
null
import asyncio
from io import BytesIO

import pytest
from asyncssh.connection import SSHClientConnection
from asyncssh.stream import SSHReader

from scrapli.exceptions import ScrapliConnectionNotOpened, ScrapliTimeout


class DumbContainer:
    def __init__(self):
        self.preferred_auth = ()

    def __getattr__(self, item):
        # options has a billion attributes, just return None, doesnt matter for this test
        return None


def test_close(monkeypatch, asyncssh_transport):
    def _close(cls):
        pass

    monkeypatch.setattr(
        "asyncssh.connection.SSHClientConnection.close",
        _close,
    )

    # lie and pretend the session is already assigned
    options = DumbContainer()
    asyncssh_transport.session = SSHClientConnection(
        loop=asyncio.get_event_loop_policy().get_event_loop(), options=options
    )

    asyncssh_transport.close()

    assert asyncssh_transport.session is None
    assert asyncssh_transport.stdin is None
    assert asyncssh_transport.stdout is None


def test_close_catch_brokenpipe(monkeypatch, asyncssh_transport):
    def _close(cls):
        raise BrokenPipeError

    monkeypatch.setattr(
        "asyncssh.connection.SSHClientConnection.close",
        _close,
    )

    # lie and pretend the session is already assigned
    options = DumbContainer()
    asyncssh_transport.session = SSHClientConnection(
        loop=asyncio.get_event_loop_policy().get_event_loop(), options=options
    )

    asyncssh_transport.close()

    assert asyncssh_transport.session is None
    assert asyncssh_transport.stdin is None
    assert asyncssh_transport.stdout is None


def test_isalive_no_session(asyncssh_transport):
    assert asyncssh_transport.isalive() is False


def test_isalive(asyncssh_transport):
    # lie and pretend the session is already assigned
    options = DumbContainer()
    asyncssh_transport.session = SSHClientConnection(
        loop=asyncio.get_event_loop_policy().get_event_loop(), options=options
    )

    # lie and tell asyncssh auth is done
    asyncssh_transport.session._auth_complete = True

    # also have to lie and create a transport and have it return False when is_closing is called
    asyncssh_transport.session._transport = DumbContainer()
    asyncssh_transport.session._transport.is_closing = lambda: False

    assert asyncssh_transport.isalive() is True


def test_isalive_attribute_error(asyncssh_transport):
    # lie and pretend the session is already assigned
    options = DumbContainer()
    asyncssh_transport.session = SSHClientConnection(
        loop=asyncio.get_event_loop_policy().get_event_loop(), options=options
    )

    # lie and tell asyncssh auth is done
    asyncssh_transport.session._auth_complete = True

    assert asyncssh_transport.isalive() is False


async def test_read(monkeypatch, asyncssh_transport):
    async def _read(cls, _):
        return b"somebytes"

    monkeypatch.setattr(
        "asyncssh.stream.SSHReader.read",
        _read,
    )

    # lie and pretend the session is already assigned/stdout is already a thing
    asyncssh_transport.stdout = SSHReader("", "")

    assert await asyncssh_transport.read() == b"somebytes"


async def test_read_exception_not_open(asyncssh_transport):
    with pytest.raises(ScrapliConnectionNotOpened):
        await asyncssh_transport.read()


async def test_read_exception_timeout(monkeypatch, asyncssh_transport):
    async def _read(cls, _):
        await asyncio.sleep(0.5)

    monkeypatch.setattr(
        "asyncssh.stream.SSHReader.read",
        _read,
    )

    # lie and pretend the session is already assigned/stdout is already a thing
    asyncssh_transport.stdout = SSHReader("", "")

    asyncssh_transport._base_transport_args.timeout_transport = 0.1

    with pytest.raises(ScrapliTimeout):
        await asyncssh_transport.read()


def test_write(asyncssh_transport):
    asyncssh_transport.stdin = BytesIO()

    asyncssh_transport.write(b"blah")

    asyncssh_transport.stdin.seek(0)
    assert asyncssh_transport.stdin.read() == b"blah"


def test_write_exception(asyncssh_transport):
    with pytest.raises(ScrapliConnectionNotOpened):
        asyncssh_transport.write("blah")
28.958621
96
0.740414
221
0.052632
0
0
0
0
1,043
0.248392
801
0.19076
a3d571f1fc3a63903055bc9efe42eada3f2c5310
3,699
py
Python
apps/ignite/views.py
Mozilla-GitHub-Standards/93f18f14efcf5fdfc0e04f9bf247f66baf46663f37b1d2087ab8d850abc90803
4e374b4d52dfb9039ebe543e7f27682189022307
[ "BSD-3-Clause" ]
2
2015-04-06T15:20:29.000Z
2016-12-30T12:25:11.000Z
apps/ignite/views.py
Mozilla-GitHub-Standards/93f18f14efcf5fdfc0e04f9bf247f66baf46663f37b1d2087ab8d850abc90803
4e374b4d52dfb9039ebe543e7f27682189022307
[ "BSD-3-Clause" ]
2
2019-02-17T17:38:02.000Z
2019-03-28T03:49:16.000Z
apps/ignite/views.py
Mozilla-GitHub-Standards/93f18f14efcf5fdfc0e04f9bf247f66baf46663f37b1d2087ab8d850abc90803
4e374b4d52dfb9039ebe543e7f27682189022307
[ "BSD-3-Clause" ]
1
2019-03-28T03:49:18.000Z
2019-03-28T03:49:18.000Z
from django.shortcuts import get_object_or_404

import jingo
import waffle
from django.contrib.auth.models import User

from challenges.models import Submission, Category
from projects.models import Project
from blogs.models import BlogEntry
from events.models import Event


def splash(request, project, slug, template_name='ignite/splash.html'):
    """Show an individual project challenge."""
    project = get_object_or_404(Project, slug=project)
    challenge = get_object_or_404(project.challenge_set, slug=slug)
    num_blogs = 3
    # have we announced the winners yet - switch template
    if waffle.switch_is_active('announce_winners'):
        template_name = 'ignite/homepage-winners.html'
        num_blogs = 5
    blogs = BlogEntry.objects.filter(
        page='splash'
    ).order_by("-updated",)[:num_blogs]

    # if the dev challenge is open we want to only show dev entries
    if request.development.is_open:
        entries = (Submission.objects.visible()
                   .filter(phase__challenge=challenge)
                   .filter(phase__name="Development")
                   .order_by("?"))
        num_entries = len(entries)
        entries_from = 'apps'
        if num_entries < 5:
            entries = (Submission.objects.visible()
                       .filter(phase__challenge=challenge)
                       .filter(phase__name="Ideation")
                       .order_by("?"))
            entries_from = 'ideas'
    else:
        entries = (Submission.objects.visible()
                   .filter(phase__challenge=challenge)
                   .filter(phase__name="Ideation")
                   .order_by("?"))
        entries_from = 'ideas'

    event_list = Event.objects.get_featured()[:5]

    return jingo.render(request, template_name, {
        'challenge': challenge,
        'project': project,
        'phases': list(enumerate(challenge.phases.all(), start=1)),
        'entries': entries[:5],
        'categories': Category.objects.all(),
        'blogs': blogs,
        'event_list': event_list,
        'entries_from': entries_from,
    })


def about(request, project, slug, template_name='ignite/about.html'):
    if waffle.switch_is_active('announce_winners'):
        template_name = 'ignite/about-winners.html'
    return jingo.render(request, template_name)


def judges(request, project, slug, template_name='challenges/all_judges.html'):
    """ List all judges we have in the system """
    profiles = []
    for judge in User.objects.filter(groups__name='Judges'):
        profile = judge.get_profile()
        # we only want to show featured profiles
        if profile.featured == True:
            profiles.append(profile)
    return jingo.render(request, 'ignite/judges.html', {
        'profiles': profiles
    })


def terms(request, project, slug, template_name='static/terms_conditions.html'):
    return jingo.render(request, template_name, {})


def terms_development(request, project, slug, template_name='static/terms_conditions_development.html'):
    return jingo.render(request, template_name, {})


def fail(request, template_name='404.html'):
    return jingo.render(request, template_name, {}, status=404)


def app_fail(request, template_name='500.html'):
    return jingo.render(request, template_name, {}, status=500)


def action_unavailable_response(request, message=None,
                                template_name="action_unavailable.html"):
    """Generic page for unavailable actions"""
    context = {'message': message}
    return jingo.render(request, template_name, context, status=403)
36.99
104
0.638821
0
0
0
0
0
0
0
0
772
0.208705
a3d593a4708a16249302174780a07f2fdc88109b
664
py
Python
dataPresenter.py
thebouv/IUS-Hacktoberfest
084634ec2feff3e81862d85b3938e1ae2c5aadff
[ "MIT" ]
3
2019-09-30T18:25:12.000Z
2019-10-01T21:47:41.000Z
dataPresenter.py
thebouv/IUS-Hacktoberfest
084634ec2feff3e81862d85b3938e1ae2c5aadff
[ "MIT" ]
3
2019-09-27T22:44:34.000Z
2019-10-09T17:00:37.000Z
dataPresenter.py
thebouv/IUS-Hacktoberfest
084634ec2feff3e81862d85b3938e1ae2c5aadff
[ "MIT" ]
6
2019-09-28T04:17:16.000Z
2019-10-08T18:47:26.000Z
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import plotly.io as pio
from dataProcessor import parseLabels, parseLangs
import os

years = parseLabels()
langs = parseLangs()

# make the plotly results
fig = make_subplots(
    rows=1, cols=2,
    specs=[[{"type": "xy"}, {"type": "domain"}]],
)

fig.add_trace(go.Bar(y=list(langs.values()), x=list(langs.keys()), showlegend=False), row=1, col=1)

fig.add_trace(go.Pie(values=list(years.values()), labels=list(years.keys())), row=1, col=2)

fig.update_layout(height=600)

pio.write_html(fig, 'index.html', auto_open=True)
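# --- Standalone sketch (editor's addition, not part of dataPresenter.py) ------
# parseLabels() and parseLangs() are assumed to return plain dicts mapping
# labels to counts; with stub dicts the same two-panel figure can be built
# without the dataProcessor module. The numbers below are purely illustrative.
from plotly.subplots import make_subplots
import plotly.graph_objects as go

stub_years = {"2017": 4, "2018": 9, "2019": 15}
stub_langs = {"Python": 12, "JavaScript": 7, "Go": 3}

demo = make_subplots(rows=1, cols=2,
                     specs=[[{"type": "xy"}, {"type": "domain"}]])
demo.add_trace(go.Bar(x=list(stub_langs.keys()), y=list(stub_langs.values()),
                      showlegend=False), row=1, col=1)
demo.add_trace(go.Pie(labels=list(stub_years.keys()),
                      values=list(stub_years.values())), row=1, col=2)
demo.update_layout(height=600)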
22.133333
89
0.680723
0
0
0
0
0
0
0
0
60
0.090361
a3d6b9ef2efd18b552dbe05895fafd84b7430c25
17,209
py
Python
bdlb/diabetic_retinopathy_diagnosis/benchmark.py
Sairam954/bdl-benchmarks
6fbc855ca51403ad8f64b6be30ed92f6118c6cae
[ "Apache-2.0" ]
666
2019-06-14T17:14:05.000Z
2022-03-24T10:48:47.000Z
bdlb/diabetic_retinopathy_diagnosis/benchmark.py
Sairam954/bdl-benchmarks
6fbc855ca51403ad8f64b6be30ed92f6118c6cae
[ "Apache-2.0" ]
12
2019-06-26T16:54:14.000Z
2020-08-18T13:16:01.000Z
bdlb/diabetic_retinopathy_diagnosis/benchmark.py
Sairam954/bdl-benchmarks
6fbc855ca51403ad8f64b6be30ed92f6118c6cae
[ "Apache-2.0" ]
97
2019-06-14T20:30:39.000Z
2022-02-05T08:33:49.000Z
# Copyright 2019 BDL Benchmarks Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Diabetic retinopathy diagnosis BDL Benchmark.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import os from typing import Callable from typing import Dict from typing import Optional from typing import Sequence from typing import Text from typing import Tuple from typing import Union import numpy as np import pandas as pd import tensorflow as tf from absl import logging from ..core import transforms from ..core.benchmark import Benchmark from ..core.benchmark import BenchmarkInfo from ..core.benchmark import DataSplits from ..core.constants import DATA_DIR from ..core.levels import Level tfk = tf.keras _DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR = os.path.join( DATA_DIR, "downloads", "manual", "diabetic_retinopathy_diagnosis") class DiabeticRetinopathyDiagnosisBecnhmark(Benchmark): """Diabetic retinopathy diagnosis benchmark class.""" def __init__( self, level: Union[Text, Level], batch_size: int = 64, data_dir: Optional[Text] = None, download_and_prepare: bool = False, ): """Constructs a benchmark object. Args: level: `Level` or `str, downstream task level. batch_size: (optional) `int`, number of datapoints per mini-batch. data_dir: (optional) `str`, path to parent data directory. download_and_prepare: (optional) `bool`, if the data is not available it downloads and preprocesses it. """ self.__level = level if isinstance(level, Level) else Level.from_str(level) try: self.__ds = self.load(level=level, batch_size=batch_size, data_dir=data_dir or DATA_DIR) except AssertionError: if not download_and_prepare: raise else: logging.info( "Data not found, `DiabeticRetinopathyDiagnosisBecnhmark.download_and_prepare()`" " is now running...") self.download_and_prepare() @classmethod def evaluate( cls, estimator: Callable[[np.ndarray], Tuple[np.ndarray, np.ndarray]], dataset: tf.data.Dataset, output_dir: Optional[Text] = None, name: Optional[Text] = None, ) -> Dict[Text, float]: """Evaluates an `estimator` on the `mode` benchmark dataset. Args: estimator: `lambda x: mu_x, uncertainty_x`, an uncertainty estimation function, which returns `mean_x` and predictive `uncertainty_x`. dataset: `tf.data.Dataset`, on which dataset to performance evaluation. output_dir: (optional) `str`, directory to save figures. name: (optional) `str`, the name of the method. 
""" import inspect import tqdm import tensorflow_datasets as tfds from sklearn.metrics import roc_auc_score from sklearn.metrics import accuracy_score import matplotlib.pyplot as plt # Containers used for caching performance evaluation y_true = list() y_pred = list() y_uncertainty = list() # Convert to NumPy iterator if necessary ds = dataset if inspect.isgenerator(dataset) else tfds.as_numpy(dataset) for x, y in tqdm.tqdm(ds): # Sample from probabilistic model mean, uncertainty = estimator(x) # Cache predictions y_true.append(y) y_pred.append(mean) y_uncertainty.append(uncertainty) # Use vectorized NumPy containers y_true = np.concatenate(y_true).flatten() y_pred = np.concatenate(y_pred).flatten() y_uncertainty = np.concatenate(y_uncertainty).flatten() fractions = np.asarray([0.5, 0.6, 0.7, 0.8, 0.9, 1.0]) # Metrics for evaluation metrics = zip(["accuracy", "auc"], cls.metrics()) return { metric: cls._evaluate_metric( y_true, y_pred, y_uncertainty, fractions, lambda y_true, y_pred: metric_fn(y_true, y_pred).numpy(), name, ) for (metric, metric_fn) in metrics } @staticmethod def _evaluate_metric( y_true: np.ndarray, y_pred: np.ndarray, y_uncertainty: np.ndarray, fractions: Sequence[float], metric_fn: Callable[[np.ndarray, np.ndarray], float], name=None, ) -> pd.DataFrame: """Evaluate model predictive distribution on `metric_fn` at data retain `fractions`. Args: y_true: `numpy.ndarray`, the ground truth labels, with shape [N]. y_pred: `numpy.ndarray`, the model predictions, with shape [N]. y_uncertainty: `numpy.ndarray`, the model uncertainties, with shape [N]. fractions: `iterable`, the percentages of data to retain for calculating `metric_fn`. metric_fn: `lambda(y_true, y_pred) -> float`, a metric function that provides a score given ground truths and predictions. name: (optional) `str`, the name of the method. Returns: A `pandas.DataFrame` with columns ["retained_data", "mean", "std"], that summarizes the scores at different data retained fractions. """ N = y_true.shape[0] # Sorts indexes by ascending uncertainty I_uncertainties = np.argsort(y_uncertainty) # Score containers mean = np.empty_like(fractions) # TODO(filangel): do bootstrap sampling and estimate standard error std = np.zeros_like(fractions) for i, frac in enumerate(fractions): # Keep only the %-frac of lowest uncertainties I = np.zeros(N, dtype=bool) I[I_uncertainties[:int(N * frac)]] = True mean[i] = metric_fn(y_true[I], y_pred[I]) # Store df = pd.DataFrame(dict(retained_data=fractions, mean=mean, std=std)) df.name = name return df @property def datasets(self) -> tf.data.Dataset: """Pointer to the processed datasets.""" return self.__ds @property def info(self) -> BenchmarkInfo: """Text description of the benchmark.""" return BenchmarkInfo(description="", urls="", setup="", citation="") @property def level(self) -> Level: """The downstream task level.""" return self.__level @staticmethod def loss() -> tfk.losses.Loss: """Loss used for training binary classifiers.""" return tfk.losses.BinaryCrossentropy() @staticmethod def metrics() -> tfk.metrics.Metric: """Evaluation metrics used for monitoring training.""" return [tfk.metrics.BinaryAccuracy(), tfk.metrics.AUC()] @staticmethod def class_weight() -> Sequence[float]: """Class weights used for rebalancing the dataset, by skewing the `loss` accordingly.""" return [1.0, 4.0] @classmethod def load( cls, level: Union[Text, Level] = "realworld", batch_size: int = 64, data_dir: Optional[Text] = None, as_numpy: bool = False, ) -> DataSplits: """Loads the datasets for the benchmark. 
Args: level: `Level` or `str, downstream task level. batch_size: (optional) `int`, number of datapoints per mini-batch. data_dir: (optional) `str`, path to parent data directory. as_numpy: (optional) `bool`, if True returns python generators with `numpy.ndarray` outputs. Returns: A namedtuple with properties: * train: `tf.data.Dataset`, train dataset. * validation: `tf.data.Dataset`, validation dataset. * test: `tf.data.Dataset`, test dataset. """ import tensorflow_datasets as tfds from .tfds_adapter import DiabeticRetinopathyDiagnosis # Fetch datasets try: ds_train, ds_validation, ds_test = DiabeticRetinopathyDiagnosis( data_dir=data_dir or DATA_DIR, config=level).as_dataset(split=["train", "validation", "test"], shuffle_files=True, batch_size=batch_size) except AssertionError as ae: raise AssertionError( str(ae) + " Run DiabeticRetinopathyDiagnosisBecnhmark.download_and_prepare()" " first and then retry.") # Parse task level level = level if isinstance(level, Level) else Level.from_str(level) # Dataset tranformations transforms_train, transforms_eval = cls._preprocessors() # Apply transformations ds_train = ds_train.map(transforms_train, num_parallel_calls=tf.data.experimental.AUTOTUNE) ds_validation = ds_validation.map( transforms_eval, num_parallel_calls=tf.data.experimental.AUTOTUNE) ds_test = ds_test.map(transforms_eval, num_parallel_calls=tf.data.experimental.AUTOTUNE) # Prefetches datasets to memory ds_train = ds_train.prefetch(tf.data.experimental.AUTOTUNE) ds_validation = ds_validation.prefetch(tf.data.experimental.AUTOTUNE) ds_test = ds_test.prefetch(tf.data.experimental.AUTOTUNE) if as_numpy: # Convert to NumPy iterators ds_train = tfds.as_numpy(ds_train) ds_validation = tfds.as_numpy(ds_validation) ds_test = tfds.as_numpy(ds_test) return DataSplits(ds_train, ds_validation, ds_test) @classmethod def download_and_prepare(cls, levels=None) -> None: """Downloads dataset from Kaggle, extracts zip files and processes it using `tensorflow_datasets`. Args: levels: (optional) `iterable` of `str`, specifies which levels from {'medium', 'realworld'} to prepare, if None it prepares all the levels. Raises: OSError: if `~/.kaggle/kaggle.json` is not set up. """ # Disable GPU for data download, extraction and preparation import os os.environ["CUDA_VISIBLE_DEVICES"] = "-1" cls._download() # cls._extract() #cls._prepare(levels) @staticmethod def _download() -> None: """Downloads data from Kaggle using `tensorflow_datasets`. Raises: OSError: if `~/.kaggle/kaggle.json` is not set up. """ import subprocess as sp import tensorflow_datasets as tfds # Append `/home/$USER/.local/bin` to path os.environ["PATH"] += ":/home/{}/.local/bin/".format(os.environ["USER"]) # Download all files from Kaggle drd = tfds.download.kaggle.KaggleCompetitionDownloader( "diabetic-retinopathy-detection") try: for dfile in drd.competition_files: drd.download_file(dfile, output_dir=_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR) except sp.CalledProcessError as cpe: raise OSError( str(cpe) + "." + " Make sure you have ~/.kaggle/kaggle.json setup, fetched from the Kaggle website" " https://www.kaggle.com/<username>/account -> 'Create New API Key'." 
" Also accept the dataset license by going to" " https://www.kaggle.com/c/diabetic-retinopathy-detection/rules" " and look for the button 'I Understand and Accept' (make sure when reloading the" " page that the button does not pop up again).") @staticmethod def _extract() -> None: """Extracts zip files downloaded from Kaggle.""" import glob import tqdm import zipfile import tempfile # Extract train and test original images for split in ["train", "test"]: # Extract "<split>.zip.00*"" files to "<split>" with tempfile.NamedTemporaryFile() as tmp: # Concatenate "<split>.zip.00*" to "<split>.zip" for fname in tqdm.tqdm( sorted( glob.glob( os.path.join(_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR, "{split}.zip.00*".format(split=split))))): # Unzip "<split>.zip" to "<split>" with open(fname, "rb") as ztmp: tmp.write(ztmp.read()) with zipfile.ZipFile(tmp) as zfile: for image in tqdm.tqdm(iterable=zfile.namelist(), total=len(zfile.namelist())): zfile.extract(member=image, path=_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR) # Delete "<split>.zip.00*" files for splitzip in os.listdir(_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR): if "{split}.zip.00".format(split=split) in splitzip: os.remove( os.path.join(_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR, splitzip)) # Extract "sample.zip", "trainLabels.csv.zip" for fname in ["sample", "trainLabels.csv"]: zfname = os.path.join(_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR, "{fname}.zip".format(fname=fname)) with zipfile.ZipFile(zfname) as zfile: zfile.extractall(_DIABETIC_RETINOPATHY_DIAGNOSIS_DATA_DIR) os.remove(zfname) @staticmethod def _prepare(levels=None) -> None: """Generates the TFRecord objects for medium and realworld experiments.""" import multiprocessing from absl import logging from .tfds_adapter import DiabeticRetinopathyDiagnosis # Hangle each level individually for level in levels or ["medium", "realworld"]: dtask = DiabeticRetinopathyDiagnosis(data_dir=DATA_DIR, config=level) logging.debug("=== Preparing TFRecords for {} ===".format(level)) dtask.download_and_prepare() @classmethod def _preprocessors(cls) -> Tuple[transforms.Transform, transforms.Transform]: """Applies transformations to the raw data.""" import tensorflow_datasets as tfds # Transformation hyperparameters mean = np.asarray([0.42606387, 0.29752496, 0.21309826]) stddev = np.asarray([0.27662534, 0.20280295, 0.1687619]) class Parse(transforms.Transform): """Parses datapoints from raw `tf.data.Dataset`.""" def __call__(self, x, y=None): """Returns `as_supervised` tuple.""" return x["image"], x["label"] class CastX(transforms.Transform): """Casts image to `dtype`.""" def __init__(self, dtype): """Constructs a type caster.""" self.dtype = dtype def __call__(self, x, y): """Returns casted image (to `dtype`) and its (unchanged) label as tuple.""" return tf.cast(x, self.dtype), y class To01X(transforms.Transform): """Rescales image to [min, max]=[0, 1].""" def __call__(self, x, y): """Returns rescaled image and its (unchanged) label as tuple.""" return x / 255.0, y # Get augmentation schemes [augmentation_config, no_augmentation_config] = cls._ImageDataGenerator_config() # Transformations for train dataset transforms_train = transforms.Compose([ Parse(), CastX(tf.float32), To01X(), transforms.Normalize(mean, stddev), # TODO(filangel): hangle batch with ImageDataGenerator # transforms.RandomAugment(**augmentation_config), ]) # Transformations for validation/test dataset transforms_eval = transforms.Compose([ Parse(), CastX(tf.float32), To01X(), transforms.Normalize(mean, stddev), # TODO(filangel): hangle 
batch with ImageDataGenerator # transforms.RandomAugment(**no_augmentation_config), ]) return transforms_train, transforms_eval @staticmethod def _ImageDataGenerator_config(): """Returns the configs for the `tensorflow.keras.preprocessing.image.ImageDataGenerator`, used for the random augmentation of the dataset, following the implementation of https://github.com/chleibig/disease-detection/blob/f3401b26aa9b832ff77afe93 e3faa342f7d088e5/scripts/inspect_data_augmentation.py.""" augmentation_config = dict( featurewise_center=False, samplewise_center=False, featurewise_std_normalization=False, samplewise_std_normalization=False, zca_whitening=False, rotation_range=180.0, width_shift_range=0.05, height_shift_range=0.05, shear_range=0., zoom_range=0.10, channel_shift_range=0., fill_mode="constant", cval=0., horizontal_flip=True, vertical_flip=True, data_format="channels_last", ) no_augmentation_config = dict( featurewise_center=False, samplewise_center=False, featurewise_std_normalization=False, samplewise_std_normalization=False, zca_whitening=False, rotation_range=0.0, width_shift_range=0.0, height_shift_range=0.0, shear_range=0., zoom_range=0.0, channel_shift_range=0., fill_mode="nearest", cval=0., horizontal_flip=False, vertical_flip=False, data_format="channels_last", ) return augmentation_config, no_augmentation_config
34.625755
92
0.658609
15,686
0.9115
0
0
14,452
0.839793
0
0
6,868
0.399093
a3d6f97d9d98a31dc4f2829ae746920be2ea8e17
4,902
py
Python
pysnmp-with-texts/CXConsoleDriver-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/CXConsoleDriver-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/CXConsoleDriver-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module CXConsoleDriver-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CXConsoleDriver-MIB # Produced by pysmi-0.3.4 at Wed May 1 12:32:28 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion") cxConsoleDriver, = mibBuilder.importSymbols("CXProduct-SMI", "cxConsoleDriver") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Counter64, Gauge32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Unsigned32, Integer32, ModuleIdentity, NotificationType, ObjectIdentity, MibIdentifier, Counter32, iso, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Gauge32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Unsigned32", "Integer32", "ModuleIdentity", "NotificationType", "ObjectIdentity", "MibIdentifier", "Counter32", "iso", "Bits") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") cxCdBaudRate = MibScalar((1, 3, 6, 1, 4, 1, 495, 2, 1, 5, 6, 1), Integer32().clone(9600)).setMaxAccess("readwrite") if mibBuilder.loadTexts: cxCdBaudRate.setStatus('mandatory') if mibBuilder.loadTexts: cxCdBaudRate.setDescription('Determines the baud rate of the console port. The setting of this object is dynamic. The console port immediately implements the option you enter. Options: 9600 19200 38400 115200 Default Value: 9600 Configuration Changed: operative') cxCdCharSize = MibScalar((1, 3, 6, 1, 4, 1, 495, 2, 1, 5, 6, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(7, 8)).clone(8)).setMaxAccess("readwrite") if mibBuilder.loadTexts: cxCdCharSize.setStatus('mandatory') if mibBuilder.loadTexts: cxCdCharSize.setDescription('Determines how many bits constitute a character for the console port. Options: none - the value is fixed at 8 Default Value: 8 Configuration Changed: none ') cxCdParity = MibScalar((1, 3, 6, 1, 4, 1, 495, 2, 1, 5, 6, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noParity", 1), ("evenParity", 2), ("oddParity", 3))).clone('noParity')).setMaxAccess("readwrite") if mibBuilder.loadTexts: cxCdParity.setStatus('mandatory') if mibBuilder.loadTexts: cxCdParity.setDescription('Determines the parity scheme the CPU uses to validate the characters it receives through the console port. Options: none - the value is fixed at noParity Default Value: noParity Configuration Changed: none ') cxCdStopBit = MibScalar((1, 3, 6, 1, 4, 1, 495, 2, 1, 5, 6, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)).clone(1)).setMaxAccess("readwrite") if mibBuilder.loadTexts: cxCdStopBit.setStatus('mandatory') if mibBuilder.loadTexts: cxCdStopBit.setDescription('Determines how many stop bits are at the end of each character the console port receives. 
Options: none - the value is fixed at 1 Default Value: 1 Configuration Changed: none ') cxCdProtocol = MibScalar((1, 3, 6, 1, 4, 1, 495, 2, 1, 5, 6, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("localConsole", 1), ("ppp", 2))).clone('localConsole')).setMaxAccess("readwrite") if mibBuilder.loadTexts: cxCdProtocol.setStatus('mandatory') if mibBuilder.loadTexts: cxCdProtocol.setDescription('Determines the protocol (configuration method) for the console port. The setting of this object is dynamic. The console port immediately implements the option you enter. However, if you change the protocol you are currently using to configure the port your connection will be lost. Options: localConsole (1): you use this protocol when you attach a TTY terminal directly to the console port. This protocol requires you to use command line configuration. You also must enter a password to gain access to the configuration tables. You can define the password using the object uiPassword of the CXUserInterface Table. ppp (2): you use this protocol when you are configuring via a windows-based application such as HP/OV (Hewlett Packard-OpenView). Default Value: ppp (2) Configuration Changed: operative') mibBuilder.exportSymbols("CXConsoleDriver-MIB", cxCdParity=cxCdParity, cxCdProtocol=cxCdProtocol, cxCdBaudRate=cxCdBaudRate, cxCdStopBit=cxCdStopBit, cxCdCharSize=cxCdCharSize)
158.129032
856
0.786006
0
0
0
0
0
0
0
0
2,670
0.544676
a3d7408e9bd4e19d03c2fd8dc2282dcab222a6b8
2,782
py
Python
db/redis_db.py
Lifeistrange/WeiboSpider
8aa3465487ef64bb6e9bb4bd503f182a1b38c292
[ "MIT" ]
1
2018-07-23T03:58:53.000Z
2018-07-23T03:58:53.000Z
db/redis_db.py
545314690/WeiboSpider-1.6.4
d29cc9b926da5790768ddebdfdf9bf6c617a0e03
[ "MIT" ]
null
null
null
db/redis_db.py
545314690/WeiboSpider-1.6.4
d29cc9b926da5790768ddebdfdf9bf6c617a0e03
[ "MIT" ]
2
2018-06-25T09:21:24.000Z
2018-07-23T03:59:31.000Z
# coding:utf-8
import datetime
import json
import re

import redis

from config.conf import get_redis_args

redis_args = get_redis_args()


class Cookies(object):
    rd_con = redis.StrictRedis(host=redis_args.get('host'), port=redis_args.get('port'),
                               password=redis_args.get('password'), db=redis_args.get('cookies'))

    rd_con_broker = redis.StrictRedis(host=redis_args.get('host'), port=redis_args.get('port'),
                                      password=redis_args.get('password'), db=redis_args.get('broker'))

    @classmethod
    def store_cookies(cls, name, cookies):
        pickled_cookies = json.dumps(
            {'cookies': cookies, 'loginTime': datetime.datetime.now().timestamp()})
        cls.rd_con.hset('account', name, pickled_cookies)
        cls.rd_con.lpush('account_queue', name)

    @classmethod
    def fetch_cookies(cls):
        for i in range(cls.rd_con.llen('account_queue')):
            name = cls.rd_con.rpop('account_queue').decode('utf-8')
            if name:
                j_account = cls.rd_con.hget('account', name).decode('utf-8')
                if j_account:
                    # if the account no longer exists this name is dropped too and the next one is taken
                    cls.rd_con.lpush('account_queue', name)
                    account = json.loads(j_account)
                    login_time = datetime.datetime.fromtimestamp(account['loginTime'])
                    if datetime.datetime.now() - login_time > datetime.timedelta(hours=20):
                        cls.rd_con.hdel('account', name)
                        # discard this expired account; account_queue is cleaned up on the next
                        # access rather than here because of the distributed setup
                        continue
                    return name, account['cookies']
            else:
                return None

    @classmethod
    def delete_cookies(cls, name):
        cls.rd_con.hdel('account', name)
        return True

    @classmethod
    def check_login_task(cls):
        if cls.rd_con_broker.llen('login_queue') > 0:
            cls.rd_con_broker.delete('login_queue')


class Urls(object):
    rd_con = redis.StrictRedis(host=redis_args.get('host'), port=redis_args.get('port'),
                               password=redis_args.get('password'), db=redis_args.get('urls'))

    @classmethod
    def store_crawl_url(cls, url, result):
        cls.rd_con.set(url, result)


class IdNames(object):
    rd_con = redis.StrictRedis(host=redis_args.get('host'), port=redis_args.get('port'),
                               password=redis_args.get('password'), db=redis_args.get('id_name'))

    @classmethod
    def store_id_name(cls, user_name, user_id):
        cls.rd_con.set(user_name, user_id)

    @classmethod
    def fetch_uid_by_name(cls, user_name):
        user_id = cls.rd_con.get(user_name)
        if user_id:
            return user_id.decode('utf-8')
        return ''
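# --- Usage sketch (editor's addition, not part of redis_db.py) ----------------
# store_cookies() serialises the cookie dict together with a login timestamp and
# queues the account name; fetch_cookies() later returns the freshest usable
# (name, cookies) pair, or None once every queued account has expired. The
# account name and cookie value below are made up.
if __name__ == '__main__':
    Cookies.store_cookies('weibo_account_1', {'SUB': 'example-session-token'})
    fetched = Cookies.fetch_cookies()
    if fetched:
        account_name, account_cookies = fetched
        print(account_name, account_cookies)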
36.12987
103
0.611431
2,744
0.949481
0
0
1,880
0.650519
0
0
508
0.175779
a3d75ce424bf88d4d06c99b804df0f846b952cac
1,873
py
Python
vivisect/storage/mpfile.py
vEpiphyte/vivisect
14947a53c6781175f0aa83d49cc16c524a2e23a3
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
vivisect/storage/mpfile.py
vEpiphyte/vivisect
14947a53c6781175f0aa83d49cc16c524a2e23a3
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
vivisect/storage/mpfile.py
vEpiphyte/vivisect
14947a53c6781175f0aa83d49cc16c524a2e23a3
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
import base64
import logging

import msgpack

logger = logging.getLogger(__name__)

loadargs = {'use_list': False, 'raw': False}
if msgpack.version < (1, 0, 0):
    loadargs['encoding'] = 'utf-8'
else:
    loadargs['strict_map_key'] = False

VSIG = b'MSGVIV'.ljust(8, b'\x00')


def vivEventsAppendFile(filename, events):
    with open(filename, 'ab') as f:
        for event in events:
            if event[0] == 20:
                mape = base64.b64encode(event[1][3])
                event = (event[0], (event[1][0], event[1][1], event[1][2], mape))
            msgpack.pack(event, f, use_bin_type=False)


def saveWorkspaceChanges(vw, filename):
    events = vw.exportWorkspaceChanges()
    vivEventsAppendFile(filename, events)


def vivEventsToFile(filename, events):
    with open(filename, 'wb') as f:
        msgpack.pack(VSIG, f, use_bin_type=False)
        for event in events:
            if event[0] == 20:
                mape = base64.b64encode(event[1][3])
                event = (event[0], (event[1][0], event[1][1], event[1][2], mape))
            msgpack.pack(event, f, use_bin_type=False)


def saveWorkspace(vw, filename):
    events = vw.exportWorkspace()
    vivEventsToFile(filename, events)


def vivEventsFromFile(filename):
    events = []
    with open(filename, 'rb') as f:
        unpacker = msgpack.Unpacker(f, **loadargs)

        siggy = next(unpacker)
        if siggy.encode('utf-8') != VSIG:
            logger.warning('Invalid file signature of %s', str(siggy))
            return

        for event in unpacker:
            if event[0] == 20:
                mape = base64.b64decode(event[1][3])
                event = (event[0], (event[1][0], event[1][1], event[1][2], mape))
            events.append(event)

    return events


def loadWorkspace(vw, filename):
    events = vivEventsFromFile(filename)
    vw.importWorkspace(events)
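# --- Round-trip sketch (editor's addition, not part of mpfile.py) -------------
# vivEventsToFile()/vivEventsFromFile() only assume an iterable of
# (event_id, info) tuples, where id 20 carries raw bytes in info[3] that are
# base64-wrapped on disk. The synthetic events below are fabricated stand-ins,
# so a small self-check like this should round-trip cleanly.
def _demo_roundtrip(path='demo.mpviv'):
    events = [
        (1, ('meta', 'Architecture', 0, 'amd64')),
        (20, (0x1000, 0, 0, b'\x90\x90')),  # id 20: bytes payload in slot 3
    ]
    vivEventsToFile(path, events)
    assert vivEventsFromFile(path) == events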
28.815385
81
0.599573
0
0
0
0
0
0
0
0
113
0.060331
a3d771361889efe007b26f62c7cd92ffc6f656a2
3,832
py
Python
pytest_pgsql/plugin.py
mathiasose/pytest-pgsql
5e076db146699c3b683b49e4a31323c4c23054de
[ "BSD-3-Clause" ]
null
null
null
pytest_pgsql/plugin.py
mathiasose/pytest-pgsql
5e076db146699c3b683b49e4a31323c4c23054de
[ "BSD-3-Clause" ]
null
null
null
pytest_pgsql/plugin.py
mathiasose/pytest-pgsql
5e076db146699c3b683b49e4a31323c4c23054de
[ "BSD-3-Clause" ]
null
null
null
"""This forms the core of the pytest plugin.""" import pytest import testing.postgresql from pytest_pgsql import database from pytest_pgsql import ext def pytest_addoption(parser): """Add configuration options for pytest_pgsql.""" parser.addoption( '--pg-extensions', action='store', default='', help="A comma-separated list of PostgreSQL extensions to install at " "the beginning of the session for use by all tests. Example: " "--pg-extensions=uuid-ossp,pg_tgrm,pgcrypto") parser.addoption( '--pg-work-mem', type=int, default=32, help='Set the value of the `work_mem` setting, in megabytes. ' '`pytest_pgsql` defaults to 32. Adjusting this up or down can ' 'help performance; see the Postgres documentation for more details.') parser.addoption( '--pg-conf-opt', action='append', help='Add a key=value line that will be appended to postgresql.conf') @pytest.fixture(scope='session') def database_uri(request): """A fixture giving the connection URI of the session-wide test database.""" # Note: due to the nature of the variable configs, the command line options # must be tested manually. work_mem = request.config.getoption('--pg-work-mem') if work_mem < 0: # pragma: no cover pytest.exit('ERROR: --pg-work-mem value must be >= 0. Got: %d' % work_mem) return elif work_mem == 0: # pragma: no cover # Disable memory tweak and use the server default. work_mem_setting = '' else: # User wants to change the working memory setting. work_mem_setting = '-c work_mem=%dMB ' % work_mem conf_opts = request.config.getoption('--pg-conf-opt') if conf_opts: conf_opts_string = ' -c ' + ' -c '.join(conf_opts) else: conf_opts_string = '' # pylint: disable=bad-continuation,deprecated-method with testing.postgresql.Postgresql( postgres_args='-c TimeZone=UTC ' '-c fsync=off ' '-c synchronous_commit=off ' '-c full_page_writes=off ' + work_mem_setting + '-c checkpoint_timeout=30min ' '-c bgwriter_delay=10000ms' + conf_opts_string) as pgdb: yield pgdb.url() #: A SQLAlchemy engine shared by the transacted and non-transacted database fixtures. #: #: .. seealso:: `pytest_pgsql.ext.create_engine_fixture` # pylint: disable=invalid-name pg_engine = ext.create_engine_fixture('pg_engine', scope='session') # pylint: enable=invalid-name @pytest.fixture(scope='session') def database_snapshot(pg_engine): """Create one database snapshot for the session. The database will be restored to this state after each test. .. note :: This is an implementation detail and should not be used directly except by derived fixtures. """ return database.create_database_snapshot(pg_engine) # pylint: disable=invalid-name #: Create a test database instance and cleans up after each test finishes. #: #: You should prefer the `transacted_postgresql_db` fixture unless your test #: cannot be run in a single transaction. The `transacted_postgresql_db` fixture #: leads to faster tests since it doesn't tear down the entire database between #: each test. postgresql_db = \ database.PostgreSQLTestDB.create_fixture('postgresql_db') #: Create a test database instance that rolls back the current transaction after #: each test finishes, verifying its integrity before returning. #: #: Read the warning in the main documentation page before using this fixture. transacted_postgresql_db = \ database.TransactedPostgreSQLTestDB.create_fixture('transacted_postgresql_db') # pylint: enable=invalid-name
35.813084
85
0.675626
0
0
1,352
0.352818
1,760
0.45929
0
0
2,340
0.610647
a3d816c8c07445ebc9580d3703129a46fcf2cc64
737
py
Python
power_data_to_sat_passes/date_utils.py
abrahamneben/orbcomm_beam_mapping
71b3e7d6e4214db0a6f4e68ebeeb7d7f846f5004
[ "MIT" ]
1
2019-04-10T02:50:19.000Z
2019-04-10T02:50:19.000Z
power_data_to_sat_passes/date_utils.py
abrahamneben/orbcomm_beam_mapping
71b3e7d6e4214db0a6f4e68ebeeb7d7f846f5004
[ "MIT" ]
null
null
null
power_data_to_sat_passes/date_utils.py
abrahamneben/orbcomm_beam_mapping
71b3e7d6e4214db0a6f4e68ebeeb7d7f846f5004
[ "MIT" ]
null
null
null
# written by abraham on aug 24

def dyear2date(dyear):

    year = int(dyear)
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    days_before_months = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]

    days_into_year_f = (dyear - year) * 365
    days_into_year_i = int(days_into_year_f)

    for i in range(12):
        if days_before_months[i] < days_into_year_f < (days_before_months[i] + month_lengths[i]):
            month = i + 1
            break

    date = days_into_year_i - days_before_months[month - 1]

    hours_f = (days_into_year_f - days_into_year_i) * 24
    hours_i = int(hours_f)

    minutes_f = (hours_f - hours_i) * 60
    minutes_i = int(minutes_f)

    seconds_i = int((minutes_f - minutes_i) * 60)

    return "%02d/%02d/%d %02d:%02d:%02d" % (month, date, year, hours_i, minutes_i, seconds_i)
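# --- Usage sketch (editor's addition, not part of date_utils.py) --------------
# dyear2date() maps a decimal year to "MM/DD/YYYY HH:MM:SS" using 365-day years
# (leap days are ignored, as in the function itself). Halfway through 2019
# therefore lands on noon of July 1st.
if __name__ == '__main__':
    print(dyear2date(2019.5))  # -> 07/01/2019 12:00:00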
27.296296
89
0.72863
0
0
0
0
0
0
0
0
59
0.080054
a3d8391391013bac7dd77afd2eebf78925078f05
752
py
Python
app/base/count_lines.py
sourcery-ai-bot/personal-expenses-accounting
55e76744a06fd502d119f57427cd7a0bfaf68fe1
[ "MIT" ]
5
2020-02-21T16:26:21.000Z
2021-08-05T09:34:28.000Z
app/base/count_lines.py
sourcery-ai-bot/personal-expenses-accounting
55e76744a06fd502d119f57427cd7a0bfaf68fe1
[ "MIT" ]
11
2020-06-26T09:05:04.000Z
2022-01-24T20:35:07.000Z
app/base/count_lines.py
sourcery-ai-bot/personal-expenses-accounting
55e76744a06fd502d119f57427cd7a0bfaf68fe1
[ "MIT" ]
1
2021-06-25T09:42:08.000Z
2021-06-25T09:42:08.000Z
import glob
from os import walk

exclude_folders = [
    'node_modules',
    'ios',
    'android',
    '__pycache__'
]

exclude_files = [
    'json',
    'txt',
    'traineddata',
    'lstmf',
    'yml',
    'md',
    'log',
    'env',
    'gitignore',
    'dockerignore'
]

# get all files in directory
dirr = '/home/viktor/Documents/personal-expenses-accounting/app/services/web_service/'
folders = glob.glob(dirr + '/**/', recursive=True)

# only app related directories
directories = []
for folder in folders:
    current_folder = folder.split('/')[-2]
    if current_folder not in exclude_folders:
        files = glob.glob(folder + '*')
        print(files)
        directories.append(folder)

# num_lines = sum(1 for line in open('myfile.txt'))
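# --- Possible continuation (editor's addition, not part of count_lines.py) ----
# The commented-out line above hints at the goal; one way to finish the count,
# reusing exclude_files to skip non-code extensions, might look like this.
def count_lines(paths):
    total = 0
    for path in paths:
        extension = path.rsplit('.', 1)[-1]
        if extension in exclude_files:
            continue
        try:
            with open(path, encoding='utf-8', errors='ignore') as fh:
                total += sum(1 for _ in fh)
        except OSError:
            continue
    return total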
19.282051
86
0.625
0
0
0
0
0
0
0
0
316
0.420213
a3d86cad8d3203300d44bd218c5b17bca9639482
409
py
Python
data/contacts.py
rgurevych/python_for_testers
04023a5d6ea480f7828aa56e8a4094b744e05721
[ "Apache-2.0" ]
null
null
null
data/contacts.py
rgurevych/python_for_testers
04023a5d6ea480f7828aa56e8a4094b744e05721
[ "Apache-2.0" ]
null
null
null
data/contacts.py
rgurevych/python_for_testers
04023a5d6ea480f7828aa56e8a4094b744e05721
[ "Apache-2.0" ]
null
null
null
from models.contact import Contact

testdata = [Contact(first_name="Firstname", last_name="Lastname", mobile_phone="+12345678",
                    work_phone="12345", home_phone="67890", fax="55443322",
                    email_1="[email protected]", email_2="[email protected]", email_3="[email protected]",
                    address="Street, 15 \n 12345 New-York")]
51.125
116
0.577017
0
0
0
0
0
0
0
0
143
0.349633
a3d89936d8b1b9966571e7248379800a7bb8190c
17,617
py
Python
charmhelpers/contrib/charmsupport/nrpe.py
nobuto-m/charm-helpers
4cffc05ace43234d34b040cccdde3460f68cb673
[ "Apache-2.0" ]
null
null
null
charmhelpers/contrib/charmsupport/nrpe.py
nobuto-m/charm-helpers
4cffc05ace43234d34b040cccdde3460f68cb673
[ "Apache-2.0" ]
1
2019-09-04T12:17:17.000Z
2019-09-04T12:17:17.000Z
charmhelpers/contrib/charmsupport/nrpe.py
nobuto-m/charm-helpers
4cffc05ace43234d34b040cccdde3460f68cb673
[ "Apache-2.0" ]
null
null
null
# Copyright 2014-2015 Canonical Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Compatibility with the nrpe-external-master charm""" # Copyright 2012 Canonical Ltd. # # Authors: # Matthew Wedgwood <[email protected]> import subprocess import pwd import grp import os import glob import shutil import re import shlex import yaml from charmhelpers.core.hookenv import ( config, hook_name, local_unit, log, relation_get, relation_ids, relation_set, relations_of_type, ) from charmhelpers.core.host import service from charmhelpers.core import host # This module adds compatibility with the nrpe-external-master and plain nrpe # subordinate charms. To use it in your charm: # # 1. Update metadata.yaml # # provides: # (...) # nrpe-external-master: # interface: nrpe-external-master # scope: container # # and/or # # provides: # (...) # local-monitors: # interface: local-monitors # scope: container # # 2. Add the following to config.yaml # # nagios_context: # default: "juju" # type: string # description: | # Used by the nrpe subordinate charms. # A string that will be prepended to instance name to set the host name # in nagios. So for instance the hostname would be something like: # juju-myservice-0 # If you're running multiple environments with the same services in them # this allows you to differentiate between them. # nagios_servicegroups: # default: "" # type: string # description: | # A comma-separated list of nagios servicegroups. # If left empty, the nagios_context will be used as the servicegroup # # 3. Add custom checks (Nagios plugins) to files/nrpe-external-master # # 4. Update your hooks.py with something like this: # # from charmsupport.nrpe import NRPE # (...) # def update_nrpe_config(): # nrpe_compat = NRPE() # nrpe_compat.add_check( # shortname = "myservice", # description = "Check MyService", # check_cmd = "check_http -w 2 -c 10 http://localhost" # ) # nrpe_compat.add_check( # "myservice_other", # "Check for widget failures", # check_cmd = "/srv/myapp/scripts/widget_check" # ) # nrpe_compat.write() # # def config_changed(): # (...) # update_nrpe_config() # # def nrpe_external_master_relation_changed(): # update_nrpe_config() # # def local_monitors_relation_changed(): # update_nrpe_config() # # 4.a If your charm is a subordinate charm set primary=False # # from charmsupport.nrpe import NRPE # (...) # def update_nrpe_config(): # nrpe_compat = NRPE(primary=False) # # 5. 
ln -s hooks.py nrpe-external-master-relation-changed # ln -s hooks.py local-monitors-relation-changed class CheckException(Exception): pass class Check(object): shortname_re = '[A-Za-z0-9-_.@]+$' service_template = (""" #--------------------------------------------------- # This file is Juju managed #--------------------------------------------------- define service {{ use active-service host_name {nagios_hostname} service_description {nagios_hostname}[{shortname}] """ """{description} check_command check_nrpe!{command} servicegroups {nagios_servicegroup} }} """) def __init__(self, shortname, description, check_cmd): super(Check, self).__init__() # XXX: could be better to calculate this from the service name if not re.match(self.shortname_re, shortname): raise CheckException("shortname must match {}".format( Check.shortname_re)) self.shortname = shortname self.command = "check_{}".format(shortname) # Note: a set of invalid characters is defined by the # Nagios server config # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= self.description = description self.check_cmd = self._locate_cmd(check_cmd) def _get_check_filename(self): return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) def _get_service_filename(self, hostname): return os.path.join(NRPE.nagios_exportdir, 'service__{}_{}.cfg'.format(hostname, self.command)) def _locate_cmd(self, check_cmd): search_path = ( '/usr/lib/nagios/plugins', '/usr/local/lib/nagios/plugins', ) parts = shlex.split(check_cmd) for path in search_path: if os.path.exists(os.path.join(path, parts[0])): command = os.path.join(path, parts[0]) if len(parts) > 1: command += " " + " ".join(parts[1:]) return command log('Check command not found: {}'.format(parts[0])) return '' def _remove_service_files(self): if not os.path.exists(NRPE.nagios_exportdir): return for f in os.listdir(NRPE.nagios_exportdir): if f.endswith('_{}.cfg'.format(self.command)): os.remove(os.path.join(NRPE.nagios_exportdir, f)) def remove(self, hostname): nrpe_check_file = self._get_check_filename() if os.path.exists(nrpe_check_file): os.remove(nrpe_check_file) self._remove_service_files() def write(self, nagios_context, hostname, nagios_servicegroups): nrpe_check_file = self._get_check_filename() with open(nrpe_check_file, 'w') as nrpe_check_config: nrpe_check_config.write("# check {}\n".format(self.shortname)) if nagios_servicegroups: nrpe_check_config.write( "# The following header was added automatically by juju\n") nrpe_check_config.write( "# Modifying it will affect nagios monitoring and alerting\n") nrpe_check_config.write( "# servicegroups: {}\n".format(nagios_servicegroups)) nrpe_check_config.write("command[{}]={}\n".format( self.command, self.check_cmd)) if not os.path.exists(NRPE.nagios_exportdir): log('Not writing service config as {} is not accessible'.format( NRPE.nagios_exportdir)) else: self.write_service_config(nagios_context, hostname, nagios_servicegroups) def write_service_config(self, nagios_context, hostname, nagios_servicegroups): self._remove_service_files() templ_vars = { 'nagios_hostname': hostname, 'nagios_servicegroup': nagios_servicegroups, 'description': self.description, 'shortname': self.shortname, 'command': self.command, } nrpe_service_text = Check.service_template.format(**templ_vars) nrpe_service_file = self._get_service_filename(hostname) with open(nrpe_service_file, 'w') as nrpe_service_config: nrpe_service_config.write(str(nrpe_service_text)) def run(self): subprocess.call(self.check_cmd) class NRPE(object): nagios_logdir = 
'/var/log/nagios' nagios_exportdir = '/var/lib/nagios/export' nrpe_confdir = '/etc/nagios/nrpe.d' homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server def __init__(self, hostname=None, primary=True): super(NRPE, self).__init__() self.config = config() self.primary = primary self.nagios_context = self.config['nagios_context'] if 'nagios_servicegroups' in self.config and self.config['nagios_servicegroups']: self.nagios_servicegroups = self.config['nagios_servicegroups'] else: self.nagios_servicegroups = self.nagios_context self.unit_name = local_unit().replace('/', '-') if hostname: self.hostname = hostname else: nagios_hostname = get_nagios_hostname() if nagios_hostname: self.hostname = nagios_hostname else: self.hostname = "{}-{}".format(self.nagios_context, self.unit_name) self.checks = [] # Iff in an nrpe-external-master relation hook, set primary status relation = relation_ids('nrpe-external-master') if relation: log("Setting charm primary status {}".format(primary)) for rid in relation: relation_set(relation_id=rid, relation_settings={'primary': self.primary}) self.remove_check_queue = set() def add_check(self, *args, **kwargs): shortname = None if kwargs.get('shortname') is None: if len(args) > 0: shortname = args[0] else: shortname = kwargs['shortname'] self.checks.append(Check(*args, **kwargs)) try: self.remove_check_queue.remove(shortname) except KeyError: pass def remove_check(self, *args, **kwargs): if kwargs.get('shortname') is None: raise ValueError('shortname of check must be specified') # Use sensible defaults if they're not specified - these are not # actually used during removal, but they're required for constructing # the Check object; check_disk is chosen because it's part of the # nagios-plugins-basic package. if kwargs.get('check_cmd') is None: kwargs['check_cmd'] = 'check_disk' if kwargs.get('description') is None: kwargs['description'] = '' check = Check(*args, **kwargs) check.remove(self.hostname) self.remove_check_queue.add(kwargs['shortname']) def write(self): try: nagios_uid = pwd.getpwnam('nagios').pw_uid nagios_gid = grp.getgrnam('nagios').gr_gid except Exception: log("Nagios user not set up, nrpe checks not updated") return if not os.path.exists(NRPE.nagios_logdir): os.mkdir(NRPE.nagios_logdir) os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid) nrpe_monitors = {} monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} for nrpecheck in self.checks: nrpecheck.write(self.nagios_context, self.hostname, self.nagios_servicegroups) nrpe_monitors[nrpecheck.shortname] = { "command": nrpecheck.command, } # update-status hooks are configured to firing every 5 minutes by # default. When nagios-nrpe-server is restarted, the nagios server # reports checks failing causing unnecessary alerts. Let's not restart # on update-status hooks. 
if not hook_name() == 'update-status': service('restart', 'nagios-nrpe-server') monitor_ids = relation_ids("local-monitors") + \ relation_ids("nrpe-external-master") for rid in monitor_ids: reldata = relation_get(unit=local_unit(), rid=rid) if 'monitors' in reldata: # update the existing set of monitors with the new data old_monitors = yaml.safe_load(reldata['monitors']) old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe'] # remove keys that are in the remove_check_queue old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items() if k not in self.remove_check_queue} # update/add nrpe_monitors old_nrpe_monitors.update(nrpe_monitors) old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors # write back to the relation relation_set(relation_id=rid, monitors=yaml.dump(old_monitors)) else: # write a brand new set of monitors, as no existing ones. relation_set(relation_id=rid, monitors=yaml.dump(monitors)) self.remove_check_queue.clear() def get_nagios_hostcontext(relation_name='nrpe-external-master'): """ Query relation with nrpe subordinate, return the nagios_host_context :param str relation_name: Name of relation nrpe sub joined to """ for rel in relations_of_type(relation_name): if 'nagios_host_context' in rel: return rel['nagios_host_context'] def get_nagios_hostname(relation_name='nrpe-external-master'): """ Query relation with nrpe subordinate, return the nagios_hostname :param str relation_name: Name of relation nrpe sub joined to """ for rel in relations_of_type(relation_name): if 'nagios_hostname' in rel: return rel['nagios_hostname'] def get_nagios_unit_name(relation_name='nrpe-external-master'): """ Return the nagios unit name prepended with host_context if needed :param str relation_name: Name of relation nrpe sub joined to """ host_context = get_nagios_hostcontext(relation_name) if host_context: unit = "%s:%s" % (host_context, local_unit()) else: unit = local_unit() return unit def add_init_service_checks(nrpe, services, unit_name, immediate_check=True): """ Add checks for each service in list :param NRPE nrpe: NRPE object to add check to :param list services: List of services to check :param str unit_name: Unit name to use in check description :param bool immediate_check: For sysv init, run the service check immediately """ for svc in services: # Don't add a check for these services from neutron-gateway if svc in ['ext-port', 'os-charm-phy-nic-mtu']: next upstart_init = '/etc/init/%s.conf' % svc sysv_init = '/etc/init.d/%s' % svc if host.init_is_systemd(): nrpe.add_check( shortname=svc, description='process check {%s}' % unit_name, check_cmd='check_systemd.py %s' % svc ) elif os.path.exists(upstart_init): nrpe.add_check( shortname=svc, description='process check {%s}' % unit_name, check_cmd='check_upstart_job %s' % svc ) elif os.path.exists(sysv_init): cronpath = '/etc/cron.d/nagios-service-check-%s' % svc checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc) croncmd = ( '/usr/local/lib/nagios/plugins/check_exit_status.pl ' '-e -s /etc/init.d/%s status' % svc ) cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath) f = open(cronpath, 'w') f.write(cron_file) f.close() nrpe.add_check( shortname=svc, description='service check {%s}' % unit_name, check_cmd='check_status_file.py -f %s' % checkpath, ) # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail # (LP: #1670223). 
if immediate_check and os.path.isdir(nrpe.homedir): f = open(checkpath, 'w') subprocess.call( croncmd.split(), stdout=f, stderr=subprocess.STDOUT ) f.close() os.chmod(checkpath, 0o644) def copy_nrpe_checks(nrpe_files_dir=None): """ Copy the nrpe checks into place """ NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' if nrpe_files_dir is None: # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks for segment in ['.', 'hooks']: nrpe_files_dir = os.path.abspath(os.path.join( os.getenv('CHARM_DIR'), segment, 'charmhelpers', 'contrib', 'openstack', 'files')) if os.path.isdir(nrpe_files_dir): break else: raise RuntimeError("Couldn't find charmhelpers directory") if not os.path.exists(NAGIOS_PLUGINS): os.makedirs(NAGIOS_PLUGINS) for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): if os.path.isfile(fname): shutil.copy2(fname, os.path.join(NAGIOS_PLUGINS, os.path.basename(fname))) def add_haproxy_checks(nrpe, unit_name): """ Add checks for each service in list :param NRPE nrpe: NRPE object to add check to :param str unit_name: Unit name to use in check description """ nrpe.add_check( shortname='haproxy_servers', description='Check HAProxy {%s}' % unit_name, check_cmd='check_haproxy.sh') nrpe.add_check( shortname='haproxy_queue', description='Check HAProxy queue depth {%s}' % unit_name, check_cmd='check_haproxy_queue_depth.sh')
36.174538
90
0.605097
9,339
0.530113
0
0
0
0
0
0
7,453
0.423057
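The long comment block at the top of the nrpe helper in the row above describes the charm-side wiring; a compact hook sketch consistent with that description might look like the following. The check names and the import path are illustrative, and the path in particular depends on how charmhelpers is vendored into the charm.

from charmsupport.nrpe import NRPE   # import path is an assumption; adjust to your vendoring


def update_nrpe_config():
    nrpe_compat = NRPE()             # hostname comes from the nrpe relation or context-unit name
    nrpe_compat.add_check(
        shortname='myservice',
        description='Check MyService',
        check_cmd='check_http -w 2 -c 10 http://localhost',
    )
    # Drop a check that an earlier charm revision installed; only the shortname
    # matters here, the other Check fields fall back to safe defaults.
    nrpe_compat.remove_check(shortname='old_check')
    nrpe_compat.write()              # writes the cfg files, restarts nrpe, updates relations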
a3d9e6ada4265efd73113dc71c68649cc06c25fa
13,250
py
Python
venv/Lib/site-packages/proglog/proglog.py
mintzer/pupillometry-rf-back
cfa86fa984a49dce0123798f8de5b838c02e10d5
[ "CC-BY-4.0" ]
83
2017-08-14T02:20:38.000Z
2022-03-01T20:32:03.000Z
venv/lib/python3.7/site-packages/proglog/proglog.py
haideraltahan/CropMe
75a111b9d3b2c50c6f2a9a36d21432053f02284d
[ "MIT" ]
20
2021-05-03T18:02:23.000Z
2022-03-12T12:01:04.000Z
venv/lib/python3.7/site-packages/proglog/proglog.py
haideraltahan/CropMe
75a111b9d3b2c50c6f2a9a36d21432053f02284d
[ "MIT" ]
6
2018-10-23T08:12:26.000Z
2021-02-14T13:53:13.000Z
"""Implements the generic progress logger class, and the ProgressBar class. """ from tqdm import tqdm, tqdm_notebook from collections import OrderedDict import time SETTINGS = { 'notebook': False } def notebook(turn='on'): SETTINGS['notebook'] = True if (turn == 'on') else False def troncate_string(s, max_length=25): return s if (len(s) < max_length) else (s[:max_length] + "...") class ProgressLogger: """Generic class for progress loggers. A progress logger contains a "state" dictionnary. Parameters ---------- init_state Dictionnary representing the initial state. """ def __init__(self, init_state=None): self.state = {} self.stored = {} self.logs = [] self.log_indent = 0 if init_state is not None: self.state.update(init_state) def log(self, message): self.logs.append((' ' * self.log_indent) + message) def dump_logs(self, filepath=None): if filepath is not None: with open(filepath, 'a') as f: f.write("\n".join(self.logs)) else: return "\n".join(self.logs) def callback(self, **kw): """Execute something after the state has been updated by the given state elements. This default callback does nothing, overwrite it by subclassing """ pass def store(self, **kw): """Store objects in the logger and trigger ``self.store_callback``. This works exactly like ``logger()``, but the later is meant for simple data objects (text, numbers) that will be sent over the network or written to a file. The ``store`` method expects rather large objects which are not necessarily serializable, and will be used eg to draw plots on the fly. """ self.stored.update(kw) self.store_callback(**kw) def store_callback(self, **kw): """Execute something after the store has been updated by the given state elements. This default callback does nothing, overwrite it by subclassing """ pass def iter(self, **kw): """Iterate through a list while updating the state. Examples -------- >>> for username in logger.iter(user=['tom', 'tim', 'lea']: >>> # At every loop, logger.state['user'] is updated >>> print (username) """ for field, iterable in kw.items(): for it in iterable: self(**{field: it}) yield it def __call__(self, **kw): self.state.update(kw) self.callback(**kw) class ProgressBarLogger(ProgressLogger): """Generic class for progress loggers. A progress logger contains a "state" dictionnary Parameters ---------- init_state Initial state of the logger bars Either None (will be initialized with no bar) or a list/tuple of bar names (``['main', 'sub']``) which will be initialized with index -1 and no total, or a dictionary (possibly ordered) of bars, of the form ``{bar_1: {title: 'bar1', index: 2, total:23}, bar_2: {...}}`` ignored_bars Either None (newly met bars will be added) or a list of blacklisted bar names, or ``'all_others'`` to signify that all bar names not already in ``self.bars`` will be ignored. 
""" bar_indent = 2 def __init__(self, init_state=None, bars=None, ignored_bars=None, logged_bars='all', min_time_interval=0, ignore_bars_under=0): ProgressLogger.__init__(self, init_state) if bars is None: bars = OrderedDict() elif isinstance(bars, (list, tuple)): bars = OrderedDict([ (b, dict(title=b, index=-1, total=None, message=None, indent=0)) for b in bars ]) if isinstance(ignored_bars, (list, tuple)): ignored_bars = set(ignored_bars) self.ignored_bars = ignored_bars self.logged_bars = logged_bars self.state['bars'] = bars self.min_time_interval = min_time_interval self.ignore_bars_under = ignore_bars_under @property def bars(self): """Return ``self.state['bars'].``""" return self.state['bars'] def bar_is_ignored(self, bar): if self.ignored_bars is None: return False elif self.ignored_bars == 'all_others': return (bar not in self.bars) else: return bar in self.ignored_bars def bar_is_logged(self, bar): if (not self.logged_bars): return False elif self.logged_bars == 'all': return True else: return bar in self.logged_bars def iterable_is_too_short(self, iterable): length = len(iterable) if hasattr(iterable, '__len__') else None return (length is not None) and (length < self.ignore_bars_under) def iter_bar(self, bar_prefix='', **kw): """Iterate through a list while updating a state bar. Examples -------- >>> for username in logger.iter_bar(user=['tom', 'tim', 'lea']): >>> # At every loop, logger.state['bars']['user'] is updated >>> # to {index: i, total: 3, title:'user'} >>> print (username) """ if 'bar_message' in kw: bar_message = kw.pop('bar_message') else: bar_message = None bar, iterable = kw.popitem() if self.bar_is_ignored(bar) or self.iterable_is_too_short(iterable): return iterable bar = bar_prefix + bar if hasattr(iterable, '__len__'): self(**{bar + '__total': len(iterable)}) def new_iterable(): last_time = time.time() i = 0 # necessary in case the iterator is empty for i, it in enumerate(iterable): now_time = time.time() if (i == 0) or (now_time - last_time > self.min_time_interval): if bar_message is not None: self(**{bar + '__message': bar_message(it)}) self(**{bar + '__index': i}) last_time = now_time yield it if self.bars[bar]['index'] != i: self(**{bar + '__index': i}) self(**{bar + '__index': i + 1}) return new_iterable() def bars_callback(self, bar, attr, value, old_value=None): """Execute a custom action after the progress bars are updated. Parameters ---------- bar Name/ID of the bar to be modified. attr Attribute of the bar attribute to be modified value New value of the attribute old_value Previous value of this bar's attribute. This default callback does nothing, overwrite it by subclassing. """ pass def __call__(self, **kw): items = sorted(kw.items(), key=lambda kv: not kv[0].endswith('total')) for key, value in items: if '__' in key: bar, attr = key.split('__') if self.bar_is_ignored(bar): continue kw.pop(key) if bar not in self.bars: self.bars[bar] = dict(title=bar, index=-1, total=None, message=None) old_value = self.bars[bar][attr] if self.bar_is_logged(bar): new_bar = (attr == 'index') and (value < old_value) if (attr == 'total') or (new_bar): self.bars[bar]['indent'] = self.log_indent else: self.log_indent = self.bars[bar]['indent'] self.log("[%s] %s: %s" % (bar, attr, value)) self.log_indent += self.bar_indent self.bars[bar][attr] = value self.bars_callback(bar, attr, value, old_value) self.state.update(kw) self.callback(**kw) class TqdmProgressBarLogger(ProgressBarLogger): """Tqdm-powered progress bar for console or Notebooks. 
Parameters ---------- init_state Initial state of the logger bars Either None (will be initialized with no bar) or a list/tuple of bar names (``['main', 'sub']``) which will be initialized with index -1 and no total, or a dictionary (possibly ordered) of bars, of the form ``{bar_1: {title: 'bar1', index: 2, total:23}, bar_2: {...}}`` ignored_bars Either None (newly met bars will be added) or a list of blacklisted bar names, or ``'all_others'`` to signify that all bar names not already in ``self.bars`` will be ignored. leave_bars notebook True will make the bars look nice (HTML) in the jupyter notebook. It is advised to leave to 'default' as the default can be globally set from inside a notebook with ``import proglog; proglog.notebook_mode()``. print_messages If True, every ``logger(message='something')`` will print a message in the console / notebook """ def __init__(self, init_state=None, bars=None, leave_bars=False, ignored_bars=None, logged_bars='all', notebook='default', print_messages=True, min_time_interval=0, ignore_bars_under=0): ProgressBarLogger.__init__(self, init_state=init_state, bars=bars, ignored_bars=ignored_bars, logged_bars=logged_bars, ignore_bars_under=ignore_bars_under, min_time_interval=min_time_interval) self.leave_bars = leave_bars self.tqdm_bars = OrderedDict([ (bar, None) for bar in self.bars ]) if notebook == 'default': notebook = SETTINGS['notebook'] self.notebook = notebook self.print_messages = print_messages self.tqdm = (tqdm_notebook if self.notebook else tqdm) def new_tqdm_bar(self, bar): """Create a new tqdm bar, possibly replacing an existing one.""" if (bar in self.tqdm_bars) and (self.tqdm_bars[bar] is not None): self.close_tqdm_bar(bar) infos = self.bars[bar] self.tqdm_bars[bar] = self.tqdm( total=infos['total'], desc=infos['title'], postfix=dict(now=troncate_string(str(infos['message']))), leave=self.leave_bars ) def close_tqdm_bar(self, bar): """Close and erase the tqdm bar""" self.tqdm_bars[bar].close() if not self.notebook: self.tqdm_bars[bar] = None def bars_callback(self, bar, attr, value, old_value): if (bar not in self.tqdm_bars) or (self.tqdm_bars[bar] is None): self.new_tqdm_bar(bar) if attr == 'index': if value >= old_value: total = self.bars[bar]['total'] if total and (value >= total): self.close_tqdm_bar(bar) else: self.tqdm_bars[bar].update(value - old_value) else: self.new_tqdm_bar(bar) self.tqdm_bars[bar].update(value + 1) elif attr == 'message': self.tqdm_bars[bar].set_postfix(now=troncate_string(str(value))) self.tqdm_bars[bar].update(0) def callback(self, **kw): if self.print_messages and ('message' in kw) and kw['message']: if self.notebook: print(kw['message']) else: self.tqdm.write(kw['message']) class RqWorkerProgressLogger: def __init__(self, job): self.job = job if 'progress_data' not in self.job.meta: self.job.meta['progress_data'] = {} self.job.save() def callback(self, **kw): self.job.meta['progress_data'] = self.state self.job.save() class RqWorkerBarLogger(RqWorkerProgressLogger, ProgressBarLogger): def __init__(self, job, init_state=None, bars=None, ignored_bars=(), logged_bars='all', min_time_interval=0): RqWorkerProgressLogger.__init__(self, job) ProgressBarLogger.__init__(self, init_state=init_state, bars=bars, ignored_bars=ignored_bars, logged_bars=logged_bars, min_time_interval=min_time_interval) class MuteProgressBarLogger(ProgressBarLogger): def bar_is_ignored(self, bar): return True def default_bar_logger(logger, bars=None, ignored_bars=None, logged_bars='all', min_time_interval=0, ignore_bars_under=0): if logger 
== 'bar': return TqdmProgressBarLogger( bars=bars, ignored_bars=ignored_bars, logged_bars=logged_bars, min_time_interval=min_time_interval, ignore_bars_under=ignore_bars_under ) elif logger is None: return MuteProgressBarLogger() else: return logger
33.80102
79
0.568981
12,328
0.930415
1,899
0.143321
108
0.008151
0
0
4,406
0.332528
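The ProgressLogger docstrings in the row above show iter/iter_bar updating a state dictionary; a small console-level sketch of the same API follows (the bar name and loop body are illustrative, and tqdm must be installed).

import time

from proglog import default_bar_logger

logger = default_bar_logger('bar')                     # tqdm-backed console bars; None would mute them

for chunk in logger.iter_bar(chunk=list(range(5))):    # keeps state['bars']['chunk'] up to date
    time.sleep(0.1)                                    # stand-in for real work

logger(message='all chunks processed')                 # printed through tqdm.write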
a3d9f04c9618b248a5e94c0c7319362fccd10a9f
665
py
Python
gdsfactory/tests/test_component_from_yaml_bezier.py
jorgepadilla19/gdsfactory
68e1c18257a75d4418279851baea417c8899a165
[ "MIT" ]
42
2020-05-25T09:33:45.000Z
2022-03-29T03:41:19.000Z
gdsfactory/tests/test_component_from_yaml_bezier.py
jorgepadilla19/gdsfactory
68e1c18257a75d4418279851baea417c8899a165
[ "MIT" ]
133
2020-05-28T18:29:04.000Z
2022-03-31T22:21:42.000Z
gdsfactory/tests/test_component_from_yaml_bezier.py
jorgepadilla19/gdsfactory
68e1c18257a75d4418279851baea417c8899a165
[ "MIT" ]
17
2020-06-30T07:07:50.000Z
2022-03-17T15:45:27.000Z
import gdsfactory as gf from gdsfactory.component import Component yaml = """ name: test_component_yaml_without_cell instances: mmi: component: mmi1x2 bend: component: bend_s connections: bend,o1: mmi,o2 """ def test_component_from_yaml_without_cell() -> Component: """bezier does not have cell""" c = gf.read.from_yaml(yaml) assert c.name == "test_component_yaml_without_cell", c.name assert len(c.get_dependencies()) == 2, len(c.get_dependencies()) assert len(c.ports) == 0, len(c.ports) return c if __name__ == "__main__": c = test_component_from_yaml_without_cell() print(c.name) c.show()
20.151515
68
0.682707
0
0
0
0
0
0
0
0
239
0.359398
a3da86d4ce645eeb7110c2f1c12a4c42e43e9f77
2,681
py
Python
cats/types.py
AdamBrianBright/cats-python
163cbde06c0d56520c217c0d66ddca34c7e0f63b
[ "MIT" ]
2
2021-10-04T05:39:03.000Z
2021-10-07T06:44:19.000Z
cats/types.py
AdamBrianBright/cats-python
163cbde06c0d56520c217c0d66ddca34c7e0f63b
[ "MIT" ]
7
2021-08-17T17:50:23.000Z
2021-08-31T08:44:13.000Z
cats/types.py
AdamBrianBright/cats-python
163cbde06c0d56520c217c0d66ddca34c7e0f63b
[ "MIT" ]
2
2021-10-01T20:58:25.000Z
2021-10-04T05:40:35.000Z
from pathlib import Path from types import GeneratorType from typing import AsyncIterable, Iterable, TypeAlias import ujson from cats.errors import MalformedHeadersError try: from django.db.models import QuerySet, Model except ImportError: QuerySet = type('QuerySet', (list,), {}) Model = type('Model', (list,), {}) __all__ = [ 'Bytes', 'BytesGen', 'BytesAsyncGen', 'BytesAnyGen', 'Byte', 'Json', 'File', 'List', 'Missing', 'MISSING', 'QuerySet', 'Model', 'T_Headers', 'Headers', ] Bytes: TypeAlias = bytes | bytearray | memoryview BytesGen: TypeAlias = Iterable[Bytes] BytesAsyncGen: TypeAlias = AsyncIterable[Bytes] BytesAnyGen: TypeAlias = BytesGen | BytesAsyncGen Byte: TypeAlias = Bytes Json: TypeAlias = str | int | float | dict | list | bool | None File: TypeAlias = Path | str List = list | tuple | set | GeneratorType | QuerySet class Missing(str): """ Custom Missing type is required for Pydantic to work properly. IDK """ __slots__ = () def __init__(self): super().__init__() def __eq__(self, other): return isinstance(other, Missing) def __bool__(self): return False MISSING = Missing() class Headers(dict): __slots__ = () def __init__(self, *args, **kwargs): v = self._convert(*args, **kwargs) if (offset := v.get('offset', None)) and (not isinstance(offset, int) or offset < 0): raise MalformedHeadersError('Invalid offset header', headers=v) super().__init__(v) @classmethod def _key(cls, key: str) -> str: return key.replace(' ', '-').title() def __getitem__(self, item): return super().__getitem__(self._key(item)) def __setitem__(self, key, value): return super().__setitem__(self._key(key), value) def __delitem__(self, key): return super().__delitem__(self._key(key)) def __contains__(self, item): return super().__contains__(self._key(item)) @classmethod def _convert(cls, *args, **kwargs): return {cls._key(k): v for k, v in dict(*args, **kwargs).items() if isinstance(k, str)} def update(self, *args, **kwargs) -> None: super().update(self._convert(*args, **kwargs)) def encode(self) -> bytes: return ujson.dumps(self, ensure_ascii=False, escape_forward_slashes=False).encode('utf-8') @classmethod def decode(cls, headers: Bytes) -> 'Headers': try: headers = ujson.loads(headers) except ValueError: # + UnicodeDecodeError headers = None return cls(headers or {}) T_Headers: TypeAlias = Headers | dict[str]
25.056075
98
0.631481
1,696
0.6326
0
0
471
0.175681
0
0
298
0.111153
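The Headers class in the row above title-cases keys and swaps spaces for dashes on every access; a short sketch of that behaviour and of the ujson-backed encode/decode helpers is below (it assumes the cats package and ujson are importable).

from cats.types import Headers, Missing, MISSING

h = Headers({'content type': 'application/json'})
assert 'Content-Type' in h                    # stored and looked up as 'Content-Type'
h['x request id'] = 'abc'
assert h['X-Request-Id'] == 'abc'

raw = h.encode()                              # ujson-encoded bytes
print(Headers.decode(raw))                    # round-trips back into a Headers dict

assert not MISSING and MISSING == Missing()   # falsy sentinel comparing by type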
a3dad87fce4f18faf3a3d29b5cefbd7b89d614d5
384
py
Python
raven/utils/urlparse.py
MyCollege/raven
9447f3a55ae7703afe84c3493625e3c3fb700700
[ "BSD-3-Clause" ]
null
null
null
raven/utils/urlparse.py
MyCollege/raven
9447f3a55ae7703afe84c3493625e3c3fb700700
[ "BSD-3-Clause" ]
null
null
null
raven/utils/urlparse.py
MyCollege/raven
9447f3a55ae7703afe84c3493625e3c3fb700700
[ "BSD-3-Clause" ]
null
null
null
from __future__ import absolute_import try: import urlparse as _urlparse except ImportError: from urllib import parse as _urlparse def register_scheme(scheme): for method in filter(lambda s: s.startswith('uses_'), dir(_urlparse)): uses = getattr(_urlparse, method) if scheme not in uses: uses.append(scheme) urlparse = _urlparse.urlparse
22.588235
74
0.708333
0
0
0
0
0
0
0
0
7
0.018229
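register_scheme() in the row above appends a scheme to every uses_* list the stdlib parser exposes; on older parsers those lists gate netloc, params, and fragment handling, which is why DSN-style schemes get registered before parsing. A rough sketch of the call pattern (the scheme name is made up):

from raven.utils.urlparse import register_scheme, urlparse

register_scheme('custom+https')                      # hypothetical DSN-style scheme
parts = urlparse('custom+https://key:secret@example.com/2/1')
print(parts.scheme, parts.netloc, parts.path)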
a3db35b8c7d191b6f652e750b697cd40a1dc6c0c
516
py
Python
setup.py
stjordanis/MONeT-1
98a5c7d149ca19c8c64069dbd8f27ce7f97bf3af
[ "MIT" ]
161
2020-10-28T02:21:50.000Z
2022-03-11T05:06:16.000Z
setup.py
stjordanis/MONeT-1
98a5c7d149ca19c8c64069dbd8f27ce7f97bf3af
[ "MIT" ]
4
2020-10-28T02:27:43.000Z
2021-03-31T00:04:43.000Z
setup.py
stjordanis/MONeT-1
98a5c7d149ca19c8c64069dbd8f27ce7f97bf3af
[ "MIT" ]
15
2020-10-28T02:32:12.000Z
2021-12-23T13:20:23.000Z
import setuptools setuptools.setup( name="monet_memory_optimized_training", version="0.0.1", description="Memory Optimized Network Training Framework", url="https://github.com/philkr/lowrank_conv", packages=setuptools.find_packages(include = ['monet', 'monet.*', 'models', 'checkmate', 'gist']), classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], python_requires='>=3.6', )
32.25
101
0.655039
0
0
0
0
0
0
0
0
286
0.554264
a3db4ad6c588be26e30297068925d6bff9a900d1
5,616
py
Python
Tests/Methods/Machine/test_Magnet_Type_11_meth.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
2
2020-06-29T13:48:37.000Z
2021-06-15T07:34:05.000Z
Tests/Methods/Machine/test_Magnet_Type_11_meth.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
null
null
null
Tests/Methods/Machine/test_Magnet_Type_11_meth.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ @date Created on Thu Dec 18 13:56:33 2014 @copyright (C) 2014-2015 EOMYS ENGINEERING. @author pierre_b """ from unittest import TestCase from ddt import ddt, data from pyleecan.Classes.Arc1 import Arc1 from pyleecan.Classes.Segment import Segment from pyleecan.Classes.MagnetType11 import MagnetType11 from pyleecan.Classes.LamSlotMag import LamSlotMag from pyleecan.Classes.SlotMPolar import SlotMPolar from numpy import pi, exp, angle, array from pyleecan.Methods.Machine.Magnet.comp_surface import comp_surface Mag11_test = list() # Internal Slot surface lam = LamSlotMag(is_internal=True, Rext=0.5) lam.slot = SlotMPolar(H0=0, W0=pi / 4, Zs=4) lam.slot.magnet = [MagnetType11(Hmag=1, Wmag=pi / 4)] Mag11_test.append({"test_obj": lam, "S_exp": 0.78539616, "Ao": pi / 4, "H_exp": 1}) # Internal Slot inset lam = LamSlotMag(is_internal=True, Rext=0.5) lam.slot = SlotMPolar(H0=40e-3, W0=pi / 4, Zs=4) lam.slot.magnet = [MagnetType11(Hmag=20e-3, Wmag=pi / 4)] Mag11_test.append({"test_obj": lam, "S_exp": 7.3827e-3, "Ao": pi / 4, "H_exp": 20e-3}) # Outward Slot inset lam = LamSlotMag(is_internal=False, Rext=0.1325) lam.slot = SlotMPolar(H0=5e-3, W0=pi / 10, Zs=8) lam.slot.magnet = [MagnetType11(Hmag=8e-3, Wmag=pi / 12)] Mag11_test.append({"test_obj": lam, "S_exp": 2.09439e-6, "Ao": pi / 12, "H_exp": 8e-3}) # For AlmostEqual DELTA = 1e-4 @ddt class test_Magnet_Type_11_meth(TestCase): """unittest for MagnetType11 methods """ @data(*Mag11_test) def test_comp_surface(self, test_dict): """Check that the computation of the surface is correct """ test_obj = test_dict["test_obj"] result = test_obj.slot.magnet[0].comp_surface() a = result b = test_dict["S_exp"] msg = "Return " + str(a) + " expected " + str(b) self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg) # Compare numerical and analytical results b = comp_surface(test_obj.slot.magnet[0]) msg = "Analytical: " + str(a) + " Numerical " + str(b) self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg) @data(*Mag11_test) def test_comp_height(self, test_dict): """Check that the computation of the height is correct """ test_obj = test_dict["test_obj"] result = test_obj.slot.magnet[0].comp_height() a = result b = test_dict["H_exp"] msg = "Return " + str(a) + " expected " + str(b) self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg) @data(*Mag11_test) def test_comp_angle_op(self, test_dict): """Check that the computation of the opening angle is correct """ test_obj = test_dict["test_obj"] result = test_obj.slot.magnet[0].comp_angle_opening() a = result b = test_dict["Ao"] msg = "Return " + str(a) + " expected " + str(b) self.assertAlmostEqual((a - b) / a, 0, delta=DELTA, msg=msg) def test_build_geometry_out(self): """check that curve_list is correct (outwards magnet)""" lam = LamSlotMag( Rint=40e-3, Rext=90e-3, is_internal=False, is_stator=False, L1=0.45, Nrvd=1, Wrvd=0.05, ) magnet = [MagnetType11(Wmag=pi / 10, Hmag=0.2)] lam.slot = SlotMPolar(Zs=8, W0=pi / 10, H0=0.2, magnet=magnet) test_obj = lam.slot.magnet[0] Z1 = (40e-3 + 0.2) * exp(-1j * pi / 10 / 2) Z2 = (40e-3 + 0.2) * exp(1j * pi / 10 / 2) Z = abs(Z1) Z3 = (Z - 0.2) * exp(1j * angle(Z1)) Z4 = (Z - 0.2) * exp(1j * angle(Z2)) # # Creation of curve curve_list = list() curve_list.append(Segment(Z1, Z3)) curve_list.append(Arc1(Z3, Z4, abs(Z3))) curve_list.append(Segment(Z4, Z2)) curve_list.append(Arc1(Z2, Z1, -abs(Z2))) surface = test_obj.build_geometry() result = surface[0].get_lines() for i in range(0, len(result)): a = result[i].begin b = curve_list[i].begin 
self.assertAlmostEqual((a - b) / a, 0, delta=DELTA) a = result[i].end b = curve_list[i].end self.assertAlmostEqual((a - b) / a, 0, delta=DELTA) def test_build_geometry_in(self): """check that curve_list is correct (inwards magnet)""" lam = LamSlotMag( Rint=40e-1, Rext=90e-1, is_internal=True, is_stator=False, L1=0.45, Nrvd=1, Wrvd=0.05, ) magnet = [MagnetType11(Wmag=pi / 10, Hmag=0.2)] lam.slot = SlotMPolar(Zs=8, W0=pi / 10, H0=0.2, magnet=magnet) test_obj = lam.slot.magnet[0] Z1 = (90e-1 - 0.2) * exp(-1j * pi / 10 / 2) Z2 = (90e-1 - 0.2) * exp(1j * pi / 10 / 2) Z = abs(Z1) Z3 = (Z + 0.2) * exp(1j * angle(Z1)) Z4 = (Z + 0.2) * exp(1j * angle(Z2)) # # Creation of curve curve_list = list() curve_list.append(Segment(Z1, Z3)) curve_list.append(Arc1(Z3, Z4, abs(Z3))) curve_list.append(Segment(Z4, Z2)) curve_list.append(Arc1(Z2, Z1, -abs(Z2))) surface = test_obj.build_geometry() result = surface[0].get_lines() for i in range(0, len(result)): a = result[i].begin b = curve_list[i].begin self.assertAlmostEqual((a - b) / a, 0, delta=DELTA) a = result[i].end b = curve_list[i].end self.assertAlmostEqual((a - b) / a, 0, delta=DELTA)
33.035294
87
0.579238
4,228
0.752849
0
0
4,233
0.753739
0
0
881
0.156873
a3db7f4c59462c81c92a9534466aa08adc11bb16
4,600
py
Python
tomo_encoders/tasks/void_mapping.py
arshadzahangirchowdhury/TomoEncoders
9c2b15fd515d864079f198546821faee5d78df17
[ "BSD-3-Clause" ]
null
null
null
tomo_encoders/tasks/void_mapping.py
arshadzahangirchowdhury/TomoEncoders
9c2b15fd515d864079f198546821faee5d78df17
[ "BSD-3-Clause" ]
null
null
null
tomo_encoders/tasks/void_mapping.py
arshadzahangirchowdhury/TomoEncoders
9c2b15fd515d864079f198546821faee5d78df17
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ """ from operator import mod from tomo_encoders.misc.voxel_processing import modified_autocontrast, TimerGPU from tomo_encoders.reconstruction.recon import recon_patches_3d import cupy as cp import numpy as np from skimage.filters import threshold_otsu from tomo_encoders import Grid def get_values_cyl_mask(vol, mask_fac): vol_shape = vol.shape assert vol_shape[1] == vol_shape[2], "must be a tomographic volume where shape y = shape x" shape_yx = vol_shape[1] shape_z = vol_shape[0] rad = int(mask_fac*shape_yx/2) pts = cp.arange(-int(shape_yx//2), int(cp.ceil(shape_yx//2))) yy, xx = cp.meshgrid(pts, pts, indexing = 'ij') circ = (cp.sqrt(yy**2 + xx**2) < rad).astype(cp.uint8) # inside is positive circ = circ[cp.newaxis, ...] cyl = cp.repeat(circ, shape_z, axis = 0) return vol[cyl > 0] def cylindrical_mask(out_vol, mask_fac, mask_val = 0): vol_shape = out_vol.shape assert vol_shape[1] == vol_shape[2], "must be a tomographic volume where shape y = shape x" shape_yx = vol_shape[1] shape_z = vol_shape[0] rad = int(mask_fac*shape_yx/2) pts = cp.arange(-int(shape_yx//2), int(cp.ceil(shape_yx//2))) yy, xx = cp.meshgrid(pts, pts, indexing = 'ij') circ = (cp.sqrt(yy**2 + xx**2) < rad).astype(cp.uint8) # inside is positive circ = circ[cp.newaxis, ...] cyl = cp.repeat(circ, shape_z, axis = 0) out_vol[cyl == 0] = mask_val return def segment_otsu(vol, s = 0.05): '''segment volume with otsu''' timer = TimerGPU() timer.tic() tmp_values = vol[::4,::4,::4].get() # rec_min_max = modified_autocontrast(tmp_values, s = s, normalize_sampling_factor=1) thresh = cp.float32(threshold_otsu(tmp_values.reshape(-1))) vol = (vol < thresh).astype(cp.uint8) timer.toc("otsu thresholding") return vol def edge_map(Y): ''' this algorithm was inspired by: https://github.com/tomochallenge/tomochallenge_utils/blob/master/foam_phantom_utils.py ''' msk = cp.zeros_like(Y) tmp = Y[:-1]!=Y[1:] msk[:-1][tmp] = 1 msk[1:][tmp] = 1 tmp = Y[:,:-1]!=Y[:,1:] msk[:,:-1][tmp] = 1 msk[:,1:][tmp] = 1 tmp = Y[:,:,:-1]!=Y[:,:,1:] msk[:,:,:-1][tmp] = 1 msk[:,:,1:][tmp] = 1 return msk > 0 def guess_surface(V_bin, b, wd): # find patches on surface wdb = int(wd//b) p3d = Grid(V_bin.shape, width = wdb) x = p3d.extract(V_bin) is_surf = (np.std(x, axis = (1,2,3)) > 0.0) is_ones = (np.sum(x, axis = (1,2,3))/(wdb**3) == 1) is_zeros = (np.sum(x, axis = (1,2,3))/(wdb**3) == 0) p3d = p3d.rescale(b) p3d_surf = p3d.filter_by_condition(is_surf) p3d_ones = p3d.filter_by_condition(is_ones) p3d_zeros = p3d.filter_by_condition(is_zeros) eff = len(p3d_surf)*(wd**3)/np.prod(p3d_surf.vol_shape) print(f"\tSTAT: r value: {eff*100.0:.2f}") return p3d_surf, p3d_ones, p3d_zeros def process_patches(projs, theta, center, fe, p_surf, min_max, TIMEIT = False): # SCHEME 1: integrate reconstruction and segmention (segments data on gpu itself) # st_proc = cp.cuda.Event(); end_proc = cp.cuda.Event(); st_proc.record() # x_surf, p_surf = recon_patches_3d(projs, theta, center, p_surf, \ # apply_fbp = True, segmenter = fe, \ # segmenter_batch_size = 256) # end_proc.record(); end_proc.synchronize(); t_surf = cp.cuda.get_elapsed_time(st_proc,end_proc) # SCHEME 2: reconstruct and segment separately (copies rec data from gpu to cpu) st_rec = cp.cuda.Event(); end_rec = cp.cuda.Event(); st_rec.record() x_surf, p_surf = recon_patches_3d(projs, theta, center, p_surf, \ apply_fbp =True) end_rec.record(); end_rec.synchronize(); t_rec = cp.cuda.get_elapsed_time(st_rec,end_rec) st_seg = cp.cuda.Event(); end_seg = cp.cuda.Event(); st_seg.record() 
x_surf = np.clip(x_surf, *min_max) x_surf = fe.predict_patches("segmenter", x_surf[...,np.newaxis], 256, None, min_max = min_max)[...,0] end_seg.record(); end_seg.synchronize(); t_seg = cp.cuda.get_elapsed_time(st_seg,end_seg) print(f'\tTIME: local reconstruction - {t_rec/1000.0:.2f} secs') print(f'\tTIME: local segmentation - {t_seg/1000.0:.2f} secs') print(f'\tSTAT: total patches in neighborhood: {len(p_surf)}') if TIMEIT: return x_surf, p_surf, t_rec, t_seg else: return x_surf, p_surf
35.384615
122
0.62087
0
0
0
0
0
0
0
0
1,248
0.271304
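cylindrical_mask() in the row above builds its mask with CuPy meshgrids; the same idea in plain NumPy, for reference on a machine without a GPU, is sketched below (array sizes are illustrative).

import numpy as np


def cylindrical_mask_np(vol, mask_fac, mask_val=0):
    # NumPy rendering of cylindrical_mask() above: zero out voxels that fall
    # outside a cylinder of radius mask_fac * (ny / 2) whose axis runs along z.
    nz, ny, nx = vol.shape
    assert ny == nx, "must be a tomographic volume where shape y = shape x"
    rad = int(mask_fac * ny / 2)
    pts = np.arange(-(ny // 2), int(np.ceil(ny / 2)))
    yy, xx = np.meshgrid(pts, pts, indexing='ij')
    inside = np.sqrt(yy ** 2 + xx ** 2) < rad
    vol[:, ~inside] = mask_val                # broadcast the 2D mask over every slice
    return vol


vol = np.ones((8, 16, 16), dtype=np.float32)
cylindrical_mask_np(vol, mask_fac=0.9)
print(int(vol[0].sum()), "voxels kept in each slice")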
a3dc8e80c4d30f101894ef231d725a510807944b
833
py
Python
handypackages/subscribe/migrations/0001_initial.py
roundium/handypackages
b8a0e4952644144b31168f9a4ac8e743933d87c7
[ "MIT" ]
1
2019-07-31T11:40:06.000Z
2019-07-31T11:40:06.000Z
handypackages/subscribe/migrations/0001_initial.py
roundium/handypackages
b8a0e4952644144b31168f9a4ac8e743933d87c7
[ "MIT" ]
10
2020-02-12T01:16:25.000Z
2021-06-10T18:42:24.000Z
handypackages/subscribe/migrations/0001_initial.py
roundium/handypackages
b8a0e4952644144b31168f9a4ac8e743933d87c7
[ "MIT" ]
1
2019-07-31T11:40:18.000Z
2019-07-31T11:40:18.000Z
# Generated by Django 2.2.1 on 2019-06-22 11:03 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='SubscribeModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('email', models.EmailField(db_index=True, max_length=255, unique=True, verbose_name='Email')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='Subscribe Time')), ], options={ 'verbose_name': 'Subscribe Email', 'verbose_name_plural': 'Subscribe Emails', 'abstract': False, }, ), ]
29.75
114
0.57503
740
0.888355
0
0
0
0
0
0
194
0.232893
a3dd7fa87a5a13e38a56d66d0de7938491e30d3e
793
py
Python
TuShare/view/sh_margins.py
lwh2015/TuShare
f244e05e5cf208e18e6237d3b81f71f0d3c1394a
[ "MIT" ]
1
2018-09-26T08:34:02.000Z
2018-09-26T08:34:02.000Z
TuShare/view/sh_margins.py
lwh2015/TuShare
f244e05e5cf208e18e6237d3b81f71f0d3c1394a
[ "MIT" ]
null
null
null
TuShare/view/sh_margins.py
lwh2015/TuShare
f244e05e5cf208e18e6237d3b81f71f0d3c1394a
[ "MIT" ]
null
null
null
# -*- coding: UTF-8 -*- import json from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt import tushare as ts from .publiceClass import DateEncoder @csrf_exempt def sh_margins(request): try: start = request.POST.get('start','')#选填 end = request.POST.get('end','')#选填 data = ts.sh_margins(start,end) res = {'columns':[ '信用交易日期', '本日融资余额(元)', '本日融资买入额(元)', '本日融券余量', '本日融券余量金额(元)', '本日融券卖出量', '本日融资融券余额(元)' ],'data':json.loads(json.dumps(data.values,cls=DateEncoder))} except(BaseException): return HttpResponse(BaseException) else: return HttpResponse(json.dumps(res),content_type="application/json")
26.433333
76
0.596469
0
0
0
0
716
0.79116
0
0
264
0.291713
a3de1f30f9f2a9d6efbf703fb8df76e65a62d871
1,181
py
Python
intermediate/classes/camera.py
robertob45/learning-python
7407f7d9e513792150eb2b65ebc644b5f8632c56
[ "MIT" ]
null
null
null
intermediate/classes/camera.py
robertob45/learning-python
7407f7d9e513792150eb2b65ebc644b5f8632c56
[ "MIT" ]
null
null
null
intermediate/classes/camera.py
robertob45/learning-python
7407f7d9e513792150eb2b65ebc644b5f8632c56
[ "MIT" ]
null
null
null
class Camera: """Simple camera model used for practicing classes.""" def __init__(self, brand, sensor, lens, battery): self.brand = brand self.sensor = sensor self.lens = lens self.battery = battery def __str__(self): return self.brand + ' ' + self.sensor + ' ' + self.lens + ' ' + self.battery def focus(self): print('Focusing using', self.lens, '...') print('') def frame(self): print('Move until your subject is in the desired position') print('.') print('.') print('.') def flash(self, flash_use): if flash_use == 's': print('Shooting with flash...') else: print('Shooting without flash...') print('') def format(self, save_format): if save_format in ('jpg', 'raw'): print('Saving in: ' + save_format) else: print('No valid format to save') def take_picture(self, save_format, flash_use): print('Say cheese!') self.focus() self.frame() self.flash(flash_use) self.format(save_format)
27.465116
84
0.531753
1,180
0.999153
0
0
0
0
0
0
244
0.206605
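A short usage sketch for the Camera class in the row above; the module name and constructor values are assumptions rather than anything prescribed by the original file.

from camera import Camera    # assuming camera.py is importable as a module

nikon = Camera('Nikon', 'full-frame', '50mm f/1.8', 'EN-EL15')
print(nikon)                                             # "Nikon full-frame 50mm f/1.8 EN-EL15"
nikon.take_picture(save_format='raw', flash_use='s')     # focus, frame, flash, then save as raw
nikon.take_picture(save_format='png', flash_use='n')     # unknown format: "No valid format to save"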
a3de3ec0c21d41a610e2d90e04c28f83ca0ba4c2
7,332
py
Python
dbaas/tsuru/tests/test_service_add.py
didindinn/database-as-a-service
747de31ff8546f7874ddd654af860e130afd17a0
[ "BSD-3-Clause" ]
null
null
null
dbaas/tsuru/tests/test_service_add.py
didindinn/database-as-a-service
747de31ff8546f7874ddd654af860e130afd17a0
[ "BSD-3-Clause" ]
null
null
null
dbaas/tsuru/tests/test_service_add.py
didindinn/database-as-a-service
747de31ff8546f7874ddd654af860e130afd17a0
[ "BSD-3-Clause" ]
null
null
null
from mock import patch, MagicMock from django.contrib.auth.models import User from django.test import TestCase from django.core.urlresolvers import reverse from django.utils.datastructures import MultiValueDictKeyError from account.models import Role, Team, Organization from physical.tests.factory import EnvironmentFactory, PlanFactory from physical.models import Plan class ValidationTestCase(TestCase): """HTTP test cases for the tsuru Service Add. This class focuses on validations of POST """ USERNAME = "fake_user" PASSWORD = "123456" def setUp(self): self.role = Role.objects.get_or_create(name="fake_role")[0] self.organization = Organization.objects.get_or_create( name='fake_organization' )[0] self.team = Team.objects.get_or_create( name="fake_team", role=self.role, organization=self.organization)[0] self.superuser = User.objects.create_superuser( self.USERNAME, email="{}@admin.com".format(self.USERNAME), password=self.PASSWORD ) self.team.users.add(self.superuser) self.client.login(username=self.USERNAME, password=self.PASSWORD) self.env = 'dev' self.environment = EnvironmentFactory.create(name=self.env) self.url = reverse('tsuru:service-add', args=(self.env,)) self.name = 'fake_database' self.user = '{}@admin.com'.format(self.USERNAME) self.description = 'fake desc' self.plan = PlanFactory(name='fake_plan', provider=Plan.CLOUDSTACK) self.plan.environments.add(self.environment) self.plan_name = 'fake-plan-dev' def tearDown(self): self.client.logout() def _assert_resp(self, resp, msg): self.assertEqual(resp.status_code, 400) self.assertEqual(resp.content, msg) def test_name_not_in_payload(self): with self.assertRaises(MultiValueDictKeyError): self.client.post(self.url, {}) def test_user_not_in_payload(self): with self.assertRaises(MultiValueDictKeyError): self.client.post( self.url, {'name': self.name} ) def test_team_not_in_payload(self): with self.assertRaises(MultiValueDictKeyError): self.client.post( self.url, {'name': self.name, 'user': self.user} ) def test_description_fail(self): resp = self.client.post( self.url, {'name': self.name, 'user': self.user, 'team': self.team} ) self._assert_resp(resp, '"A description must be provided."') def test_name_fail(self): resp = self.client.post( self.url, { 'name': '99invalid-name', 'user': self.user, 'description': self.description, 'team': self.team } ) self._assert_resp( resp, '"Your database name must match /^[a-z][a-z0-9_]+$/ ."' ) @patch('tsuru.views.Database.objects.get', new=MagicMock()) def test_database_found(self): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team } ) self._assert_resp( resp, '"There is already a database called fake_database in dev."' ) @patch( 'tsuru.views.database_name_evironment_constraint', new=MagicMock(return_value=True) ) def test_already_exist_database_with_name(self): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team } ) self._assert_resp( resp, '"fake_database already exists in env dev!"' ) def test_user_not_found(self): resp = self.client.post( self.url, { 'name': self.name, 'user': 'another_user@not_found.com', 'description': self.description, 'team': self.team } ) self._assert_resp( resp, '"User does not exist."' ) def test_team_not_found(self): resp = self.client.post( self.url, { 'name': self.name, 'user': 'another_user@not_found.com', 'description': self.description, 'team': 'team_not_found' } ) self._assert_resp( resp, '"User does not exist."' ) def 
test_env_not_found(self): self.url = self.url.replace( '/{}/'.format(self.env), '/env_not_found/' ) resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team.name } ) self._assert_resp( resp, '"Environment does not exist."' ) @patch( 'tsuru.views.Team.count_databases_in_use', new=MagicMock(return_value=99) ) def test_allocation_limit(self): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team.name } ) self._assert_resp( resp, ('"The database alocation limit of 2 has been exceeded for the ' 'selected team: fake_team"') ) def test_plan_not_on_payload(self): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team.name } ) self._assert_resp( resp, '"Plan was not found"' ) def test_plan_not_found(self): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team.name, 'plan': 'not found' } ) self._assert_resp( resp, '"Plan was not found"' ) @patch('notification.tasks.TaskRegister.create_task', new=MagicMock()) @patch('notification.tasks.create_database_with_retry') def test_call_database_create(self, create_database_mock): resp = self.client.post( self.url, { 'name': self.name, 'user': self.user, 'description': self.description, 'team': self.team.name, 'plan': self.plan_name } ) self.assertTrue(create_database_mock.called) self.assertEqual(resp.status_code, 201)
30.17284
76
0.524686
6,956
0.948718
0
0
2,143
0.29228
0
0
1,369
0.186716
a3dff7c9e786003c054a9ad079320e1b77c72f5f
439
py
Python
Main/migrations/0072_auto_20210506_0016.py
Muhammet-Yildiz/Ecommerce_Website-HepsiOrada
91935014ccc37e0ea57c8cbd2c4891941dcbb917
[ "MIT" ]
10
2021-06-01T19:30:00.000Z
2021-06-28T16:16:20.000Z
Main/migrations/0072_auto_20210506_0016.py
Muhammet-Yildiz/HepsiOrada
91935014ccc37e0ea57c8cbd2c4891941dcbb917
[ "MIT" ]
null
null
null
Main/migrations/0072_auto_20210506_0016.py
Muhammet-Yildiz/HepsiOrada
91935014ccc37e0ea57c8cbd2c4891941dcbb917
[ "MIT" ]
null
null
null
# Generated by Django 3.1.4 on 2021-05-05 21:16 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('Main', '0071_auto_20210506_0004'), ] operations = [ migrations.RemoveField( model_name='product', name='chooseColor', ), migrations.RemoveField( model_name='product', name='chooseSize', ), ]
19.954545
47
0.567198
354
0.806378
0
0
0
0
0
0
121
0.275626
a3e04f191bacc2a7d80fcd1ad9bb0b6bdef01714
788
py
Python
1.py
zweed4u/dailycodingproblem
6e40eaad347e283f86a11adeff01c6426211a0be
[ "MIT" ]
null
null
null
1.py
zweed4u/dailycodingproblem
6e40eaad347e283f86a11adeff01c6426211a0be
[ "MIT" ]
null
null
null
1.py
zweed4u/dailycodingproblem
6e40eaad347e283f86a11adeff01c6426211a0be
[ "MIT" ]
null
null
null
#!/usr/bin/python3 """ Good morning! Here's your coding interview problem for today. This problem was recently asked by Google. Given a list of numbers and a number k, return whether any two numbers from the list add up to k. For example, given [10, 15, 3, 7] and k of 17, return true since 10 + 7 is 17. Bonus: Can you do this in one pass? """ def func(l, k): sums = [] for index, element in enumerate(l): print(f'Current element: {element}') if index == 0: # first element - need another print() continue for num in range(index): print(f'Appending {l[index]} + {l[num]}') sums.append(l[num] + l[index]) print() print(sums) return k in sums print(func([10, 15, 3, 7], 17))
26.266667
97
0.593909
0
0
0
0
0
0
0
0
440
0.558376
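The solution in the row above answers the prompt with a quadratic double loop and never addresses the "one pass" bonus; a set-based single pass looks like this:

def has_pair_with_sum(numbers, k):
    seen = set()
    for x in numbers:
        if k - x in seen:        # an earlier element pairs with x to make k
            return True
        seen.add(x)
    return False


assert has_pair_with_sum([10, 15, 3, 7], 17) is True     # 10 + 7
assert has_pair_with_sum([10, 15, 3, 7], 19) is False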
a3e06ae8cd6e0aabca5915c1a17ae312a2a03a30
734
py
Python
gryphon/data/template_scaffolding/template/setup.py
ow-gryphon/gryphon
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
[ "MIT" ]
null
null
null
gryphon/data/template_scaffolding/template/setup.py
ow-gryphon/gryphon
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
[ "MIT" ]
1
2022-03-08T14:54:26.000Z
2022-03-08T15:02:52.000Z
gryphon/data/template_scaffolding/template/setup.py
ow-gryphon/gryphon
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
[ "MIT" ]
null
null
null
import json import setuptools with open("template/README.md", "r") as fh: long_description = fh.read() with open('requirements.txt') as fr: requirements = fr.read().strip().split('\n') with open('metadata.json') as fr: metadata = json.load(fr) setuptools.setup( name="", # Name of the repository version="0.0.1", author=metadata.get("author", ""), author_email=metadata.get("author_email", ""), description=metadata.get("description", ""), long_description=long_description, long_description_content_type="text/markdown", url="", # Repository URL or externally maintained page packages=setuptools.find_packages(), python_requires='>=3.6', install_requires=requirements, )
28.230769
59
0.688011
0
0
0
0
0
0
0
0
204
0.277929
a3e0ad9312af3accd64fc327daefc5bf89405ae4
6,558
py
Python
train_base3.py
Mhaiyang/iccv
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
[ "MIT" ]
2
2019-01-10T03:44:03.000Z
2019-05-24T08:50:14.000Z
train_base3.py
Mhaiyang/iccv
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
[ "MIT" ]
null
null
null
train_base3.py
Mhaiyang/iccv
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
[ "MIT" ]
null
null
null
""" @Time : 201/21/19 10:41 @Author : TaylorMei @Email : [email protected] @Project : iccv @File : train_base3.py @Function: """ import datetime import os import torch from torch import nn from torch import optim from torch.autograd import Variable from torch.backends import cudnn from torch.utils.data import DataLoader from torchvision import transforms from tensorboardX import SummaryWriter from tqdm import tqdm import joint_transforms from config import msd_training_root from config import backbone_path from dataset import ImageFolder from misc import AvgMeter, check_mkdir from model.base3 import BASE3 import loss as L cudnn.benchmark = True device_ids = [2] ckpt_path = './ckpt' exp_name = 'BASE3' args = { 'epoch_num': 100, 'train_batch_size': 14, 'last_epoch': 0, 'lr': 5e-3, 'lr_decay': 0.9, 'weight_decay': 5e-4, 'momentum': 0.9, 'snapshot': '', 'scale': 384, 'save_point': [60, 80, 90], 'add_graph': True, 'poly_train': True, 'optimizer': 'SGD' } # Path. check_mkdir(ckpt_path) check_mkdir(os.path.join(ckpt_path, exp_name)) vis_path = os.path.join(ckpt_path, exp_name, 'log') check_mkdir(vis_path) log_path = os.path.join(ckpt_path, exp_name, str(datetime.datetime.now()) + '.txt') writer = SummaryWriter(log_dir=vis_path, comment=exp_name) # Transform Data. joint_transform = joint_transforms.Compose([ joint_transforms.RandomRotate(), joint_transforms.Resize((args['scale'], args['scale'])) ]) img_transform = transforms.Compose([ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # maybe can optimized. ]) target_transform = transforms.ToTensor() # Prepare Data Set. train_set = ImageFolder(msd_training_root, joint_transform, img_transform, target_transform) print("Train set: {}".format(train_set.__len__())) train_loader = DataLoader(train_set, batch_size=args['train_batch_size'], num_workers=0, shuffle=True) def main(): print(args) print(exp_name) net = BASE3(backbone_path).cuda(device_ids[0]).train() if args['add_graph']: writer.add_graph(net, input_to_model=torch.rand( args['train_batch_size'], 3, args['scale'], args['scale']).cuda(device_ids[0])) if args['optimizer'] == 'Adam': print("Adam") optimizer = optim.Adam([ {'params': [param for name, param in net.named_parameters() if name[-4:] == 'bias'], 'lr': 2 * args['lr']}, {'params': [param for name, param in net.named_parameters() if name[-4:] != 'bias'], 'lr': 1 * args['lr'], 'weight_decay': args['weight_decay']} ]) else: print("SGD") optimizer = optim.SGD([ {'params': [param for name, param in net.named_parameters() if name[-4:] == 'bias'], 'lr': 2 * args['lr']}, {'params': [param for name, param in net.named_parameters() if name[-4:] != 'bias'], 'lr': 1 * args['lr'], 'weight_decay': args['weight_decay']} ], momentum=args['momentum']) if len(args['snapshot']) > 0: print('Training Resumes From \'%s\'' % args['snapshot']) net.load_state_dict(torch.load(os.path.join(ckpt_path, exp_name, args['snapshot'] + '.pth'))) net = nn.DataParallel(net, device_ids=device_ids) print("Using {} GPU(s) to Train.".format(len(device_ids))) open(log_path, 'w').write(str(args) + '\n\n') train(net, optimizer) writer.close() def train(net, optimizer): curr_iter = 1 for epoch in range(args['last_epoch'] + 1, args['last_epoch'] + 1 + args['epoch_num']): loss_4_record, loss_3_record, loss_2_record, loss_1_record, \ loss_f_record, loss_record = AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter() train_iterator = tqdm(train_loader, total=len(train_loader)) for data in train_iterator: if args['poly_train']: 
base_lr = args['lr'] * (1 - float(curr_iter) / (args['epoch_num'] * len(train_loader))) ** args[ 'lr_decay'] optimizer.param_groups[0]['lr'] = 2 * base_lr optimizer.param_groups[1]['lr'] = 1 * base_lr inputs, labels = data batch_size = inputs.size(0) inputs = Variable(inputs).cuda(device_ids[0]) labels = Variable(labels).cuda(device_ids[0]) optimizer.zero_grad() predict_4, predict_3, predict_2, predict_1, predict_f = net(inputs) loss_4 = L.lovasz_hinge(predict_4, labels) loss_3 = L.lovasz_hinge(predict_3, labels) loss_2 = L.lovasz_hinge(predict_2, labels) loss_1 = L.lovasz_hinge(predict_1, labels) loss_f = L.lovasz_hinge(predict_f, labels) loss = loss_4 + loss_3 + loss_2 + loss_1 + loss_f loss.backward() optimizer.step() loss_record.update(loss.data, batch_size) loss_4_record.update(loss_4.data, batch_size) loss_3_record.update(loss_3.data, batch_size) loss_2_record.update(loss_2.data, batch_size) loss_1_record.update(loss_1.data, batch_size) loss_f_record.update(loss_f.data, batch_size) if curr_iter % 50 == 0: writer.add_scalar('loss', loss, curr_iter) writer.add_scalar('loss_4', loss_4, curr_iter) writer.add_scalar('loss_3', loss_3, curr_iter) writer.add_scalar('loss_2', loss_2, curr_iter) writer.add_scalar('loss_1', loss_1, curr_iter) writer.add_scalar('loss_f', loss_f, curr_iter) log = '[%3d], [%6d], [%.6f], [%.5f], [L4: %.5f], [L3: %.5f], [L2: %.5f], [L1: %.5f], [Lf: %.5f]' % \ (epoch, curr_iter, base_lr, loss_record.avg, loss_4_record.avg, loss_3_record.avg, loss_2_record.avg, loss_1_record.avg, loss_f_record.avg) train_iterator.set_description(log) open(log_path, 'a').write(log + '\n') curr_iter += 1 if epoch in args['save_point']: net.cpu() torch.save(net.module.state_dict(), os.path.join(ckpt_path, exp_name, '%d.pth' % epoch)) net.cuda(device_ids[0]) if epoch >= args['epoch_num']: net.cpu() torch.save(net.module.state_dict(), os.path.join(ckpt_path, exp_name, '%d.pth' % epoch)) print("Optimization Have Done!") return if __name__ == '__main__': main()
34.15625
119
0.617261
0
0
0
0
0
0
0
0
1,063
0.162092
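train() in the row above anneals the learning rate with a poly schedule and keeps bias parameters at twice the base rate; isolating the formula makes the decay curve easy to inspect. The batch count below is an assumed value standing in for len(train_loader).

def poly_lr(base_lr, curr_iter, max_iter, power=0.9):
    # lr * (1 - iter / max_iter) ** power, as computed inside train()
    return base_lr * (1 - float(curr_iter) / max_iter) ** power


batches_per_epoch = 50                       # assumed; len(train_loader) in the real run
max_iter = 100 * batches_per_epoch           # args['epoch_num'] * len(train_loader)
for it in (1, max_iter // 2, max_iter - 1):
    lr = poly_lr(5e-3, it, max_iter)         # args['lr'] = 5e-3, args['lr_decay'] = 0.9
    print(it, lr, 2 * lr)                    # weight lr and the doubled bias lr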
a3e0c5f65be532d1c0caf49217af9908f82568d1
574
py
Python
tests/test_comment.py
uwase-diane/min_pitch
514ab5da150244e900fd51b6563173a905ef4f29
[ "Unlicense" ]
1
2020-11-29T16:18:50.000Z
2020-11-29T16:18:50.000Z
tests/test_comment.py
uwase-diane/min_pitch
514ab5da150244e900fd51b6563173a905ef4f29
[ "Unlicense" ]
null
null
null
tests/test_comment.py
uwase-diane/min_pitch
514ab5da150244e900fd51b6563173a905ef4f29
[ "Unlicense" ]
null
null
null
import unittest from app.models import Comment, Pitch from app import db class TestPitchComment(unittest.TestCase): def setUp(self): self.new_pitch = Pitch(post="doit", category='Quotes') self.new_comment = Comment(comment="good comment", pitch=self.new_pitch) def test_instance(self): self.assertTrue(isinstance(self.new_comment, Comment)) def test_check_instance_variables(self): self.assertEqual(self.new_comment.comment, "good comment") self.assertEqual(self.new_comment.pitch, self.new_pitch)
33.764706
82
0.716028
500
0.87108
0
0
0
0
0
0
49
0.085366
a3e11b8d66ab1bd3a621bca6d89f7a077e4198d7
3,584
py
Python
teacher/views.py
itteamforslp/safelife_project
53af23dec0d19acf7227a43a16d7aedad443e90d
[ "MIT" ]
null
null
null
teacher/views.py
itteamforslp/safelife_project
53af23dec0d19acf7227a43a16d7aedad443e90d
[ "MIT" ]
4
2021-04-08T20:11:37.000Z
2021-09-22T19:37:57.000Z
safelife/safelife_project/teacher/views.py
CSUS-Scrumbags/safelife
2de7f83f637fae930b1176af796f4cc6f0519c86
[ "MIT" ]
null
null
null
from django.shortcuts import render from django.http import HttpResponse from django.contrib.auth.decorators import login_required from django.views.decorators.csrf import csrf_exempt from django.template import loader from django.db import connection from django.http import HttpResponseRedirect import datetime from django.http import JsonResponse from administrator.models import Course, CourseTeacher, CourseStudent, Student from django.core.exceptions import PermissionDenied def teacher_only(function): #"""Limit view to teacher only.""" def _inner(request, *args, **kwargs): if not request.user.is_staff == False | request.user.is_superuser: raise PermissionDenied return function(request, *args, **kwargs) return _inner @login_required(login_url = '/users') @teacher_only def home(request): current_user = request.user.id teacher_current_courses = Course.objects.select_related().raw('SELECT * ' 'FROM course_teachers as CT, courses as C ' 'WHERE CT.teachers_id = %s AND C.course_id = CT.course_id AND C.is_complete = 0 ', [current_user]) currentdate = datetime.datetime.today().strftime('%Y-%m-%d') with connection.cursor() as cursor: cursor.execute('SELECT CL.course_id, CL.date ' 'FROM classes as CL, course_teachers as CT ' 'WHERE CT.teachers_id = %s AND CL.date >= %s ' 'AND CT.course_id = CL.course_id ' 'GROUP BY CL.course_id ', [current_user, currentdate]) next_class_date = cursor.fetchall() with connection.cursor() as cursor: cursor.execute('SELECT CS.course_id, COUNT(CS.students_id) ' 'FROM course_teachers as CT, course_students as CS ' 'WHERE CT.teachers_id = %s AND CT.course_id = CS.course_id ' 'GROUP BY CS.course_id ', [current_user]) teacher_student_count = cursor.fetchall() with connection.cursor() as cursor: cursor.execute('SELECT C.course_id, C.notes ' 'FROM course_teachers as CT, courses as C ' 'WHERE CT.teachers_id = %s AND C.course_id = CT.course_id ' 'GROUP BY CT.course_id ', [current_user]) teacher_course_notes = cursor.fetchall() template = loader.get_template('teacher/dashboard.html') context = { 'teacher_current_courses': teacher_current_courses, 'teacher_student_count': teacher_student_count, 'next_class_date': next_class_date, 'teacher_course_notes': teacher_course_notes } # Render the template to the user return HttpResponse(template.render(context, request)) @csrf_exempt def update_course_notes(request): # Get the student name that was passed from the web page courseNotes = request.POST.get('courseNotes') courseId = request.POST.get('courseId') # Create a cursor to execute raw SQL queries. with connection.cursor() as cursor: cursor.execute('UPDATE courses ' 'SET notes = %s ' 'WHERE course_id = %s', [courseNotes, courseId]) # Render the response to the user
44.8
154
0.595145
0
0
0
0
2,803
0.782087
0
0
1,060
0.295759
a3e19235aa28103e4d3ebb91450083d6b6b9fdab
5,898
py
Python
botstory/middlewares/text/text_test.py
botstory/bot-story
9c5b2fc7f7a14dbd467d70f60d5ba855ef89dac3
[ "MIT" ]
5
2017-01-14T13:42:13.000Z
2021-07-27T21:52:04.000Z
botstory/middlewares/text/text_test.py
botstory/bot-story
9c5b2fc7f7a14dbd467d70f60d5ba855ef89dac3
[ "MIT" ]
235
2016-11-07T23:33:28.000Z
2018-03-13T11:27:33.000Z
botstory/middlewares/text/text_test.py
hyzhak/bot-story
9c5b2fc7f7a14dbd467d70f60d5ba855ef89dac3
[ "MIT" ]
5
2017-01-14T13:42:14.000Z
2020-11-06T08:33:20.000Z
import logging import pytest import re from . import text from ... import matchers from ...utils import answer, SimpleTrigger logger = logging.getLogger(__name__) @pytest.mark.asyncio async def test_should_run_story_on_equal_message(): trigger = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on('hi there!') def one_story(): @story.part() def then(ctx): trigger.passed() await talk.pure_text('hi there!') assert trigger.is_triggered @pytest.mark.asyncio async def test_should_not_run_story_on_non_equal_message(): trigger = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on('hi there!') def one_story(): @story.part() def then(ctx): trigger.passed() await talk.pure_text('buy!') assert not trigger.is_triggered @pytest.mark.asyncio async def test_should_catch_any_text_message(): trigger = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.Any()) def one_story(): @story.part() def then(ctx): trigger.passed() await talk.pure_text('hi there!') assert trigger.is_triggered @pytest.mark.asyncio async def test_should_ignore_any_non_text_message(): trigger = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.Any()) def one_story(): @story.part() def then(ctx): trigger.passed() await talk.location('some where') assert not trigger.is_triggered def test_serialize_text_any(): m_old = text.Any() m_new = matchers.deserialize(matchers.serialize(m_old)) assert isinstance(m_new, text.Any) @pytest.mark.asyncio async def test_should_catch_equal_text_message(): trigger_hi_there = SimpleTrigger() trigger_see_you = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.Equal('hi there!')) def first_story(): @story.part() def then(ctx): trigger_hi_there.passed() @story.on(text.Equal('see you!')) def second_story(): @story.part() def then(ctx): trigger_see_you.passed() await talk.pure_text('see you!') assert not trigger_hi_there.is_triggered assert trigger_see_you.is_triggered def test_equal_handle_should_create_right_type(): assert isinstance(text.Equal.handle(''), text.Equal) def test_serialize_text_equal(): m_old = text.Equal('hats off') m_new = matchers.deserialize(matchers.serialize(m_old)) assert isinstance(m_new, text.Equal) assert m_new.test_string == 'hats off' @pytest.mark.asyncio async def test_should_catch_equal_text_message_case_in_sensitive(): trigger_hi_there = SimpleTrigger() trigger_see_you = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.EqualCaseIgnore('hi there!')) def first_story(): @story.part() def then(ctx): trigger_hi_there.passed() @story.on(text.EqualCaseIgnore('see you!')) def second_story(): @story.part() def then(ctx): trigger_see_you.passed() await talk.pure_text('See You!') assert not trigger_hi_there.is_triggered assert trigger_see_you.is_triggered def test_serialize_text_equal_case_ignore(): m_old = text.EqualCaseIgnore('hats off') m_new = matchers.deserialize(matchers.serialize(m_old)) assert isinstance(m_new, text.EqualCaseIgnore) assert m_new.test_string == 'hats off' @pytest.mark.asyncio async def test_should_catch_text_message_that_match_regex(): trigger_buy = SimpleTrigger() trigger_sell = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.Match('buy (.*)btc')) def one_story(): @story.part() def then(ctx): trigger_buy.receive(text.get_text(ctx)['matches'][0]) @story.on(text.Match('sell (.*)btc')) def another_story(): @story.part() def then(ctx): trigger_sell.receive(text.get_text(ctx)['matches'][0]) await 
talk.pure_text('buy 700btc') await talk.pure_text('sell 600btc') assert trigger_buy.result() == '700' assert trigger_sell.result() == '600' @pytest.mark.asyncio async def test_should_catch_text_message_that_match_regex_with_flags(): trigger_destination = SimpleTrigger() with answer.Talk() as talk: story = talk.story @story.on(text.Match('going to (.*)', re.IGNORECASE)) def one_story(): @story.part() def then(ctx): logger.debug('ctx') logger.debug(ctx) trigger_destination.receive(text.get_text(ctx)['matches'][0]) await talk.pure_text('Going to Pripyat') assert trigger_destination.result() == 'Pripyat' @pytest.mark.asyncio async def test_should_not_fail_on_empty_message(): with answer.Talk() as talk: story = talk.story @story.on(text.Match('going to (.*)', re.IGNORECASE)) def one_story(): @story.part() def then(ctx): pass await talk.ask(None) def test_serialize_text_match(): m_old = text.Match('hello (.*)', re.IGNORECASE) m_new = matchers.deserialize(matchers.serialize(m_old)) assert isinstance(m_new, text.Match) assert m_new.matcher.match('Hello Piter!') def test_text_qual_should_handle_text(): assert isinstance(matchers.get_validator('just pure text'), text.Equal)
25.755459
77
0.623432
0
0
0
0
4,629
0.784842
4,440
0.752798
359
0.060868
a3e2215b6ec560d3033ce187558d53690b59cd03
33,955
py
Python
pywikibot/site/_datasite.py
xqt/pwb
9a4fe27138f32952e533256195849d05855df0b0
[ "MIT" ]
null
null
null
pywikibot/site/_datasite.py
xqt/pwb
9a4fe27138f32952e533256195849d05855df0b0
[ "MIT" ]
1
2021-12-08T16:29:41.000Z
2021-12-08T16:29:41.000Z
pywikibot/site/_datasite.py
xqt/pwb
9a4fe27138f32952e533256195849d05855df0b0
[ "MIT" ]
2
2022-01-04T04:10:38.000Z
2022-01-04T04:18:18.000Z
"""Objects representing API interface to Wikibase site.""" # # (C) Pywikibot team, 2012-2022 # # Distributed under the terms of the MIT license. # import datetime import json import uuid from contextlib import suppress from typing import Optional from warnings import warn import pywikibot from pywikibot.data import api from pywikibot.exceptions import ( APIError, EntityTypeUnknownError, IsRedirectPageError, NoPageError, NoWikibaseEntityError, ) from pywikibot.site._apisite import APISite from pywikibot.site._decorators import need_extension, need_right, need_version from pywikibot.tools import itergroup, merge_unique_dicts, remove_last_args __all__ = ('DataSite', ) class DataSite(APISite): """Wikibase data capable site.""" def __init__(self, *args, **kwargs) -> None: """Initializer.""" super().__init__(*args, **kwargs) self._item_namespace = None self._property_namespace = None self._type_to_class = { 'item': pywikibot.ItemPage, 'property': pywikibot.PropertyPage, 'mediainfo': pywikibot.MediaInfo, 'lexeme': pywikibot.LexemePage, 'form': pywikibot.LexemeForm, 'sense': pywikibot.LexemeSense, } def _cache_entity_namespaces(self) -> None: """Find namespaces for each known wikibase entity type.""" self._entity_namespaces = {} for entity_type in self._type_to_class: for namespace in self.namespaces.values(): if not hasattr(namespace, 'defaultcontentmodel'): continue content_model = namespace.defaultcontentmodel if content_model == ('wikibase-' + entity_type): self._entity_namespaces[entity_type] = namespace break def get_namespace_for_entity_type(self, entity_type): """ Return namespace for given entity type. :return: corresponding namespace :rtype: Namespace """ if not hasattr(self, '_entity_namespaces'): self._cache_entity_namespaces() if entity_type in self._entity_namespaces: return self._entity_namespaces[entity_type] raise EntityTypeUnknownError( '{!r} does not support entity type "{}" ' "or it doesn't have its own namespace" .format(self, entity_type)) @property def item_namespace(self): """ Return namespace for items. :return: item namespace :rtype: Namespace """ if self._item_namespace is None: self._item_namespace = self.get_namespace_for_entity_type('item') return self._item_namespace @property def property_namespace(self): """ Return namespace for properties. :return: property namespace :rtype: Namespace """ if self._property_namespace is None: self._property_namespace = self.get_namespace_for_entity_type( 'property') return self._property_namespace def get_entity_for_entity_id(self, entity_id): """ Return a new instance for given entity id. :raises pywikibot.exceptions.NoWikibaseEntityError: there is no entity with the id :return: a WikibaseEntity subclass :rtype: WikibaseEntity """ for cls in self._type_to_class.values(): if cls.is_valid_id(entity_id): return cls(self, entity_id) entity = pywikibot.page.WikibaseEntity(self, entity_id) raise NoWikibaseEntityError(entity) @property @need_version('1.28-wmf.3') def sparql_endpoint(self): """ Return the sparql endpoint url, if any has been set. :return: sparql endpoint url :rtype: str|None """ return self.siteinfo['general'].get('wikibase-sparql') @property @need_version('1.28-wmf.23') def concept_base_uri(self): """ Return the base uri for concepts/entities. :return: concept base uri :rtype: str """ return self.siteinfo['general']['wikibase-conceptbaseuri'] def geo_shape_repository(self): """Return Site object for the geo-shapes repository e.g. 
commons.""" url = self.siteinfo['general'].get('wikibase-geoshapestoragebaseurl') if url: return pywikibot.Site(url=url, user=self.username()) return None def tabular_data_repository(self): """Return Site object for the tabular-datas repository e.g. commons.""" url = self.siteinfo['general'].get( 'wikibase-tabulardatastoragebaseurl') if url: return pywikibot.Site(url=url, user=self.username()) return None def loadcontent(self, identification, *props): """ Fetch the current content of a Wikibase item. This is called loadcontent since wbgetentities does not support fetching old revisions. Eventually this will get replaced by an actual loadrevisions. :param identification: Parameters used to identify the page(s) :type identification: dict :param props: the optional properties to fetch. """ params = merge_unique_dicts(identification, action='wbgetentities', # TODO: When props is empty it results in # an empty string ('&props=') but it should # result in a missing entry. props=props if props else False) req = self.simple_request(**params) data = req.submit() if 'success' not in data: raise APIError(data['errors'], '') return data['entities'] def preload_entities(self, pagelist, groupsize: int = 50): """ Yield subclasses of WikibaseEntity's with content prefilled. Note that pages will be iterated in a different order than in the underlying pagelist. :param pagelist: an iterable that yields either WikibaseEntity objects, or Page objects linked to an ItemPage. :param groupsize: how many pages to query at a time """ if not hasattr(self, '_entity_namespaces'): self._cache_entity_namespaces() for sublist in itergroup(pagelist, groupsize): req = {'ids': [], 'titles': [], 'sites': []} for p in sublist: if isinstance(p, pywikibot.page.WikibaseEntity): ident = p._defined_by() for key in ident: req[key].append(ident[key]) else: if p.site == self and p.namespace() in ( self._entity_namespaces.values()): req['ids'].append(p.title(with_ns=False)) else: assert p.site.has_data_repository, \ 'Site must have a data repository' req['sites'].append(p.site.dbName()) req['titles'].append(p._link._text) req = self.simple_request(action='wbgetentities', **req) data = req.submit() for entity in data['entities']: if 'missing' in data['entities'][entity]: continue cls = self._type_to_class[data['entities'][entity]['type']] page = cls(self, entity) # No api call is made because item._content is given page._content = data['entities'][entity] with suppress(IsRedirectPageError): page.get() # cannot provide get_redirect=True (T145971) yield page def getPropertyType(self, prop): """ Obtain the type of a property. This is used specifically because we can cache the value for a much longer time (near infinite). """ params = {'action': 'wbgetentities', 'ids': prop.getID(), 'props': 'datatype'} expiry = datetime.timedelta(days=365 * 100) # Store it for 100 years req = self._request(expiry=expiry, parameters=params) data = req.submit() # the IDs returned from the API can be upper or lowercase, depending # on the version. See bug T55894 for more information. try: dtype = data['entities'][prop.getID()]['datatype'] except KeyError: dtype = data['entities'][prop.getID().lower()]['datatype'] return dtype @need_right('edit') def editEntity(self, entity, data, bot: bool = True, **kwargs): """ Edit entity. Note: This method is unable to create entities other than 'item' if dict with API parameters was passed to 'entity' parameter. 
:param entity: Page to edit, or dict with API parameters to use for entity identification :type entity: WikibaseEntity or dict :param data: data updates :type data: dict :param bot: Whether to mark the edit as a bot edit :return: New entity data :rtype: dict """ # this changes the reference to a new object data = dict(data) if isinstance(entity, pywikibot.page.WikibaseEntity): params = entity._defined_by(singular=True) if 'id' in params and params['id'] == '-1': del params['id'] if not params: params['new'] = entity.entity_type data_for_new_entity = entity.get_data_for_new_entity() data.update(data_for_new_entity) else: if 'id' in entity and entity['id'] == '-1': del entity['id'] params = dict(entity) if not params: # If no identification was provided params['new'] = 'item' params['action'] = 'wbeditentity' if bot: params['bot'] = 1 if 'baserevid' in kwargs and kwargs['baserevid']: params['baserevid'] = kwargs['baserevid'] params['token'] = self.tokens['edit'] for arg in kwargs: if arg in ['clear', 'summary']: params[arg] = kwargs[arg] elif arg != 'baserevid': warn('Unknown wbeditentity parameter {} ignored'.format(arg), UserWarning, 2) params['data'] = json.dumps(data) req = self.simple_request(**params) return req.submit() @need_right('edit') def addClaim(self, entity, claim, bot: bool = True, summary=None) -> None: """ Add a claim. :param entity: Entity to modify :type entity: WikibaseEntity :param claim: Claim to be added :type claim: pywikibot.Claim :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary :type summary: str """ claim.snak = entity.getID() + '$' + str(uuid.uuid4()) params = {'action': 'wbsetclaim', 'claim': json.dumps(claim.toJSON()), 'baserevid': entity.latest_revision_id, 'summary': summary, 'token': self.tokens['edit'], 'bot': bot, } req = self.simple_request(**params) data = req.submit() # Update the item if claim.getID() in entity.claims: entity.claims[claim.getID()].append(claim) else: entity.claims[claim.getID()] = [claim] entity.latest_revision_id = data['pageinfo']['lastrevid'] @need_right('edit') def changeClaimTarget(self, claim, snaktype: str = 'value', bot: bool = True, summary=None): """ Set the claim target to the value of the provided claim target. :param claim: The source of the claim target value :type claim: pywikibot.Claim :param snaktype: An optional snaktype ('value', 'novalue' or 'somevalue'). Default: 'value' :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary :type summary: str """ if claim.isReference or claim.isQualifier: raise NotImplementedError if not claim.snak: # We need to already have the snak value raise NoPageError(claim) params = {'action': 'wbsetclaimvalue', 'claim': claim.snak, 'snaktype': snaktype, 'summary': summary, 'bot': bot, 'token': self.tokens['edit']} if snaktype == 'value': params['value'] = json.dumps(claim._formatValue()) params['baserevid'] = claim.on_item.latest_revision_id req = self.simple_request(**params) return req.submit() @need_right('edit') def save_claim(self, claim, summary=None, bot: bool = True): """ Save the whole claim to the wikibase site. 
:param claim: The claim to save :type claim: pywikibot.Claim :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary :type summary: str """ if claim.isReference or claim.isQualifier: raise NotImplementedError if not claim.snak: # We need to already have the snak value raise NoPageError(claim) params = {'action': 'wbsetclaim', 'claim': json.dumps(claim.toJSON()), 'token': self.tokens['edit'], 'baserevid': claim.on_item.latest_revision_id, 'summary': summary, 'bot': bot, } req = self.simple_request(**params) data = req.submit() claim.on_item.latest_revision_id = data['pageinfo']['lastrevid'] return data @need_right('edit') @remove_last_args(['baserevid']) # since 7.0.0 def editSource(self, claim, source, new: bool = False, bot: bool = True, summary: Optional[str] = None): """Create/Edit a source. .. versionchanged:: 7.0 deprecated `baserevid` parameter was removed :param claim: A Claim object to add the source to :type claim: pywikibot.Claim :param source: A Claim object to be used as a source :type source: pywikibot.Claim :param new: Whether to create a new one if the "source" already exists :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary """ if claim.isReference or claim.isQualifier: raise ValueError('The claim cannot have a source.') params = {'action': 'wbsetreference', 'statement': claim.snak, 'baserevid': claim.on_item.latest_revision_id, 'summary': summary, 'bot': bot, 'token': self.tokens['edit']} # build up the snak if isinstance(source, list): sources = source else: sources = [source] snak = {} for sourceclaim in sources: datavalue = sourceclaim._formatDataValue() valuesnaks = snak.get(sourceclaim.getID(), []) valuesnaks.append({ 'snaktype': 'value', 'property': sourceclaim.getID(), 'datavalue': datavalue, }) snak[sourceclaim.getID()] = valuesnaks # set the hash if the source should be changed. # if present, all claims of one source have the same hash if not new and hasattr(sourceclaim, 'hash'): params['reference'] = sourceclaim.hash params['snaks'] = json.dumps(snak) req = self.simple_request(**params) return req.submit() @need_right('edit') @remove_last_args(['baserevid']) # since 7.0.0 def editQualifier(self, claim, qualifier, new: bool = False, bot: bool = True, summary: Optional[str] = None): """Create/Edit a qualifier. .. versionchanged:: 7.0 deprecated `baserevid` parameter was removed :param claim: A Claim object to add the qualifier to :type claim: pywikibot.Claim :param qualifier: A Claim object to be used as a qualifier :type qualifier: pywikibot.Claim :param new: Whether to create a new one if the "qualifier" already exists :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary """ if claim.isReference or claim.isQualifier: raise ValueError('The claim cannot have a qualifier.') params = {'action': 'wbsetqualifier', 'claim': claim.snak, 'baserevid': claim.on_item.latest_revision_id, 'summary': summary, 'bot': bot} if (not new and hasattr(qualifier, 'hash') and qualifier.hash is not None): params['snakhash'] = qualifier.hash params['token'] = self.tokens['edit'] # build up the snak if qualifier.getSnakType() == 'value': params['value'] = json.dumps(qualifier._formatValue()) params['snaktype'] = qualifier.getSnakType() params['property'] = qualifier.getID() req = self.simple_request(**params) return req.submit() @need_right('edit') @remove_last_args(['baserevid']) # since 7.0.0 def removeClaims(self, claims, bot: bool = True, summary: Optional[str] = None): """Remove claims. .. 
versionchanged:: 7.0 deprecated `baserevid` parameter was removed :param claims: Claims to be removed :type claims: List[pywikibot.Claim] :param bot: Whether to mark the edit as a bot edit :type bot: bool :param summary: Edit summary :type summary: str """ # Check on_item for all additional claims items = {claim.on_item for claim in claims if claim.on_item} assert len(items) == 1 baserevid = items.pop().latest_revision_id params = { 'action': 'wbremoveclaims', 'baserevid': baserevid, 'summary': summary, 'bot': bot, 'claim': '|'.join(claim.snak for claim in claims), 'token': self.tokens['edit'], } req = self.simple_request(**params) return req.submit() @need_right('edit') @remove_last_args(['baserevid']) # since 7.0.0 def removeSources(self, claim, sources, bot: bool = True, summary: Optional[str] = None): """Remove sources. .. versionchanged:: 7.0 deprecated `baserevid` parameter was removed :param claim: A Claim object to remove the sources from :type claim: pywikibot.Claim :param sources: A list of Claim objects that are sources :type sources: list :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary """ params = { 'action': 'wbremovereferences', 'baserevid': claim.on_item.latest_revision_id, 'summary': summary, 'bot': bot, 'statement': claim.snak, 'references': '|'.join(source.hash for source in sources), 'token': self.tokens['edit'], } req = self.simple_request(**params) return req.submit() @need_right('edit') @remove_last_args(['baserevid']) # since 7.0.0 def remove_qualifiers(self, claim, qualifiers, bot: bool = True, summary: Optional[str] = None): """Remove qualifiers. .. versionchanged:: 7.0 deprecated `baserevid` parameter was removed :param claim: A Claim object to remove the qualifier from :type claim: pywikibot.Claim :param qualifiers: Claim objects currently used as a qualifiers :type qualifiers: List[pywikibot.Claim] :param bot: Whether to mark the edit as a bot edit :param summary: Edit summary """ params = { 'action': 'wbremovequalifiers', 'claim': claim.snak, 'baserevid': claim.on_item.latest_revision_id, 'summary': summary, 'bot': bot, 'qualifiers': [qualifier.hash for qualifier in qualifiers], 'token': self.tokens['edit'] } req = self.simple_request(**params) return req.submit() @need_right('edit') def linkTitles(self, page1, page2, bot: bool = True): """ Link two pages together. :param page1: First page to link :type page1: pywikibot.Page :param page2: Second page to link :type page2: pywikibot.Page :param bot: Whether to mark the edit as a bot edit :return: dict API output :rtype: dict """ params = { 'action': 'wblinktitles', 'tosite': page1.site.dbName(), 'totitle': page1.title(), 'fromsite': page2.site.dbName(), 'fromtitle': page2.title(), 'token': self.tokens['edit'] } if bot: params['bot'] = 1 req = self.simple_request(**params) return req.submit() @need_right('item-merge') def mergeItems(self, from_item, to_item, ignore_conflicts=None, summary=None, bot: bool = True): """ Merge two items together. 
:param from_item: Item to merge from :type from_item: pywikibot.ItemPage :param to_item: Item to merge into :type to_item: pywikibot.ItemPage :param ignore_conflicts: Which type of conflicts ('description', 'sitelink', and 'statement') should be ignored :type ignore_conflicts: list of str :param summary: Edit summary :type summary: str :param bot: Whether to mark the edit as a bot edit :return: dict API output :rtype: dict """ params = { 'action': 'wbmergeitems', 'fromid': from_item.getID(), 'toid': to_item.getID(), 'ignoreconflicts': ignore_conflicts, 'token': self.tokens['edit'], 'summary': summary, } if bot: params['bot'] = 1 req = self.simple_request(**params) return req.submit() @need_right('item-merge') @need_extension('WikibaseLexeme') def mergeLexemes(self, from_lexeme, to_lexeme, summary=None, *, bot: bool = True) -> dict: """ Merge two lexemes together. :param from_lexeme: Lexeme to merge from :type from_lexeme: pywikibot.LexemePage :param to_lexeme: Lexeme to merge into :type to_lexeme: pywikibot.LexemePage :param summary: Edit summary :type summary: str :keyword bot: Whether to mark the edit as a bot edit :return: dict API output """ params = { 'action': 'wblmergelexemes', 'source': from_lexeme.getID(), 'target': to_lexeme.getID(), 'token': self.tokens['edit'], 'summary': summary, } if bot: params['bot'] = 1 req = self.simple_request(**params) data = req.submit() return data @need_right('item-redirect') def set_redirect_target(self, from_item, to_item, bot: bool = True): """ Make a redirect to another item. :param to_item: title of target item. :type to_item: pywikibot.ItemPage :param from_item: Title of the item to be redirected. :type from_item: pywikibot.ItemPage :param bot: Whether to mark the edit as a bot edit """ params = { 'action': 'wbcreateredirect', 'from': from_item.getID(), 'to': to_item.getID(), 'token': self.tokens['edit'], 'bot': bot, } req = self.simple_request(**params) return req.submit() def search_entities(self, search: str, language: str, total: Optional[int] = None, **kwargs): """ Search for pages or properties that contain the given text. :param search: Text to find. :param language: Language to search in. :param total: Maximum number of pages to retrieve in total, or None in case of no limit. :return: 'search' list from API output. :rtype: Generator """ lang_codes = self._paraminfo.parameter('wbsearchentities', 'language')['type'] if language not in lang_codes: raise ValueError('Data site used does not support provided ' 'language.') if 'site' in kwargs: if kwargs['site'].sitename != self.sitename: raise ValueError('The site given in the kwargs is different.') warn('search_entities should not get a site via kwargs.', UserWarning, 2) del kwargs['site'] parameters = dict(search=search, language=language, **kwargs) gen = self._generator(api.APIGenerator, type_arg='wbsearchentities', data_name='search', total=total, parameters=parameters) return gen @need_right('edit') def _wbset_action(self, itemdef, action: str, action_data, **kwargs) -> dict: """ Execute wbset{action} on a Wikibase entity. 
Supported actions are: wbsetaliases, wbsetdescription, wbsetlabel and wbsetsitelink :param itemdef: Entity to modify or create :type itemdef: str, WikibaseEntity or Page connected to such item :param action: wbset{action} to perform: 'wbsetaliases', 'wbsetdescription', 'wbsetlabel', 'wbsetsitelink' :param action_data: data to be used in API request, see API help :type action_data: SiteLink or dict wbsetaliases: dict shall have the following structure: {'language': value (str), 'add': list of language codes (str), 'remove': list of language codes (str), 'set' list of language codes (str) } 'add' and 'remove' are alternative to 'set' wbsetdescription and wbsetlabel: dict shall have keys 'language', 'value' wbsetsitelink: dict shall have keys 'linksite', 'linktitle' and optionally 'badges' :keyword bot: Whether to mark the edit as a bot edit, default is True :type bot: bool :keyword tags: Change tags to apply with the edit :type tags: list of str :return: query result :raises AssertionError, TypeError """ def format_sitelink(sitelink): """Convert SiteLink to a dict accepted by wbsetsitelink API.""" if isinstance(sitelink, pywikibot.page.SiteLink): _dict = { 'linksite': sitelink._sitekey, 'linktitle': sitelink._rawtitle, 'badges': '|'.join([b.title() for b in sitelink.badges]), } else: _dict = sitelink return _dict def prepare_data(action, data): """Prepare data as expected by API.""" if action == 'wbsetaliases': res = data keys = set(res) assert keys < {'language', 'add', 'remove', 'set'} assert 'language' in keys assert ({'add', 'remove', 'set'} & keys) assert ({'add', 'set'} >= keys) assert ({'remove', 'set'} >= keys) elif action in ('wbsetlabel', 'wbsetdescription'): res = data keys = set(res) assert keys == {'language', 'value'} elif action == 'wbsetsitelink': res = format_sitelink(data) keys = set(res) assert keys >= {'linksite'} assert keys <= {'linksite', 'linktitle', 'badges'} else: raise ValueError('Something has gone wrong ...') return res # Supported actions assert action in ('wbsetaliases', 'wbsetdescription', 'wbsetlabel', 'wbsetsitelink'), \ 'action {} not supported.'.format(action) # prefer ID over (site, title) if isinstance(itemdef, str): itemdef = self.get_entity_for_entity_id(itemdef) elif isinstance(itemdef, pywikibot.Page): itemdef = pywikibot.ItemPage.fromPage(itemdef, lazy_load=True) elif not isinstance(itemdef, pywikibot.page.WikibaseEntity): raise TypeError('itemdef shall be str, WikibaseEntity or Page') params = itemdef._defined_by(singular=True) # TODO: support 'new' baserevid = kwargs.pop( 'baserevid', itemdef.latest_revision_id if 'id' in params else 0 ) params.update( {'baserevid': baserevid, 'action': action, 'token': self.tokens['edit'], 'bot': kwargs.pop('bot', True), }) params.update(prepare_data(action, action_data)) for arg in kwargs: if arg in ['summary', 'tags']: params[arg] = kwargs[arg] else: warn('Unknown parameter {} for action {}, ignored' .format(arg, action), UserWarning, 2) req = self.simple_request(**params) data = req.submit() return data def wbsetaliases(self, itemdef, aliases, **kwargs): """ Set aliases for a single Wikibase entity. See self._wbset_action() for parameters """ return self._wbset_action(itemdef, 'wbsetaliases', aliases, **kwargs) def wbsetdescription(self, itemdef, description, **kwargs): """ Set description for a single Wikibase entity. 
See self._wbset_action() """ return self._wbset_action(itemdef, 'wbsetdescription', description, **kwargs) def wbsetlabel(self, itemdef, label, **kwargs): """ Set label for a single Wikibase entity. See self._wbset_action() for parameters """ return self._wbset_action(itemdef, 'wbsetlabel', label, **kwargs) def wbsetsitelink(self, itemdef, sitelink, **kwargs): """ Set, remove or modify a sitelink on a Wikibase item. See self._wbset_action() for parameters """ return self._wbset_action(itemdef, 'wbsetsitelink', sitelink, **kwargs) @need_right('edit') @need_extension('WikibaseLexeme') def add_form(self, lexeme, form, *, bot: bool = True, baserevid=None) -> dict: """ Add a form. :param lexeme: Lexeme to modify :type lexeme: pywikibot.LexemePage :param form: Form to be added :type form: pywikibot.LexemeForm :keyword bot: Whether to mark the edit as a bot edit :keyword baserevid: Base revision id override, used to detect conflicts. :type baserevid: long """ params = { 'action': 'wbladdform', 'lexemeId': lexeme.getID(), 'data': json.dumps(form.toJSON()), 'bot': bot, 'token': self.tokens['edit'], } if baserevid: params['baserevid'] = baserevid req = self.simple_request(**params) data = req.submit() return data @need_right('edit') @need_extension('WikibaseLexeme') def remove_form(self, form, *, bot: bool = True, baserevid=None) -> dict: """ Remove a form. :param form: Form to be removed :type form: pywikibot.LexemeForm :keyword bot: Whether to mark the edit as a bot edit :keyword baserevid: Base revision id override, used to detect conflicts. :type baserevid: long """ params = { 'action': 'wblremoveform', 'id': form.getID(), 'bot': bot, 'token': self.tokens['edit'], } if baserevid: params['baserevid'] = baserevid req = self.simple_request(**params) data = req.submit() return data @need_right('edit') @need_extension('WikibaseLexeme') def edit_form_elements(self, form, data, *, bot: bool = True, baserevid=None) -> dict: """ Edit lexeme form elements. :param form: Form :type form: pywikibot.LexemeForm :param data: data updates :type data: dict :keyword bot: Whether to mark the edit as a bot edit :keyword baserevid: Base revision id override, used to detect conflicts. :type baserevid: long :return: New form data """ params = { 'action': 'wbleditformelements', 'formId': form.getID(), 'data': json.dumps(data), 'bot': bot, 'token': self.tokens['edit'], } if baserevid: params['baserevid'] = baserevid req = self.simple_request(**params) data = req.submit() return data
36.160809
79
0.560389
33,255
0.979384
2,012
0.059255
23,907
0.704079
0
0
15,047
0.443145
a3e3eb5e33cc147796a90e6e65542a513c75576b
1,210
py
Python
app.py
MisaelVillaverde/fourier-calculator
fd50cd292e333c1a9d75e93962a0aaa0985ecef9
[ "MIT" ]
null
null
null
app.py
MisaelVillaverde/fourier-calculator
fd50cd292e333c1a9d75e93962a0aaa0985ecef9
[ "MIT" ]
1
2021-11-07T04:40:13.000Z
2021-11-07T04:40:13.000Z
app.py
MisaelVillaverde/fourier-calculator
fd50cd292e333c1a9d75e93962a0aaa0985ecef9
[ "MIT" ]
null
null
null
from flask import Flask
from flask import render_template, request
from flask import jsonify
import requests
import json

app = Flask(__name__)

@app.route("/symbo",methods=['POST'])
def symbo():
    #import pdb; pdb.set_trace()
    session = requests.session()
    token = session.get("https://es.symbolab.com/solver/step-by-step/x%5E%7B2%7D?or=input").cookies.get_dict()["sy2.pub.token"]

    query = request.json["expression"]

    #response = json.loads(session.get(f"https://es.symbolab.com/pub_api/steps?subscribed=true&origin=input&language=es&query=%5Cint+tcos%5Cleft(nt%5Cright)dt+&referer=https%3A%2F%2Fes.symbolab.com%2Fsolver%2Fstep-by-step%2F%255Cint_%257B%2520%257Dtcos%255Cleft(nt%255Cright)dt%2520%3For%3Dinput&plotRequest=PlotOptional&page=step-by-step",headers={
    response = json.loads(session.get(f"https://es.symbolab.com/pub_api/steps?subscribed=true&origin=input&language=es&query={query}",headers={
        "x-requested-with":"XMLHttpRequest",
        "authorization":f"Bearer {token}"
    }).content)

    return {
        "dym":response["dym"],
        "solutions":response["solutions"]
    }

@app.route('/')
def hello():
    return render_template('index.html')

app.run(debug=True)
41.724138
349
0.717355
0
0
0
0
1,041
0.860331
0
0
688
0.568595
a3e55a939b6d954bcaed4fd506083967468d2eb3
1,584
py
Python
my_code/Chapter_2.py
kalona/Spark-The-Definitive-Guide
0b495c4710b2030aa59d5a7f4053ee0a8345d0d8
[ "Apache-2.0" ]
2
2022-01-02T14:24:29.000Z
2022-01-02T15:54:47.000Z
my_code/Chapter_2.py
kalona/Spark-The-Definitive-Guide
0b495c4710b2030aa59d5a7f4053ee0a8345d0d8
[ "Apache-2.0" ]
null
null
null
my_code/Chapter_2.py
kalona/Spark-The-Definitive-Guide
0b495c4710b2030aa59d5a7f4053ee0a8345d0d8
[ "Apache-2.0" ]
null
null
null
from pyspark.sql import SparkSession

# spark = SparkSession.builder.master("local[*]").getOrCreate()
spark = SparkSession.builder.getOrCreate()

file_path = "C:\home_work\local_github\Spark-The-Definitive-Guide\data\/flight-data\csv\/2015-summary.csv"

# COMMAND ----------

# COMMAND ----------

flightData2015 = spark\
  .read\
  .option("inferSchema", "true")\
  .option("header", "true")\
  .csv("./data/flight-data/csv/2015-summary.csv")

# COMMAND ----------

flightData2015.createOrReplaceTempView("flight_data_2015")

# COMMAND ----------

sqlWay = spark.sql("""
SELECT DEST_COUNTRY_NAME, count(1)
FROM flight_data_2015
GROUP BY DEST_COUNTRY_NAME
""")

dataFrameWay = flightData2015\
  .groupBy("DEST_COUNTRY_NAME")\
  .count()

sqlWay.explain()
dataFrameWay.explain()

# COMMAND ----------

from pyspark.sql.functions import max, col
# flightData2015.select(max(col("count"))).show(1)

# COMMAND ----------

maxSql = spark.sql("""
SELECT DEST_COUNTRY_NAME, sum(count) as destination_total
FROM flight_data_2015
GROUP BY DEST_COUNTRY_NAME
ORDER BY sum(count) DESC
LIMIT 5
""")

maxSql.show()

# COMMAND ----------

from pyspark.sql.functions import desc

flightData2015\
  .groupBy("DEST_COUNTRY_NAME")\
  .sum("count")\
  .withColumnRenamed("sum(count)", "destination_total")\
  .sort(desc("destination_total"))\
  .limit(5)\
  .show()

# COMMAND ----------

flightData2015\
  .groupBy("DEST_COUNTRY_NAME")\
  .sum("count")\
  .withColumnRenamed("sum(count)", "destination_total")\
  .sort(desc("destination_total"))\
  .limit(5)\
  .explain()

# COMMAND ----------
18.418605
106
0.680556
0
0
0
0
0
0
0
0
846
0.534091
a3e58f9e7062eea97241b4b05b8e709ab53b50c3
7,508
py
Python
tests/test_intake_postgres.py
ContinuumIO/intake-postgres
fda7f7b2b6255544ea7ffd365a4ac8b2655fd226
[ "BSD-2-Clause" ]
2
2018-11-26T00:14:10.000Z
2018-12-21T01:52:44.000Z
tests/test_intake_postgres.py
ContinuumIO/intake-postgres
fda7f7b2b6255544ea7ffd365a4ac8b2655fd226
[ "BSD-2-Clause" ]
1
2018-12-20T08:41:05.000Z
2018-12-21T15:00:08.000Z
tests/test_intake_postgres.py
ContinuumIO/intake-postgres
fda7f7b2b6255544ea7ffd365a4ac8b2655fd226
[ "BSD-2-Clause" ]
3
2018-12-19T08:34:14.000Z
2019-01-24T07:58:32.000Z
import os import pickle import pytest import pandas as pd from shapely import wkt from intake_postgres import PostgresSource from intake import open_catalog from .util import verify_datasource_interface TEST_DATA_DIR = 'tests' TEST_DATA = [ ('sample1', 'sample1.csv'), ('sample2_1', 'sample2_1.csv'), ('sample2_2', 'sample2_2.csv'), ] TEST_GIS_DATA = [ ('points', 'sample_points.psql'), ('multipoints', 'sample_multipoints.psql'), ('lines', 'sample_lines.psql'), ('multilines', 'sample_multilines.psql'), ('polygons', 'sample_polygons.psql'), ('multipolygons', 'sample_multipolygons.psql'), # ('triangles', 'sample_triangles.psql'), ] TEST_TEMPLATE_DATA = [ 'jinja2_params_with_env', ] @pytest.fixture(scope='module') def engine(): """Start docker container for PostgreSQL database, yield a tuple (engine, metadata), and cleanup connection afterward.""" from .util import start_postgres, stop_postgres from sqlalchemy import create_engine stop_postgres(let_fail=True) local_port = start_postgres() uri = 'postgresql://postgres@localhost:{}/postgres'.format(local_port) engine = create_engine(uri) for table_name, csv_fname in TEST_DATA: csv_fpath = os.path.join(TEST_DATA_DIR, csv_fname) df = pd.read_csv(csv_fpath) df.to_sql(table_name, engine, index=False) for table_name, psql_fname in TEST_GIS_DATA: psql_fpath = os.path.join(TEST_DATA_DIR, psql_fname) with engine.connect() as conn: with open(psql_fpath, 'r') as fp: cmds = fp.read().strip().split(';') for cmd in cmds: if cmd.strip(): conn.execute(' '.join(cmd.split())) try: yield engine finally: stop_postgres() @pytest.mark.parametrize('table_name,_', TEST_DATA) def test_open(engine, table_name, _): d = PostgresSource(str(engine.url), 'select * from '+table_name) assert d.container == 'dataframe' assert d.description is None verify_datasource_interface(d) @pytest.mark.parametrize('table_name,csv_fpath', TEST_DATA) def test_discover(engine, table_name, csv_fpath): expected_df = pd.read_csv(os.path.join(TEST_DATA_DIR, csv_fpath)) source = PostgresSource(str(engine.url), 'select * from '+table_name) info = source.discover() dt = {k: str(v) for k, v in expected_df.dtypes.to_dict().items()} assert info['dtype'] == dt assert info['shape'] == (None, 3) assert info['npartitions'] == 1 @pytest.mark.parametrize('table_name,csv_fpath', TEST_DATA) def test_read(engine, table_name, csv_fpath): expected_df = pd.read_csv(os.path.join(TEST_DATA_DIR, csv_fpath)) source = PostgresSource(str(engine.url), 'select * from '+table_name) df = source.read() assert expected_df.equals(df) @pytest.mark.parametrize('table_name,csv_fpath', TEST_DATA) def test_discover_after_read(engine, table_name, csv_fpath): """Assert that after reading the dataframe, discover() shows more accurate information. 
""" expected_df = pd.read_csv(os.path.join(TEST_DATA_DIR, csv_fpath)) source = PostgresSource(str(engine.url), 'select * from '+table_name) info = source.discover() dt = {k: str(v) for k, v in expected_df.dtypes.to_dict().items()} assert info['dtype'] == dt assert info['shape'] == (None, 3) assert info['npartitions'] == 1 df = source.read() assert expected_df.equals(df) info = source.discover() assert info['dtype'] == dt assert info['shape'] == (4, 3) assert info['npartitions'] == 1 assert expected_df.equals(df) @pytest.mark.parametrize('table_name,csv_fpath', TEST_DATA) def test_close(engine, table_name, csv_fpath): expected_df = pd.read_csv(os.path.join(TEST_DATA_DIR, csv_fpath)) source = PostgresSource(str(engine.url), 'select * from '+table_name) source.close() # Can reopen after close df = source.read() assert expected_df.equals(df) @pytest.mark.parametrize('table_name,csv_fpath', TEST_DATA) def test_pickle(engine, table_name, csv_fpath): source = PostgresSource(str(engine.url), 'select * from '+table_name) pickled_source = pickle.dumps(source) source_clone = pickle.loads(pickled_source) expected_df = source.read() df = source_clone.read() assert expected_df.equals(df) @pytest.mark.parametrize('table_name,_1', TEST_DATA) def test_catalog(engine, table_name, _1): catalog_fpath = os.path.join(TEST_DATA_DIR, 'catalog1.yml') catalog = open_catalog(catalog_fpath) ds_name = table_name.rsplit('_idx', 1)[0] src = catalog[ds_name] pgsrc = src.get() pgsrc._uri = str(engine.url) assert src.describe()['container'] == 'dataframe' assert src.describe_open()['plugin'] == 'postgres' assert src.describe_open()['args']['sql_expr'][:6] in ('select', 'SELECT') metadata = pgsrc.discover() assert metadata['npartitions'] == 1 expected_df = pd.read_sql_query(pgsrc._sql_expr, engine) df = pgsrc.read() assert expected_df.equals(df) pgsrc.close() def test_catalog_join(engine): catalog_fpath = os.path.join(TEST_DATA_DIR, 'catalog1.yml') catalog = open_catalog(catalog_fpath) ds_name = 'sample2' src = catalog[ds_name] pgsrc = src.get() pgsrc._uri = str(engine.url) assert src.describe()['container'] == 'dataframe' assert src.describe_open()['plugin'] == 'postgres' assert src.describe_open()['args']['sql_expr'][:6] in ('select', 'SELECT') metadata = pgsrc.discover() assert metadata['npartitions'] == 1 expected_df = pd.read_sql_query(pgsrc._sql_expr, engine) df = pgsrc.read() assert expected_df.equals(df) pgsrc.close() @pytest.mark.parametrize('table_name,_1', TEST_GIS_DATA) def test_postgis_data(engine, table_name, _1): from sqlalchemy import MetaData catalog_fpath = os.path.join(TEST_DATA_DIR, 'catalog1.yml') catalog = open_catalog(catalog_fpath) ds_name = table_name src = catalog[ds_name] pgsrc = src.get() pgsrc._uri = str(engine.url) assert src.describe()['container'] == 'dataframe' assert src.describe_open()['plugin'] == 'postgres' assert src.describe_open()['args']['sql_expr'][:6] in ('select', 'SELECT') metadata = pgsrc.discover() assert metadata['npartitions'] == 1 meta = MetaData() meta.reflect(bind=engine) col_exprs = ['ST_AsText({0}) as {0}'.format(col.name) for col in meta.tables[table_name].columns] _query = pgsrc._sql_expr.replace('*', ', '.join(col_exprs)) expected_df = pd.read_sql_query(_query, engine).applymap( lambda geom: str(wkt.loads(geom)) ) df = pgsrc.read().applymap(lambda geom: str(wkt.loads(geom))) assert expected_df.equals(df) pgsrc.close() @pytest.mark.parametrize('ds_name', TEST_TEMPLATE_DATA) def test_jinja2(engine, ds_name): catalog_fpath = os.path.join(TEST_DATA_DIR, 'catalog1.yml') 
catalog = open_catalog(catalog_fpath) src = catalog[ds_name] pgsrc = src.get() pgsrc._uri = str(engine.url) assert src.describe()['container'] == 'dataframe' assert src.describe_open()['plugin'] == 'postgres' assert src.describe_open()['args']['sql_expr'][:6] in ('select', 'SELECT') metadata = pgsrc.discover() assert metadata['npartitions'] == 1 expected_df = pd.read_sql_query(pgsrc._sql_expr, engine) df = pgsrc.read() assert expected_df.equals(df) pgsrc.close()
31.546218
78
0.678876
0
0
1,033
0.137587
6,100
0.812467
0
0
1,450
0.193127
a3e6e2cb9c18b7306bf960a8fcbaf212c1159394
351
py
Python
Module_3/testImage.py
dks1018/CoffeeShopCoding
13ac1700673c86c601eb2758570920620a956e4c
[ "ADSL" ]
null
null
null
Module_3/testImage.py
dks1018/CoffeeShopCoding
13ac1700673c86c601eb2758570920620a956e4c
[ "ADSL" ]
null
null
null
Module_3/testImage.py
dks1018/CoffeeShopCoding
13ac1700673c86c601eb2758570920620a956e4c
[ "ADSL" ]
null
null
null
# file = open('C:\\Users\\dks10\\OneDrive\\Desktop\\Projects\\Code\\Python\\PythonCrypto\\Module_3\\eye.png', 'rb')
file = open('encrypt_eye.png', 'rb')
image = file.read()
file.close()

image = bytearray(image)

key = 48

for index, value in enumerate(image):
    image[index] = value^key

file = open('2eye.png','wb')
file.write(image)
file.close()
21.9375
115
0.675214
0
0
0
0
0
0
0
0
150
0.42735
a3e8a92c23b5ddc471c49e37f3c8dc3fb274d2ab
1,702
py
Python
ledfxcontroller/effects/temporal.py
Aircoookie/LedFx
95628fc237497dd89aaf30fdbf88f780f3330166
[ "MIT" ]
17
2018-08-31T05:51:09.000Z
2022-02-12T15:41:33.000Z
ledfxcontroller/effects/temporal.py
Aircoookie/LedFx
95628fc237497dd89aaf30fdbf88f780f3330166
[ "MIT" ]
null
null
null
ledfxcontroller/effects/temporal.py
Aircoookie/LedFx
95628fc237497dd89aaf30fdbf88f780f3330166
[ "MIT" ]
5
2019-07-15T22:12:45.000Z
2022-02-05T10:50:44.000Z
import time
import logging
from ledfxcontroller.effects import Effect
from threading import Thread
import voluptuous as vol

_LOGGER = logging.getLogger(__name__)
DEFAULT_RATE = 1.0 / 60.0

@Effect.no_registration
class TemporalEffect(Effect):
    _thread_active = False
    _thread = None

    CONFIG_SCHEMA = vol.Schema({
        vol.Required('speed', default = 1.0): float
    })

    def thread_function(self):

        while self._thread_active:
            startTime = time.time()

            # Treat the return value of the effect loop as a speed modifier
            # such that effects that are nartually faster or slower can have
            # a consistent feel.
            sleepInterval = self.effect_loop()
            if sleepInterval is None:
                sleepInterval = 1.0
            sleepInterval = sleepInterval * DEFAULT_RATE

            # Calculate the time to sleep accounting for potential heavy
            # frame assembly operations
            timeToSleep = (sleepInterval / self._config['speed']) - (time.time() - startTime)
            if timeToSleep > 0:
                time.sleep(timeToSleep)

    def effect_loop(self):
        """
        Triggered periodically based on the effect speed and
        any additional effect modifiers
        """
        pass

    def activate(self, pixel_count):
        super().activate(pixel_count)

        self._thread_active = True
        self._thread = Thread(target = self.thread_function)
        self._thread.start()

    def deactivate(self):
        if self._thread_active:
            self._thread_active = False
            self._thread.join()
            self._thread = None

        super().deactivate()
28.847458
93
0.621622
1,487
0.873678
0
0
1,511
0.887779
0
0
365
0.214454
a3eb6e2df01a9295d0fd4c9d2d237ab568ea9c17
62
py
Python
07/c/3 - Square Census.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
07/c/3 - Square Census.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
07/c/3 - Square Census.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
n=int(input())
c = 1
while c**2 < n:
    print(c**2)
    c += 1
10.333333
15
0.451613
0
0
0
0
0
0
0
0
0
0
a3ec4aae5421f3c1473f18af462a1b949c04b4de
1,796
py
Python
utils.py
LuChang-CS/sherbet
d1061aca108eab8e0ccbd2202460e25261fdf1d5
[ "Apache-2.0" ]
2
2022-01-26T05:38:04.000Z
2022-03-20T08:54:18.000Z
utils.py
LuChang-CS/sherbet
d1061aca108eab8e0ccbd2202460e25261fdf1d5
[ "Apache-2.0" ]
null
null
null
utils.py
LuChang-CS/sherbet
d1061aca108eab8e0ccbd2202460e25261fdf1d5
[ "Apache-2.0" ]
null
null
null
import numpy as np


class DataGenerator:
    def __init__(self, inputs, shuffle=True, batch_size=32):
        assert len(inputs) > 0
        self.inputs = inputs
        self.idx = np.arange(len(inputs[0]))
        self.shuffle = shuffle
        self.batch_size = batch_size
        self.on_epoch_end()

    def data_length(self):
        return len(self.idx)

    def __len__(self):
        n = self.data_length()
        len_ = n // self.batch_size
        return len_ if n % self.batch_size == 0 else len_ + 1

    def __getitem__(self, index):
        start = index * self.batch_size
        end = start + self.batch_size
        index = self.idx[start:end]
        data = []
        for x in self.inputs:
            data.append(x[index])
        return data

    def on_epoch_end(self):
        if self.shuffle:
            np.random.shuffle(self.idx)

    def set_batch_size(self, batch_size):
        self.batch_size = batch_size


def lr_decay(total_epoch, init_lr, split_val):
    lr_map = [init_lr] * total_epoch
    if len(split_val) > 0:
        assert split_val[0][0] > 1
        assert split_val[-1][0] <= total_epoch
        current_split_index = 0
        current_lr = init_lr
        next_epoch, next_lr = split_val[current_split_index]
        for i in range(total_epoch):
            if i < next_epoch - 1:
                lr_map[i] = current_lr
            else:
                current_lr = next_lr
                lr_map[i] = current_lr
                current_split_index += 1
                if current_split_index >= len(split_val):
                    next_epoch = total_epoch + 1
                else:
                    next_epoch, next_lr = split_val[current_split_index]

    def lr_schedule_fn(epoch, lr):
        return lr_map[epoch]

    return lr_schedule_fn
28.967742
72
0.577394
917
0.510579
0
0
0
0
0
0
0
0
a3ec779913e7a7957725c231bcea5cdaa55dcfbf
810
py
Python
Version1_STI.py
sudhanshu55/Speech_to_Image
7a047725b3167cfcb7a68004b3c35b2ece75fde4
[ "MIT" ]
null
null
null
Version1_STI.py
sudhanshu55/Speech_to_Image
7a047725b3167cfcb7a68004b3c35b2ece75fde4
[ "MIT" ]
null
null
null
Version1_STI.py
sudhanshu55/Speech_to_Image
7a047725b3167cfcb7a68004b3c35b2ece75fde4
[ "MIT" ]
null
null
null
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.corpus import stopwords
import speech_recognition as sr
import nltk
from google_images_download import google_images_download

response = google_images_download.googleimagesdownload()
r = sr.Recognizer()
with sr.Microphone() as source:
    print("Say something!")
    audio = r.listen(source)
data = r.recognize_google(audio).encode("utf-8")
print (data)

stopWords = set(stopwords.words('english'))
words = word_tokenize(data)
wordsFiltered = []

for w in words:
    if w not in stopWords:
        wordsFiltered.append(w)

into_string = str(wordsFiltered)
print(into_string)

arguments = {"keywords":into_string,"limit":2,"print_urls":True}   #creating list of arguments
response.download(arguments)   #passing the arguments to the function
32.4
94
0.769136
0
0
0
0
0
0
0
0
126
0.155556
a3ee75515f2face15871213c79bd272768d90d2f
231
py
Python
src/models.py
jonathanlloyd/scratchstack-httpserver
72f9bb5b1673b132786d94c017dbf2d370886b79
[ "MIT" ]
null
null
null
src/models.py
jonathanlloyd/scratchstack-httpserver
72f9bb5b1673b132786d94c017dbf2d370886b79
[ "MIT" ]
null
null
null
src/models.py
jonathanlloyd/scratchstack-httpserver
72f9bb5b1673b132786d94c017dbf2d370886b79
[ "MIT" ]
null
null
null
from dataclasses import dataclass


@dataclass
class Request:
    method: str
    path: str
    headers: dict
    body: bytes


@dataclass
class Response:
    status_code: int
    reason_phrase: str
    headers: dict
    body: bytes
14.4375
33
0.688312
171
0.74026
0
0
193
0.835498
0
0
0
0
a3ee8470edc038ce5afdd46d3446170b34e86c08
6,596
py
Python
hoomd/communicator.py
EdwardZX/hoomd-blue
c87ac3f136534e8a80359a2faceeb730f445da21
[ "BSD-3-Clause" ]
204
2018-11-26T21:15:14.000Z
2022-03-31T17:17:21.000Z
hoomd/communicator.py
EdwardZX/hoomd-blue
c87ac3f136534e8a80359a2faceeb730f445da21
[ "BSD-3-Clause" ]
769
2019-02-15T08:58:04.000Z
2022-03-31T17:36:48.000Z
hoomd/communicator.py
YMWani/hoomd-blue
e574b49f0c2c6df3a1eac9cbb86fe612f1ee4c18
[ "BSD-3-Clause" ]
91
2018-10-04T21:07:46.000Z
2022-03-26T02:44:11.000Z
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.

"""MPI communicator."""

from hoomd import _hoomd
import hoomd

import contextlib


class Communicator(object):
    """MPI communicator.

    Args:
        mpi_comm: Accepts an mpi4py communicator. Use this argument to perform
            many independent hoomd simulations where you communicate between
            those simulations using mpi4py.
        ranks_per_partition (int): (MPI) Number of ranks to include in a
            partition.

    `Communicator` initialize MPI communications for a `hoomd.Simulation`. To
    use MPI, launch your Python script with an MPI launcher (e.g. ``mpirun`` or
    ``mpiexec``). By default, `Communicator` uses all ranks provided by the
    launcher ``num_launch_ranks`` for a single `hoomd.Simulation` object which
    decomposes the state onto that many domains.

    Set ``ranks_per_partition`` to an integer to partition launched ranks into
    ``num_launch_ranks / ranks_per_partition`` communicators, each with their
    own `partition` index. Use this to perform many simulations in parallel,
    for example by using `partition` as an index into an array of state points
    to execute.
    """

    def __init__(self, mpi_comm=None, ranks_per_partition=None):
        # check ranks_per_partition
        if ranks_per_partition is not None:
            if not hoomd.version.mpi_enabled:
                raise RuntimeError(
                    "The ranks_per_partition option is only available in MPI.\n"
                )

        mpi_available = hoomd.version.mpi_enabled

        self.cpp_mpi_conf = None

        # create the specified configuration
        if mpi_comm is None:
            self.cpp_mpi_conf = _hoomd.MPIConfiguration()
        else:
            if not mpi_available:
                raise RuntimeError("mpi_comm is not supported in serial builds")

            handled = False

            # pass in pointer to MPI_Comm object provided by mpi4py
            try:
                import mpi4py
                if isinstance(mpi_comm, mpi4py.MPI.Comm):
                    addr = mpi4py.MPI._addressof(mpi_comm)
                    self.cpp_mpi_conf = \
                        _hoomd.MPIConfiguration._make_mpi_conf_mpi_comm(addr)
                    handled = True
            except ImportError:
                # silently ignore when mpi4py is missing
                pass

            # undocumented case: handle plain integers as pointers to MPI_Comm
            # objects
            if not handled and isinstance(mpi_comm, int):
                self.cpp_mpi_conf = \
                    _hoomd.MPIConfiguration._make_mpi_conf_mpi_comm(mpi_comm)
                handled = True

            if not handled:
                raise RuntimeError(
                    "Invalid mpi_comm object: {}".format(mpi_comm))

        if ranks_per_partition is not None:
            # check validity
            if (self.cpp_mpi_conf.getNRanksGlobal() % ranks_per_partition):
                raise RuntimeError('Total number of ranks is not a multiple of '
                                   'ranks_per_partition.')

            # split the communicator into partitions
            self.cpp_mpi_conf.splitPartitions(ranks_per_partition)

    @property
    def num_ranks(self):
        """int: The number of ranks in this partition.

        When initialized with ``ranks_per_partition=None``, `num_ranks` is
        equal to the ``num_launch_ranks`` set by the MPI launcher. When using
        partitions, `num_ranks` is equal to ``ranks_per_partition``.

        Note:
            Returns 1 in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            return self.cpp_mpi_conf.getNRanks()
        else:
            return 1

    @property
    def rank(self):
        """int: The current rank within the partition.

        Note:
            Returns 0 in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            return self.cpp_mpi_conf.getRank()
        else:
            return 0

    @property
    def num_partitions(self):
        """int: The number of partitions in this execution.

        Create partitions with the ``ranks_per_partition`` argument on
        initialization. Then, the number of partitions is
        ``num_launch_ranks / ranks_per_partition``.

        Note:
            Returns 1 in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            return self.cpp_mpi_conf.getNPartitions()
        else:
            return 1

    @property
    def partition(self):
        """int: The current partition.

        Note:
            Returns 0 in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            return self.cpp_mpi_conf.getPartition()
        else:
            return 0

    def barrier_all(self):
        """Perform a MPI barrier synchronization across all ranks.

        Note:
            Does nothing in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            _hoomd.mpi_barrier_world()

    def barrier(self):
        """Perform a barrier synchronization across all ranks in the partition.

        Note:
            Does nothing in builds with ENABLE_MPI=off.
        """
        if hoomd.version.mpi_enabled:
            self.cpp_mpi_conf.barrier()

    @contextlib.contextmanager
    def localize_abort(self):
        """Localize MPI_Abort to this partition.

        HOOMD calls ``MPI_Abort`` to tear down all running MPI processes
        whenever there is an uncaught exception. By default, this will abort
        the entire MPI execution. When using partitions, an uncaught exception
        on one partition will therefore abort all of them.

        Use the return value of :py:meth:`localize_abort()` as a context
        manager to tell HOOMD that all operations within the context will use
        only that MPI communicator so that an uncaught exception in one
        partition will only abort that partition and leave the others running.
        """
        global _current_communicator
        prev = _current_communicator

        _current_communicator = self
        yield None
        _current_communicator = prev


# store the "current" communicator to be used for MPI_Abort calls. This defaults
# to the world communicator, but users can opt in to a more specific
# communicator using the Device.localize_abort context manager
_current_communicator = Communicator()
34.715789
80
0.632656
6,098
0.9245
846
0.12826
2,441
0.370073
0
0
3,736
0.566404
a3f0553d759d16377520becb3fec3c845edf0c9f
464
py
Python
src/affinity-propagation/generate_data.py
dominc8/affinity-propagation
b91b18b52eb68a7eafaadf0ceac39fe10955dcf2
[ "MIT" ]
1
2020-10-22T18:11:53.000Z
2020-10-22T18:11:53.000Z
src/affinity-propagation/generate_data.py
dominc8/affinity-propagation
b91b18b52eb68a7eafaadf0ceac39fe10955dcf2
[ "MIT" ]
null
null
null
src/affinity-propagation/generate_data.py
dominc8/affinity-propagation
b91b18b52eb68a7eafaadf0ceac39fe10955dcf2
[ "MIT" ]
null
null
null
from config import DataGeneratorCfg
from sklearn.datasets.samples_generator import make_blobs
import numpy as np


def generate():
    data, true_labels = make_blobs(n_samples=DataGeneratorCfg.n_samples, centers=DataGeneratorCfg.centers, cluster_std=DataGeneratorCfg.cluster_std, random_state=DataGeneratorCfg.random_state)
    print("Generating new data!")
    np.savetxt("data/data.txt", data)
    np.savetxt("data/true_labels.txt", true_labels)
    return data
38.666667
192
0.799569
0
0
0
0
0
0
0
0
59
0.127155
a3f0b2c627b66a9afed0141c901b2c8dc3a62a08
1,265
py
Python
peon/tests/test_project/test_file/test_function_def/test_functions/test_reflection_at_line.py
roch1990/peon
0e9e40956c05138c0820fe380b354fdd1fe95e01
[ "MIT" ]
32
2020-05-18T14:02:59.000Z
2022-02-06T15:00:12.000Z
peon/tests/test_project/test_file/test_function_def/test_functions/test_reflection_at_line.py
roch1990/peon
0e9e40956c05138c0820fe380b354fdd1fe95e01
[ "MIT" ]
42
2020-05-22T20:29:08.000Z
2021-03-10T21:24:23.000Z
peon/tests/test_project/test_file/test_function_def/test_functions/test_reflection_at_line.py
roch1990/peon
0e9e40956c05138c0820fe380b354fdd1fe95e01
[ "MIT" ]
4
2020-07-02T06:32:42.000Z
2022-01-24T22:46:02.000Z
import _ast from peon.src.project.file.function_def.function import FunctionLint class ReflectionAtLineFixture: empty_node = _ast.Pass is_instance_at_first_lvl = _ast.FunctionDef(id='isinstance', lineno=1) type_at_first_lvl = _ast.FunctionDef(id='type', lineno=1) is_instance_at_second_lvl = _ast.FunctionDef(body=[_ast.Expr(id='isinstance', lineno=2)], lineno=1) type_at_second_lvl = _ast.FunctionDef(body=[_ast.Expr(id='type', lineno=2)], lineno=1) def test_empty_node(): assert FunctionLint( definition=ReflectionAtLineFixture.empty_node, ).reflection_at_line() == tuple() def test_is_instance_at_first_lvl(): assert FunctionLint( definition=ReflectionAtLineFixture.is_instance_at_first_lvl, ).reflection_at_line() == (1,) def test_type_at_first_lvl(): assert FunctionLint( definition=ReflectionAtLineFixture.type_at_first_lvl, ).reflection_at_line() == (1,) def test_is_instance_at_second_lvl(): assert FunctionLint( definition=ReflectionAtLineFixture.is_instance_at_second_lvl, ).reflection_at_line() == (2,) def test_type_at_second_lvl(): assert FunctionLint( definition=ReflectionAtLineFixture.type_at_second_lvl, ).reflection_at_line() == (2,)
30.119048
103
0.746245
389
0.30751
0
0
0
0
0
0
36
0.028458
a3f0f192476289dad6988d88e198337f911d4da1
4,419
py
Python
db2_funcs.py
Nama/A.T.S.P.-Website
658db78da1b12c01ef9ead2dc44d1ecd97b178d8
[ "MIT" ]
4
2015-10-18T18:46:13.000Z
2019-11-16T02:34:05.000Z
db2_funcs.py
Adventure-Terraria-Server-Project/A.T.S.P.-Website
658db78da1b12c01ef9ead2dc44d1ecd97b178d8
[ "MIT" ]
null
null
null
db2_funcs.py
Adventure-Terraria-Server-Project/A.T.S.P.-Website
658db78da1b12c01ef9ead2dc44d1ecd97b178d8
[ "MIT" ]
2
2015-09-30T21:53:19.000Z
2019-03-22T07:59:49.000Z
############################################################################### # # '''Website Database-connection-related features''' # # # ############################################################################### import cymysql from conf import website_db from time import gmtime from time import strftime db_host = website_db.ip db_port = website_db.port db = website_db.db db_user = website_db.user db_pw = website_db.pw ############################################################################### # # '''Databse-connect and close''' # # # ############################################################################### def db_con(): conn = cymysql.connect(host=db_host, port=db_port, user=db_user, passwd=db_pw, db=db) cur = conn.cursor() return conn, cur def db_close(conn, cur): cur.close() conn.close() ############################################################################### # # '''Donation-Page data''' # # # ############################################################################### def donate_save(nick): conn, cur = db_con() time = strftime('%Y.%m.%d - %H:%M:%S', gmtime()) cur.execute('INSERT INTO `donate` (`time`, `user`) VALUES (%s, %s)', (time, nick)) conn.commit() db_close(conn, cur) def donate_read(): conn, cur = db_con() cur.execute('SELECT * FROM `donate` ORDER BY `time` DESC LIMIT 20') nicks = list() for r in cur.fetchall(): nicks.append([r[0], r[1]]) db_close(conn, cur) return nicks ############################################################################### # # '''Short-URL data''' # # # ############################################################################### def shorturl_save(surl, url): conn, cur = db_con() cur.execute('INSERT INTO `shorturls` (`surl`, `url`) VALUES (%s, %s)', (surl, url)) conn.commit() db_close(conn, cur) def shorturl_read(): conn, cur = db_con() cur.execute('SELECT * FROM `shorturls`') urls = list() for r in cur.fetchall(): urls.append([r[0], r[0], r[1]]) db_close(conn, cur) return urls ############################################################################### # # '''Old Worlds''' # # # ############################################################################### def get_old_worlds(item): conn, cur = db_con() sql = 'SELECT * FROM `oldworlds` ORDER BY `date` DESC LIMIT {0}, {1}'.format(item, 20) cur.execute(sql) worlds = cur.fetchall() db_close(conn, cur) return worlds ############################################################################### # # '''Server Backup-Size in Dash''' # # # ############################################################################### def backup_size(): conn, cur = db_con() dbtshock = [] tserver = [] htdocs = [] cur.execute('SELECT * FROM `backups`') for r in cur.fetchall(): if r[1] == 'db': dbtshock.append([r[0] * 1000, r[2]]) elif r[1] == 'tserver': tserver.append([r[0] * 1000, r[2]]) elif r[1] == 'htdocs': htdocs.append([r[0] * 1000, r[2]]) db_close(conn, cur) return (dbtshock, tserver, htdocs)
33.992308
90
0.296673
0
0
0
0
0
0
0
0
2,398
0.542657
a3f15add28c75465b292cc3d301905a5d66f9500
1,169
py
Python
nlp/handler.py
rgschmitz1/tcss702
b0fdd7b6107401dc297b467c9e63773dfb8fd487
[ "MIT" ]
null
null
null
nlp/handler.py
rgschmitz1/tcss702
b0fdd7b6107401dc297b467c9e63773dfb8fd487
[ "MIT" ]
null
null
null
nlp/handler.py
rgschmitz1/tcss702
b0fdd7b6107401dc297b467c9e63773dfb8fd487
[ "MIT" ]
null
null
null
from minio import Minio import json import os from .Inspector import Inspector from .topic_model import topic_model #def handle(event): def handle(event, context): with open("/var/openfaas/secrets/minio-access-key") as f: access_key = f.read() with open("/var/openfaas/secrets/minio-secret-key") as f: secret_key = f.read() mc = Minio(os.environ['minio_hostname'], access_key=access_key, secret_key=secret_key, secure=False) tm = topic_model(mc) # Collect data inspector = Inspector() inspector.inspectAll() # Add custom message and finish the function # if "startWallClock" in event: # inspector.addAttribute("startWallClock", event['startWallClock']) body = json.loads(event.body) print(body['fn'], flush=True) fn = {"p": tm.preprocess, "t": tm.train, "q": tm.query} fn[body['fn']]() inspector.inspectAllDeltas() # Include functionName inspector.addAttribute("functionName", fn[body['fn']].__name__) iret = inspector.finish() ret = { "status": 200, "body": iret } return ret
24.87234
74
0.622754
0
0
0
0
0
0
0
0
352
0.301112
a3f1d6f2fedc4710e7669c09fd2ad1b4f7d2b866
803
py
Python
src/pve_exporter/cli.py
jmangs/prometheus-pve-exporter
2947a1247d854791114eb5ed348a250739540708
[ "Apache-2.0" ]
null
null
null
src/pve_exporter/cli.py
jmangs/prometheus-pve-exporter
2947a1247d854791114eb5ed348a250739540708
[ "Apache-2.0" ]
null
null
null
src/pve_exporter/cli.py
jmangs/prometheus-pve-exporter
2947a1247d854791114eb5ed348a250739540708
[ "Apache-2.0" ]
null
null
null
""" Proxmox VE exporter for the Prometheus monitoring system. """ import sys from argparse import ArgumentParser from pve_exporter.http import start_http_server def main(args=None): """ Main entry point. """ parser = ArgumentParser() parser.add_argument('config', nargs='?', default='pve.yml', help='Path to configuration file (pve.yml)') parser.add_argument('port', nargs='?', type=int, default='9221', help='Port on which the exporter is listening (9221)') parser.add_argument('address', nargs='?', default='', help='Address to which the exporter will bind') params = parser.parse_args(args if args is None else sys.argv[1:]) start_http_server(params.config, params.port, params.address)
32.12
78
0.646326
0
0
0
0
0
0
0
0
274
0.34122
a3f41aa5108b3692f20a54704fcb143543339d31
12,692
py
Python
workers/repo_info_worker/repo_info_worker.py
vinodkahuja/augur
a7688af262c2f971767962d4a20110daf4b1179a
[ "MIT" ]
2
2020-08-27T17:34:38.000Z
2020-11-05T20:31:35.000Z
workers/repo_info_worker/repo_info_worker.py
BenjaminChilson/augur
8346be0b757c907e9b67ba870a9ace32a1b87b11
[ "MIT" ]
9
2021-04-16T23:42:53.000Z
2021-05-04T04:26:55.000Z
workers/repo_info_worker/repo_info_worker.py
BenjaminChilson/augur
8346be0b757c907e9b67ba870a9ace32a1b87b11
[ "MIT" ]
7
2019-03-25T13:26:42.000Z
2020-12-30T18:43:42.000Z
#SPDX-License-Identifier: MIT import logging, os, sys, time, requests, json from datetime import datetime from multiprocessing import Process, Queue import pandas as pd import sqlalchemy as s from workers.worker_base import Worker # NOTE: This worker primarily inserts rows into the REPO_INFO table, which serves the primary purposes of # 1. Displaying discrete metadata like "number of forks" and how they change over time # 2. Validating other workers, like those related to pull requests, issues, and commits. Our totals should be at or very near the totals in the repo_info table. # This table also updates the REPO table in 2 cases: # 1. Recognizing when a repository is a forked repository by updating the "forked_from" field and # 2. Recognizing when a repository is archived, and recording the data we observed the change in status. class RepoInfoWorker(Worker): def __init__(self, config={}): worker_type = "repo_info_worker" # Define what this worker can be given and know how to interpret given = [['github_url']] models = ['repo_info'] # Define the tables needed to insert, update, or delete on data_tables = ['repo_info', 'repo'] operations_tables = ['worker_history', 'worker_job'] # Run the general worker initialization super().__init__(worker_type, config, given, models, data_tables, operations_tables) # Define data collection info self.tool_source = 'Repo Info Worker' self.tool_version = '1.0.0' self.data_source = 'GitHub API' def repo_info_model(self, task, repo_id): github_url = task['given']['github_url'] self.logger.info("Beginning filling the repo_info model for repo: " + github_url + "\n") owner, repo = self.get_owner_repo(github_url) url = 'https://api.github.com/graphql' query = """ { repository(owner:"%s", name:"%s"){ updatedAt hasIssuesEnabled issues(states:OPEN) { totalCount } hasWikiEnabled forkCount defaultBranchRef { name } watchers { totalCount } id licenseInfo { name url } stargazers { totalCount } codeOfConduct { name url } issue_count: issues { totalCount } issues_closed: issues(states:CLOSED) { totalCount } pr_count: pullRequests { totalCount } pr_open: pullRequests(states: OPEN) { totalCount } pr_closed: pullRequests(states: CLOSED) { totalCount } pr_merged: pullRequests(states: MERGED) { totalCount } ref(qualifiedName: "master") { target { ... on Commit { history(first: 0){ totalCount } } } } } } """ % (owner, repo) # Hit the graphql endpoint and retry 3 times in case of failure num_attempts = 0 success = False data = None while num_attempts < 3: self.logger.info("Hitting endpoint: {} ...\n".format(url)) r = requests.post(url, json={'query': query}, headers=self.headers) self.update_gh_rate_limit(r) try: data = r.json() except: data = json.loads(json.dumps(r.text)) if 'errors' in data: self.logger.info("Error!: {}".format(data['errors'])) if data['errors'][0]['message'] == 'API rate limit exceeded': self.update_gh_rate_limit(r) continue if 'data' in data: success = True data = data['data']['repository'] break else: self.logger.info("Request returned a non-data dict: {}\n".format(data)) if data['message'] == 'Not Found': self.logger.info("Github repo was not found or does not exist for endpoint: {}\n".format(url)) break if data['message'] == 'You have triggered an abuse detection mechanism. 
Please wait a few minutes before you try again.': self.update_gh_rate_limit(r, temporarily_disable=True) continue if data['message'] == 'Bad credentials': self.update_gh_rate_limit(r, bad_credentials=True) continue num_attempts += 1 if not success: self.logger.error('Cannot hit endpoint after 3 attempts. \"Completing\" task.\n') self.register_task_completion(self.task, repo_id, 'repo_info') return # Just checking that the data is accessible (would not be if repo no longer exists) try: data['updatedAt'] except Exception as e: self.logger.error('Cannot access repo_info data: {}\nError: {}. \"Completing\" task.'.format(data, e)) self.register_task_completion(self.task, repo_id, 'repo_info') return # Get committers count info that requires seperate endpoint committers_count = self.query_committers_count(owner, repo) # Put all data together in format of the table self.logger.info(f'Inserting repo info for repo with id:{repo_id}, owner:{owner}, name:{repo}\n') rep_inf = { 'repo_id': repo_id, 'last_updated': data['updatedAt'] if 'updatedAt' in data else None, 'issues_enabled': data['hasIssuesEnabled'] if 'hasIssuesEnabled' in data else None, 'open_issues': data['issues']['totalCount'] if data['issues'] else None, 'pull_requests_enabled': None, 'wiki_enabled': data['hasWikiEnabled'] if 'hasWikiEnabled' in data else None, 'pages_enabled': None, 'fork_count': data['forkCount'] if 'forkCount' in data else None, 'default_branch': data['defaultBranchRef']['name'] if data['defaultBranchRef'] else None, 'watchers_count': data['watchers']['totalCount'] if data['watchers'] else None, 'UUID': None, 'license': data['licenseInfo']['name'] if data['licenseInfo'] else None, 'stars_count': data['stargazers']['totalCount'] if data['stargazers'] else None, 'committers_count': committers_count, 'issue_contributors_count': None, 'changelog_file': None, 'contributing_file': None, 'license_file': data['licenseInfo']['url'] if data['licenseInfo'] else None, 'code_of_conduct_file': data['codeOfConduct']['url'] if data['codeOfConduct'] else None, 'security_issue_file': None, 'security_audit_file': None, 'status': None, 'keywords': None, 'commit_count': data['ref']['target']['history']['totalCount'] if data['ref'] else None, 'issues_count': data['issue_count']['totalCount'] if data['issue_count'] else None, 'issues_closed': data['issues_closed']['totalCount'] if data['issues_closed'] else None, 'pull_request_count': data['pr_count']['totalCount'] if data['pr_count'] else None, 'pull_requests_open': data['pr_open']['totalCount'] if data['pr_open'] else None, 'pull_requests_closed': data['pr_closed']['totalCount'] if data['pr_closed'] else None, 'pull_requests_merged': data['pr_merged']['totalCount'] if data['pr_merged'] else None, 'tool_source': self.tool_source, 'tool_version': self.tool_version, 'data_source': self.data_source } result = self.db.execute(self.repo_info_table.insert().values(rep_inf)) self.logger.info(f"Primary Key inserted into repo_info table: {result.inserted_primary_key}\n") self.results_counter += 1 # Note that the addition of information about where a repository may be forked from, and whether a repository is archived, updates the `repo` table, not the `repo_info` table. 
forked = self.is_forked(owner, repo) archived = self.is_archived(owner, repo) archived_date_collected = None if archived is not False: archived_date_collected = archived archived = 1 else: archived = 0 rep_additional_data = { 'forked_from': forked, 'repo_archived': archived, 'repo_archived_date_collected': archived_date_collected } result = self.db.execute(self.repo_table.update().where( self.repo_table.c.repo_id==repo_id).values(rep_additional_data)) self.logger.info(f"Inserted info for {owner}/{repo}\n") # Register this task as completed self.register_task_completion(self.task, repo_id, "repo_info") def query_committers_count(self, owner, repo): self.logger.info('Querying committers count\n') url = f'https://api.github.com/repos/{owner}/{repo}/contributors?per_page=100' committers = 0 try: while True: r = requests.get(url, headers=self.headers) self.update_gh_rate_limit(r) committers += len(r.json()) if 'next' not in r.links: break else: url = r.links['next']['url'] except Exception: self.logger.exception('An error occured while querying contributor count\n') return committers def is_forked(self, owner, repo): #/repos/:owner/:repo parent self.logger.info('Querying parent info to verify if the repo is forked\n') url = f'https://api.github.com/repos/{owner}/{repo}' r = requests.get(url, headers=self.headers) self.update_gh_rate_limit(r) data = self.get_repo_data(url, r) if 'fork' in data: if 'parent' in data: return data['parent']['full_name'] return 'Parent not available' return False def is_archived(self, owner, repo): self.logger.info('Querying committers count\n') url = f'https://api.github.com/repos/{owner}/{repo}' r = requests.get(url, headers=self.headers) self.update_gh_rate_limit(r) data = self.get_repo_data(url, r) if 'archived' in data: if data['archived']: if 'updated_at' in data: return data['updated_at'] return 'Date not available' return False return False def get_repo_data(self, url, response): success = False try: data = response.json() except: data = json.loads(json.dumps(response.text)) if 'errors' in data: self.logger.info("Error!: {}".format(data['errors'])) if data['errors'][0]['message'] == 'API rate limit exceeded': self.update_gh_rate_limit(response) if 'id' in data: success = True else: self.logger.info("Request returned a non-data dict: {}\n".format(data)) if data['message'] == 'Not Found': self.logger.info("Github repo was not found or does not exist for endpoint: {}\n".format(url)) if data['message'] == 'You have triggered an abuse detection mechanism. Please wait a few minutes before you try again.': self.update_gh_rate_limit(r, temporarily_disable=True) if data['message'] == 'Bad credentials': self.update_gh_rate_limit(r, bad_credentials=True) if not success: self.register_task_failure(self.task, repo_id, "Failed to hit endpoint: {}".format(url)) return data
41.75
183
0.554365
11,842
0.933029
0
0
0
0
0
0
6,163
0.485581
a3f5451025cc5163c68a3eea15dfa30712bf9362
17,929
py
Python
benchmark/my_argparser.py
victor-estrade/SystGradDescent
822e7094290301ec47a99433381a8d6406798aff
[ "MIT" ]
2
2019-03-20T09:05:02.000Z
2019-03-20T15:23:44.000Z
benchmark/my_argparser.py
victor-estrade/SystGradDescent
822e7094290301ec47a99433381a8d6406798aff
[ "MIT" ]
null
null
null
benchmark/my_argparser.py
victor-estrade/SystGradDescent
822e7094290301ec47a99433381a8d6406798aff
[ "MIT" ]
null
null
null
# coding: utf-8 from __future__ import print_function from __future__ import division from __future__ import absolute_import from __future__ import unicode_literals import argparse def parse_args_tolerance(): parser = argparse.ArgumentParser(description='just for tolerance') parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") args, _ = parser.parse_known_args() return args.tolerance def GB_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--n-estimators', help='number of estimators', default=100, type=int) parser.add_argument('--max-depth', help='maximum depth of trees', default=3, type=int) parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-1, type=float) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') parser.add_argument('--skip-minuit', help='flag to skip minuit NLL minization', action='store_true') args = parser.parse_args() return args def REG_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-4, type=float) parser.add_argument('--beta1', help='beta 1 for Adam', default=0.5, type=float) parser.add_argument('--beta2', help='beta 2 for Adam', default=0.9, type=float) parser.add_argument('--weight-decay', help='weight decay for SGD', default=0.0, type=float) parser.add_argument('--optimizer', help='optimizer name', dest='optimizer_name', default='Adam', type=str, choices=('Adam', 'SGD', 'ADAM', 'sgd', 'adam')) parser.add_argument('--n-unit', help='Number of units in layers. 
Controls NN width.', default=200, type=int) parser.add_argument('--sample-size', help='data sample size', default=1000, type=int) parser.add_argument('--batch-size', help='mini-batch size', default=20, type=int) parser.add_argument('--n-steps', help='number of update steps', default=1000, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') args = parser.parse_args() return args def INFERNO_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-3, type=float) parser.add_argument('--temperature', help='control initial softmax steepness', default=1.0, type=float) parser.add_argument('--beta1', help='beta 1 for Adam', default=0.5, type=float) parser.add_argument('--beta2', help='beta 2 for Adam', default=0.9, type=float) parser.add_argument('--weight-decay', help='weight decay for SGD', default=0.0, type=float) parser.add_argument('--optimizer', help='optimizer name', dest='optimizer_name', default='Adam', type=str, choices=('Adam', 'SGD', 'ADAM', 'sgd', 'adam')) parser.add_argument('--n-unit', help='Number of units in layers. Controls NN width.', default=200, type=int) parser.add_argument('--n-bins', help='number of output bins', default=10, type=int) parser.add_argument('--sample-size', help='data sample size', default=1000, type=int) parser.add_argument('--batch-size', help='mini-batch size', default=20, type=int) parser.add_argument('--n-steps', help='number of update steps', default=1000, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') args = parser.parse_args() return args def NET_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. 
Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-3, type=float) parser.add_argument('--beta1', help='beta 1 for Adam', default=0.9, type=float) parser.add_argument('--beta2', help='beta 2 for Adam', default=0.999, type=float) parser.add_argument('--weight-decay', help='weight decay for SGD', default=0.0, type=float) parser.add_argument('--optimizer', help='optimizer name', dest='optimizer_name', default='Adam', type=str, choices=('Adam', 'SGD', 'ADAM', 'sgd', 'adam')) parser.add_argument('--n-unit', help='Number of units in layers. Controls NN width.', default=200, type=int) parser.add_argument('--sample-size', help='data sample size', default=1000, type=int) parser.add_argument('--batch-size', help='mini-batch size', default=1000, type=int) parser.add_argument('--n-steps', help='number of update steps', default=1000, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') args = parser.parse_args() return args def TP_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-3, type=float) parser.add_argument('--trade-off', help='trade-off between classic loss and adversarial loss', default=1.0, type=float) parser.add_argument('--beta1', help='beta 1 for Adam', default=0.9, type=float) parser.add_argument('--beta2', help='beta 2 for Adam', default=0.999, type=float) parser.add_argument('--weight-decay', help='weight decay for SGD', default=0.0, type=float) parser.add_argument('--optimizer', help='optimizer name', dest='optimizer_name', default='Adam', type=str, choices=('Adam', 'SGD', 'ADAM', 'sgd', 'adam')) parser.add_argument('--n-unit', help='Number of units in layers. 
Controls NN width.', default=200, type=int) parser.add_argument('--sample-size', help='data sample size', default=1000, type=int) parser.add_argument('--batch-size', help='mini-batch size', default=1000, type=int) parser.add_argument('--n-steps', help='number of update steps', default=1000, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') args = parser.parse_args() return args def PIVOT_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--learning-rate', '--lr', help='learning rate', default=1e-3, type=float) parser.add_argument('--trade-off', help='trade-off between classic loss and adversarial loss', default=1.0, type=float) parser.add_argument('--beta1', help='beta 1 for Adam', default=0.9, type=float) parser.add_argument('--beta2', help='beta 2 for Adam', default=0.999, type=float) parser.add_argument('--weight-decay', help='weight decay for SGD', default=0.0, type=float) parser.add_argument('--optimizer', help='optimizer name', dest='optimizer_name', default='Adam', type=str, choices=('Adam', 'SGD', 'ADAM', 'sgd', 'adam')) parser.add_argument('--n-unit', help='Number of units in layers. 
Controls NN width.', default=200, type=int) parser.add_argument('--sample-size', help='data sample size', default=1000, type=int) parser.add_argument('--batch-size', help='mini-batch size', default=1000, type=int) parser.add_argument('--n-steps', help='number of update steps', default=1000, type=int) parser.add_argument('--n-net-pre-training-steps', help='number of update steps for pretraining the classifier', default=1000, type=int) parser.add_argument('--n-adv-pre-training-steps', help='number of update steps for pretraining the adversarial', default=1000, type=int) parser.add_argument('--n-recovery-steps', help='number of update steps for adversarial recovery', default=1, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') args = parser.parse_args() return args def FF_parse_args(main_description="Training launcher"): parser = argparse.ArgumentParser(description=main_description) parser.add_argument("--verbose", "-v", type=int, choices=[0, 1, 2], default=0, help="increase output verbosity") parser.add_argument("--start-cv", type=int, default=0, help="start of i_cv for range(start, end)") parser.add_argument("--end-cv", type=int, default=30, help="end of i_cv for range(start, end)") parser.add_argument("--tolerance", type=float, default=0.1, help="tolerance value for Minuit migrad and simplex minimization") parser.add_argument('--load-run', help='load saved runs. Do not run the models', action='store_true') parser.add_argument('--estimate-only', help='Turns off conditional estimation for V_stat and V_syst', action='store_true') parser.add_argument('--conditional-only', help='Turns off common estimation', action='store_true') # MODEL HYPER PARAMETERS parser.add_argument('--feature-id', help='feature index to filter on', default=0, type=int) # OTHER parser.add_argument('--no-cuda', '--no-gpu', help='flag to use or not the gpu', action='store_false', dest='cuda') parser.add_argument('--retrain', help='flag to force retraining', action='store_true') parser.add_argument('--skip-minuit', help='flag to skip minuit NLL minization', action='store_true') args = parser.parse_args() return args
46.81201
116
0.593787
0
0
0
0
0
0
0
0
6,757
0.376875
a3f6015f3b9c537d076933e65004a8315446ca82
11,482
py
Python
src/main/python/main.py
SarthakJariwala/Shockley-Queisser-Calculator
5f9cfd4c97b8141e8b4ee8d15fa5f3cccfe25b7e
[ "MIT" ]
1
2020-04-08T06:33:47.000Z
2020-04-08T06:33:47.000Z
src/main/python/main.py
SarthakJariwala/Schokley-Queisser-Calculator
5f9cfd4c97b8141e8b4ee8d15fa5f3cccfe25b7e
[ "MIT" ]
null
null
null
src/main/python/main.py
SarthakJariwala/Schokley-Queisser-Calculator
5f9cfd4c97b8141e8b4ee8d15fa5f3cccfe25b7e
[ "MIT" ]
2
2020-05-31T02:57:55.000Z
2020-07-30T13:24:22.000Z
from fbs_runtime.application_context.PyQt5 import ApplicationContext, cached_property from fbs_runtime.platform import is_windows, is_mac # system imports import sys # module imports from PyQt5 import uic, QtWidgets from PyQt5.QtWidgets import QMessageBox import numpy as np import matplotlib.pyplot as plt import matplotlib import scipy.constants as constants from scipy.integrate import simps, quad from scipy.interpolate import splrep, splint from scipy.optimize import fmin class AppContext(ApplicationContext): def run(self): self.main_window.show() return self.app.exec_() def get_design(self): qtCreatorFile = self.get_resource("SQ_GUI.ui") return qtCreatorFile def get_file(self): astmg_file = self.get_resource("ASTMG173.csv") return astmg_file @cached_property def main_window(self): return MainWindow(self.get_design(), self.get_file()) if is_windows(): matplotlib.use('Qt5Agg') elif is_mac(): matplotlib.use('macosx') class MainWindow(QtWidgets.QMainWindow): def __init__(self, uiFile, astmg173_file): super(MainWindow, self).__init__() #Create Main Window self.ui = uic.loadUi(uiFile, self) #self.ui = WindowTemplate() #self.ui.setupUi(self) #Connect PushButtons to Functions etc self.ui.CalcualteSQ_pushButton.clicked.connect(self.calculate_SQ) self.ui.load_pushButton.clicked.connect(self.load_SMARTS_spectrum) self.ui.save_pushButton.clicked.connect(self.save_bandgap_array) #start app with checked "plot j-v curve" self.ui.plot_checkBox.setChecked(True) self.astmg173_file = astmg173_file self.out_array = None self.show() def load_SMARTS_spectrum(self): filename = QtWidgets.QFileDialog.getOpenFileName(self) try: self.SMARTS = np.genfromtxt(filename[0], skip_header=1) self.ui.load_checkBox.setChecked(False) except Exception as e: QMessageBox.information( self, None, str(e), QMessageBox.Ok ) def calculate_SQ(self): h = constants.physical_constants['Planck constant'][0] # units of J*s h_ev = constants.physical_constants['Planck constant in eV s'][0] c_nm = (constants.physical_constants['speed of light in vacuum'][0]) * 1e9 c = (constants.physical_constants['speed of light in vacuum'][0]) e_charge = constants.physical_constants['elementary charge'][0] kb_ev = constants.physical_constants['Boltzmann constant in eV/K'][0] """User settings""" Tcell = self.ui.temp_spinBox.value() #temperature of solar cell in degrees K bandgap = self.ui.bandgap_doubleSpinBox.value() #enter bandgap in eV #self.ui.textBrowser.append(str('Tcell = %.3f' %(Tcell))) plot_jv = self.ui.plot_checkBox.isChecked() #'True' if you want to plot the SQ JV curve for "bandgap" plot_bandgap_array = self.ui.calc_SQ_array_checkBox.isChecked() #'True' if you want to plot SQ parameters for an array of bandgaps # starting from "mbandgap_array_min" to "bandgap_array_max" # with number of points "num_points_bandgap_array" # (see below) #'False' if you just want SQ data for one bandgap (faster) bandgap_array_min = self.ui.bandgap_min_doubleSpinBox.value() #in eV bandgap_array_max = self.ui.bandgap_max_doubleSpinBox.value() # in eV num_points_bandgap_array = self.ui.no_points_spinBox.value() """Programming below""" bandgap_array = np.linspace(bandgap_array_min, bandgap_array_max, num_points_bandgap_array) #First convert AM1.5 spectrum from W/m^2/nm to W/m^2/ev if self.ui.load_checkBox.isChecked(): astmg173 = np.loadtxt(self.astmg173_file, delimiter = ',', skiprows = 2) am15_wav = np.copy(astmg173[:,0]) #AM1.5 wavelength axis in nm am15 = np.copy(astmg173[:,2]) #AM1.5 in units of W/m^2/nm = J/s*m^2/nm else: try: astmg173 = self.SMARTS am15_wav = 
np.copy(astmg173[:,0]) #AM1.5 wavelength axis in nm am15 = np.copy(astmg173[:,1]) #AM1.5 in units of W/m^2/nm = J/s*m^2/nm except: QMessageBox.information( self, None, "No valid spectrum file found!\n\n"+ "Load a valid file or check the 'Use ASTMG173'box" ) return total_power_nm = simps(am15, x = am15_wav) #Integrate over nm to check that total power density = 1000 W/m^2 am15_ev = h_ev * (c_nm) / (am15_wav ) am15_wats_ev = am15 * (h_ev * c_nm/ ((am15_ev) ** 2.0)) am15_ev_flip = am15_ev[::-1] am15_wats_ev_flip = am15_wats_ev[::-1] total_power_ev = simps(am15_wats_ev_flip, x = am15_ev_flip) #Integrate over eV to check that total power density = 1000 W/m^2 am15_photons_ev = am15_wats_ev_flip / (am15_ev_flip * e_charge) am15_photons_nm = am15 / (am15_ev * e_charge) total_photonflux_ev = simps(am15_photons_ev, x = am15_ev_flip) total_photonflux_nm = simps(am15_photons_nm , x = am15_wav) total_photonflux_ev_splrep = splrep(am15_ev_flip, am15_photons_ev) emin = am15_ev_flip[0] emax = am15_ev_flip[len(am15_ev_flip) - 1] def solar_photons_above_gap(Egap): #units of photons / sec *m^2 return splint(Egap, emax,total_photonflux_ev_splrep) def RR0(Egap): integrand = lambda eV : eV ** 2.0 / (np.exp(eV / (kb_ev * Tcell)) - 1) integral = quad(integrand, Egap, emax, full_output=1)[0] return ((2.0 * np.pi / ((c ** 2.0) * (h_ev ** 3.0)))) * integral def current_density(V, Egap): #to get from units of amps / m^2 to mA/ cm^2 ---multiply by 1000 to convert to mA ---- multiply by (0.01 ^2) to convert to cm^2 cur_dens = e_charge * (solar_photons_above_gap(Egap) - RR0(Egap) * np.exp( V / (kb_ev * Tcell))) return cur_dens * 1000 * (0.01 ** 2.0) def JSC(Egap): return current_density(0, Egap) def VOC(Egap): return (kb_ev * Tcell) * np.log(solar_photons_above_gap(Egap) / RR0(Egap)) def fmax(func_to_maximize, initial_guess=0): """return the x that maximizes func_to_maximize(x)""" func_to_minimize = lambda x : -func_to_maximize(x) return fmin(func_to_minimize, initial_guess, disp=False)[0] def V_mpp_Jmpp_maxpower_maxeff_ff(Egap): vmpp = fmax(lambda V : V * current_density(V, Egap)) jmpp = current_density(vmpp, Egap) maxpower = vmpp * jmpp max_eff = maxpower / (total_power_ev * 1000 * (0.01 ** 2.0)) jsc_return = JSC(Egap) voc_return = VOC(Egap) ff = maxpower / (jsc_return * voc_return) return [vmpp, jmpp, maxpower, max_eff, ff, jsc_return, voc_return] maxpcemeta = V_mpp_Jmpp_maxpower_maxeff_ff(bandgap) self.ui.textBrowser.append(str('For Bandgap = %.3f eV, TCell = %.3f K:\nJSC = %.3f mA/cm^2\nVOC = %.3f V\nFF = %.3f\nPCE = %.3f' % (bandgap, Tcell, maxpcemeta[5], maxpcemeta[6],maxpcemeta[4], maxpcemeta[3] * 100))) if plot_bandgap_array == True: pce_array = np.empty_like(bandgap_array) ff_array = np.empty_like(bandgap_array) voc_array = np.empty_like(bandgap_array) jsc_array = np.empty_like(bandgap_array) for i in range(len(bandgap_array)): metadata = V_mpp_Jmpp_maxpower_maxeff_ff(bandgap_array[i]) pce_array[i] = metadata[3] ff_array[i] = metadata[4] voc_array[i] = metadata[6] jsc_array[i] = metadata[5] self.out_array = np.array((bandgap_array,pce_array,ff_array, voc_array,jsc_array)).T plt.figure(figsize=(5,4)) plt.title('Cell Temperature = %.2f K' %(Tcell)) plt.xlim(bandgap_array[0], bandgap_array[len(bandgap_array) - 1]) plt.ylabel('PCE (%)') plt.xlabel('Bandgap (eV)') plt.plot(bandgap_array, pce_array * 100) plt.tight_layout() plt.show() plt.figure(figsize=(5,4)) plt.title('Cell Temperature = %.2f K' %(Tcell)) plt.ylim(0, 1) plt.xlim(bandgap_array[0], bandgap_array[len(bandgap_array) - 1]) plt.ylabel('Fill Factor') 
plt.xlabel('Bandgap (eV)') plt.plot(bandgap_array, ff_array) plt.tight_layout() plt.show() plt.figure(figsize=(5,4)) plt.title('Cell Temperature = %.2f K' %(Tcell)) plt.xlim(bandgap_array[0], bandgap_array[len(bandgap_array) - 1]) plt.ylabel('Jsc (mA/cm$^2$)') plt.xlabel('Bandgap (eV)') plt.plot(bandgap_array, jsc_array) plt.tight_layout() plt.show() plt.figure(figsize=(5,4)) plt.title('Cell Temperature = %.2f K' %(Tcell)) plt.xlim(bandgap_array[0], bandgap_array[len(bandgap_array) - 1]) plt.ylabel('Voc (V)') plt.xlabel('Bandgap (eV)') plt.plot(bandgap_array, voc_array, label = 'S-Q Voc') plt.plot(bandgap_array, bandgap_array, '--', label = 'Bandgap') plt.legend(loc = 'best') plt.tight_layout() plt.show() self.ui.textBrowser.append('--') else: self.ui.textBrowser.append('--') def JV_curve(Egap): volt_array = np.linspace(0, VOC(Egap), 200) j_array = np.empty_like(volt_array) for i in range(len(volt_array)): j_array[i] = current_density(volt_array[i], Egap) return [volt_array, j_array] if plot_jv == True: jv_meta = JV_curve(bandgap) v_array = jv_meta[0] jv_array = jv_meta[1] plt.figure(figsize=(5,4)) plt.ylabel('Current Density (mA/cm$^2$)') plt.xlabel('Voltage (V)') plt.plot(v_array, -jv_array) plt.title('J-V Curve for '+str(self.ui.bandgap_doubleSpinBox.value())+'eV') plt.tight_layout() plt.show() self.ui.textBrowser.append('--') else: self.ui.textBrowser.append('--') def save_bandgap_array(self): if self.out_array is None: self.ui.textBrowser.append("Calculate SQ limit before saving file!") else: filename = QtWidgets.QFileDialog.getSaveFileName(self) np.savetxt(filename[0]+".txt", self.out_array, delimiter='\t', header="Bandgap, PCE, FillFactor, Voc, Jsc") #def run(): # win = MainWindow() # QtGui.QApplication.instance().exec_() # return win #run() if __name__ == '__main__': appctxt = AppContext() # 1. Instantiate ApplicationContext exit_code = appctxt.run() sys.exit(exit_code) # 2. Invoke appctxt.app.exec_()
38.659933
222
0.592406
10,628
0.925623
0
0
105
0.009145
0
0
2,150
0.18725
a3f604410a10116c403796e4d6e056235535c4f9
9,832
py
Python
helpus/core.py
tov101/HelpUs
6b53d9651cf45c191774be2f70b70b130251d2a6
[ "MIT" ]
null
null
null
helpus/core.py
tov101/HelpUs
6b53d9651cf45c191774be2f70b70b130251d2a6
[ "MIT" ]
null
null
null
helpus/core.py
tov101/HelpUs
6b53d9651cf45c191774be2f70b70b130251d2a6
[ "MIT" ]
null
null
null
import io import logging import os import sys from PyQt5 import QtGui, QtCore, QtWidgets from helpus import icon_file_path from helpus import __version__ LOGGER = logging.getLogger('HelpUs') LOGGER.setLevel(logging.DEBUG) class XStream(QtCore.QObject): _stdout = None _stderr = None messageWritten = QtCore.pyqtSignal(str) @staticmethod def flush(): pass @staticmethod def fileno(): return -1 def write(self, msg): if not self.signalsBlocked(): self.messageWritten.emit(msg) @staticmethod def stdout(): if not XStream._stdout: XStream._stdout = XStream() sys.stdout = XStream._stdout return XStream._stdout @staticmethod def stderr(): if not XStream._stderr: XStream._stderr = XStream() sys.stderr = XStream._stderr return XStream._stderr class MyBreakPoint(QtWidgets.QDialog): _stdout = None _stderr = None messageWritten = QtCore.pyqtSignal(str) HOOK_HEADER = '(Pdb) ' HOOK_INTERACT = '>>> ' HOOK_LINE_BREAK = '... ' HOOKS = [HOOK_HEADER, HOOK_INTERACT] BUTTONS = [ 'Continue', 'Next', 'Step', 'Where', 'Up', 'Down' ] def __init__(self, parent=None): super().__init__() if not parent: self.parentWidget = QtWidgets.QMainWindow() else: self.parentWidget = parent # Change Window Modality, otherwise parentWidget won't let you use this widget if self.parentWidget.windowModality() == QtCore.Qt.WindowModality.ApplicationModal: self.parentWidget.hide() self.parentWidget.setWindowModality(QtCore.Qt.WindowModality.NonModal) self.parentWidget.showNormal() # Set Icon if icon_file_path and os.path.exists(icon_file_path): self.setWindowIcon(QtGui.QIcon(icon_file_path)) # Set Flags self.setWindowFlags( QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint | QtCore.Qt.WindowCloseButtonHint ) # Resize self.resize(513, 300) # Create Layout self.main_layout = QtWidgets.QHBoxLayout() self.setLayout(self.main_layout) self.setWindowTitle("HelpUs {}".format(__version__)) # Create Content Layouts self.ConsoleLayout = QtWidgets.QVBoxLayout() self.ButtonsLayout = QtWidgets.QVBoxLayout() self.main_layout.addLayout(self.ButtonsLayout) self.main_layout.addLayout(self.ConsoleLayout) # Create OutputConsole self.console = QtWidgets.QTextEdit(parent) self.console.insertPlainText = self.__insert_plain_text self.console.keyPressEvent = self.__key_press_event self.ConsoleLayout.addWidget(self.console) # Create buttons for button_text in self.BUTTONS: # Create Button Name button_name = 'button_%s' % button_text.lower() setattr(self, button_name, QtWidgets.QPushButton(button_text)) getattr(self, button_name).clicked.connect(self.__push_button) # Add Button to Widget self.ButtonsLayout.addWidget(getattr(self, button_name)) # Init Buffer self.buffer = io.StringIO() self.__set_enable_gui(False) self.showNormal() def __set_enable_gui(self, state=True): """ :param state: :return: """ self.console.setEnabled(state) for button_text in self.BUTTONS: # Get Button Name button_name = 'button_%s' % button_text.lower() getattr(self, button_name).setEnabled(state) if state: self.console.setFocus() def redirect_outerr_stream(self): """ :return: """ # Link Stream Output XStream.stdout().messageWritten.connect(self.console.insertPlainText) XStream.stderr().messageWritten.connect(self.console.insertPlainText) def readline(self): """ :return: """ if not self.console.isEnabled(): self.__set_enable_gui(True) # Reset Buffer self.__reset_buffer() # Check Position while self.buffer.tell() == 0: QtCore.QCoreApplication.processEvents() value = self.buffer.getvalue() return value def __key_press_event(self, event): """ :param event: :return: """ # Get Last Line 
document = self.console.document() line_index = document.lineCount() raw_last_line = document.findBlockByLineNumber(line_index - 1).text() text = '' current_hook = '' # Exclude first 6 chars: (Pdb)\s if raw_last_line: for hook in self.HOOKS: if raw_last_line.startswith(hook): current_hook = hook text = raw_last_line[len(hook):] break else: text = raw_last_line # Get Cursor position line_from_zero = line_index - 1 current_cursor_line = self.console.textCursor().blockNumber() current_cursor_column = self.console.textCursor().columnNumber() # If Enter was pressed -> Process Expression if event.key() == QtCore.Qt.Key.Key_Return and text: # Consider Custom Clear Screen Command if text == 'cls': self.__clear_screen(raw_last_line) return # Replace Line Break with Enter if self.HOOK_LINE_BREAK == text: text = '\r\n' elif self.HOOK_LINE_BREAK in text: # Replace Line Break with tab text = text.replace(self.HOOK_LINE_BREAK, '\t') current_hook = self.HOOK_LINE_BREAK self.__reset_buffer() self.buffer.write(text) self.__set_enable_gui(False) # If User want to delete something and there is no value in buffer -> Reject if event.key() == QtCore.Qt.Key.Key_Backspace or event.key() == QtCore.Qt.Key.Key_Delete: if current_cursor_line != line_from_zero or current_cursor_column <= len(current_hook): return if event.key() == QtCore.Qt.Key.Key_Home and current_cursor_line == line_from_zero: if text: temp_cursor = self.console.textCursor() temp_cursor.movePosition( QtGui.QTextCursor.MoveOperation.StartOfLine, QtGui.QTextCursor.MoveMode.MoveAnchor ) temp_cursor.movePosition( QtGui.QTextCursor.MoveOperation.Right, QtGui.QTextCursor.MoveMode.MoveAnchor, len(current_hook) ) self.console.setTextCursor(temp_cursor) return # Set Console Text to Black self.console.setTextColor(QtCore.Qt.GlobalColor.black) # Execute default method QtWidgets.QTextEdit.keyPressEvent(self.console, event) def __push_button(self): # Read text from Button and use it as pdb keyword button_scope = self.sender().text().lower() self.__reset_buffer() self.buffer.write(button_scope) self.__set_enable_gui(False) def __reset_buffer(self): if isinstance(self.buffer, io.StringIO): # Clear Buffer self.buffer.truncate(0) self.buffer.seek(0) else: self.buffer = io.StringIO() def __insert_plain_text(self, message): # Do some stylistics if message.startswith(self.HOOK_HEADER): self.console.setTextColor(QtCore.Qt.GlobalColor.magenta) QtWidgets.QTextEdit.insertPlainText(self.console, message) return elif message.startswith(self.HOOK_INTERACT): self.console.setTextColor(QtCore.Qt.GlobalColor.darkMagenta) QtWidgets.QTextEdit.insertPlainText(self.console, message) return if message.startswith('***'): self.console.setTextColor(QtCore.Qt.GlobalColor.red) QtWidgets.QTextEdit.insertPlainText(self.console, message) # AutoScroll self.console.verticalScrollBar().setValue(self.console.verticalScrollBar().maximum()) def __clear_screen(self, text): current_hook = text for hook in self.HOOKS: if hook in current_hook: current_hook = hook break self.console.clear() self.console.insertPlainText(current_hook) def get_qtconsole_object(): if isinstance(sys.stdin, MyBreakPoint): return sys.stdin.console else: return MyBreakPoint.console def setup_breakpoint_hook(parent, method, redirect_streams=False): def __method(*args, **kwargs): breakpoint() return method(*args, **kwargs) if not isinstance(sys.stdin, MyBreakPoint): sys.stdin = MyBreakPoint(parent) else: # Restore Streams sys.stdin = sys.__stdin__ sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ raise Exception( "Multiple 
Instances are not allowed. Can be possible, but I'm to lazy to go deep with development." ) if redirect_streams: sys.stdin.redirect_outerr_stream() return __method if __name__ == '__main__': p = QtWidgets.QApplication(sys.argv) LOGGER.error('Ceva') LOGGER.error = setup_breakpoint_hook(None, LOGGER.error, redirect_streams=True) # LOGGER.error = setup_breakpoint_hook(None, LOGGER.error, redirect_streams=True) x = 90 LOGGER.error('Altceva') print(x)
31.015773
111
0.610151
8,497
0.864219
0
0
442
0.044955
0
0
1,260
0.128153
a3f6dd9ede6bbc22ab1ea49e8e955647bc30a83d
2,206
py
Python
biothings/hub/dataindex/indexer_schedule.py
newgene/biothings.api
e3278695ac15a55fe420aa49c464946f81ec019d
[ "Apache-2.0" ]
30
2017-07-23T14:50:29.000Z
2022-02-08T08:08:16.000Z
biothings/hub/dataindex/indexer_schedule.py
kevinxin90/biothings.api
8ff3bbaecd72d04db4933ff944898ee7b7c0e04a
[ "Apache-2.0" ]
163
2017-10-24T18:45:40.000Z
2022-03-28T03:46:26.000Z
biothings/hub/dataindex/indexer_schedule.py
newgene/biothings.api
e3278695ac15a55fe420aa49c464946f81ec019d
[ "Apache-2.0" ]
22
2017-06-12T18:30:15.000Z
2022-03-01T18:10:47.000Z
import math class Schedule(): def __init__(self, total, batch_size): self._batch_size = batch_size self._state = "" self.total = total self.scheduled = 0 self.finished = 0 @property def _batch(self): return math.ceil(self.scheduled / self._batch_size) @property def _batches(self): return math.ceil(self.total / self._batch_size) @property def _percentage(self): _percentage = self.scheduled / self.total * 100 return "%.1f%%" % _percentage def suffix(self, string): return " ".join(( string, "#%d/%d %s" % ( self._batch, self._batches, self._percentage ) )) def completed(self): if self.finished != self.total: raise ValueError(self.finished, self.total) def __iter__(self): return self def __next__(self): if self.scheduled >= self.total: self._state = "pending, waiting for completion," raise StopIteration() self.scheduled += self._batch_size if self.scheduled > self.total: self.scheduled = self.total self._state = self.suffix("running, on batch") + "," return self._batch def __str__(self): return " ".join(f""" <Schedule {"done" if self.finished >= self.total else self._state} total={self.total} scheduled={self.scheduled} finished={self.finished}> """.split()) def test_01(): schedule = Schedule(100, 10) for batch in schedule: print(batch) print(schedule) def test_02(): schedule = Schedule(25, 10) for batch in schedule: print(batch) print(schedule) print(schedule.suffix("Task")) def test_03(): schedule = Schedule(0, 10) for batch in schedule: print(batch) print(schedule) print(schedule.suffix("Task")) def test_04(): schedule = Schedule(1, 10) for batch in schedule: print(batch) print(schedule) print(schedule.suffix("Task")) if __name__ == "__main__": test_02()
24.241758
87
0.56165
1,552
0.703536
0
0
310
0.140526
0
0
298
0.135086
a3f7032251ab8fdd92446dda433cb7125e3c866d
447
py
Python
examples/py/async-basic.py
voBits/ccxt
edd2dd92053bd06232769a63465a43912b21eda0
[ "MIT" ]
73
2018-05-15T00:53:50.000Z
2022-03-07T14:45:11.000Z
examples/py/async-basic.py
voBits/ccxt
edd2dd92053bd06232769a63465a43912b21eda0
[ "MIT" ]
46
2020-01-06T07:32:19.000Z
2021-07-26T06:33:33.000Z
examples/py/async-basic.py
voBits/ccxt
edd2dd92053bd06232769a63465a43912b21eda0
[ "MIT" ]
11
2018-05-15T00:09:30.000Z
2022-03-07T14:45:27.000Z
# -*- coding: utf-8 -*- import asyncio import os import sys root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) sys.path.append(root + '/python') import ccxt.async as ccxt # noqa: E402 async def test_gdax(): gdax = ccxt.gdax() markets = await gdax.load_markets() await gdax.close() return markets if __name__ == '__main__': print(asyncio.get_event_loop().run_until_complete(test_gdax()))
21.285714
83
0.695749
0
0
0
0
0
0
127
0.284116
54
0.120805
a3f8263738f1248425bd621ee69178be5e83cbe4
19
py
Python
pymclevel/test/__init__.py
bennettdc/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
[ "0BSD" ]
673
2015-01-02T02:08:13.000Z
2022-03-24T19:38:14.000Z
pymclevel/test/__init__.py
bennettdc/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
[ "0BSD" ]
551
2015-01-01T02:36:53.000Z
2018-02-01T00:03:12.000Z
pymclevel/test/__init__.py
bennettdc/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
[ "0BSD" ]
231
2015-01-01T16:47:30.000Z
2022-03-31T21:51:55.000Z
__author__ = 'Rio'
9.5
18
0.684211
0
0
0
0
0
0
0
0
5
0.263158
a3f86c1b680627a4f18d2261e3c26090baebd672
261
py
Python
xview/datasets/wrapper.py
ethz-asl/modular_semantic_segmentation
7c950f24df11540a7ddae4ff806d5b31934a3210
[ "BSD-3-Clause" ]
20
2018-08-01T15:02:59.000Z
2021-04-19T07:22:17.000Z
xview/datasets/wrapper.py
davesean/modular_semantic_segmentation
5f9e34243915b862e8fef5e6195f1e29f4cebf50
[ "BSD-3-Clause" ]
null
null
null
xview/datasets/wrapper.py
davesean/modular_semantic_segmentation
5f9e34243915b862e8fef5e6195f1e29f4cebf50
[ "BSD-3-Clause" ]
9
2018-08-01T15:03:03.000Z
2019-12-17T05:12:48.000Z
from abc import ABCMeta, abstractmethod


class DataWrapper:
    """Interface for access to datasets."""

    __metaclass__ = ABCMeta

    @abstractmethod
    def next(self):
        """Returns next minibatch for training."""
        # Raise, not return: a subclass that forgets to override must fail loudly.
        raise NotImplementedError
20.076923
50
0.685824
218
0.835249
0
0
121
0.463602
0
0
81
0.310345
a3f937683bc5952ca13a05b1c4f5742ed9f21027
2,307
py
Python
partd/core.py
jrbourbeau/partd
74016a296a760de9c7a0e0d4b012a3478c9a0831
[ "BSD-3-Clause" ]
2
2018-12-29T13:47:40.000Z
2018-12-29T13:47:49.000Z
partd/core.py
jrbourbeau/partd
74016a296a760de9c7a0e0d4b012a3478c9a0831
[ "BSD-3-Clause" ]
2
2021-05-11T16:00:55.000Z
2021-08-23T20:45:22.000Z
partd/core.py
jrbourbeau/partd
74016a296a760de9c7a0e0d4b012a3478c9a0831
[ "BSD-3-Clause" ]
null
null
null
from __future__ import absolute_import import os import shutil import locket import string from toolz import memoize from contextlib import contextmanager from .utils import nested_get, flatten # http://stackoverflow.com/questions/295135/turn-a-string-into-a-valid-filename-in-python valid_chars = "-_.() " + string.ascii_letters + string.digits + os.path.sep def escape_filename(fn): """ Escape text so that it is a valid filename >>> escape_filename('Foo!bar?') 'Foobar' """ return ''.join(filter(valid_chars.__contains__, fn)) def filename(path, key): return os.path.join(path, escape_filename(token(key))) def token(key): """ >>> token('hello') 'hello' >>> token(('hello', 'world')) # doctest: +SKIP 'hello/world' """ if isinstance(key, str): return key elif isinstance(key, tuple): return os.path.join(*map(token, key)) else: return str(key) class Interface(object): def __init__(self): self._iset_seen = set() def __setstate__(self, state): self.__dict__.update(state) self._iset_seen = set() def iset(self, key, value, **kwargs): if key in self._iset_seen: return else: self._iset(key, value, **kwargs) self._iset_seen.add(key) def __enter__(self): return self def __exit__(self, type, value, traceback): self.drop() def iget(self, key): return self._get([key], lock=False)[0] def get(self, keys, **kwargs): if not isinstance(keys, list): return self.get([keys], **kwargs)[0] elif any(isinstance(key, list) for key in keys): # nested case flatkeys = list(flatten(keys)) result = self.get(flatkeys, **kwargs) return nested_get(keys, dict(zip(flatkeys, result))) else: return self._get(keys, **kwargs) def delete(self, keys, **kwargs): if not isinstance(keys, list): return self._delete([keys], **kwargs) else: return self._delete(keys, **kwargs) def pop(self, keys, **kwargs): with self.partd.lock: result = self.partd.get(keys, lock=False) self.partd.delete(keys, lock=False) return result
24.806452
89
0.604681
1,358
0.588643
0
0
0
0
0
0
334
0.144777
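A quick illustration of how filename() in the partd/core.py record above composes a storage path from an escaped key token. The directory and key are made-up values, the import path assumes the installed partd package, and a POSIX path separator is assumed.

from partd.core import filename  # import path assumed

# ('hello', 'wor?ld') -> token 'hello/wor?ld' -> escaped 'hello/world'
print(filename('/data/partd', ('hello', 'wor?ld')))   # /data/partd/hello/world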
a3f93f71de692d828156d343cfeb58d0babb5f0e
1,248
py
Python
pretraining/model_ensemble.py
VITA-Group/Adv-SS-Pretraining
4ffbebea582f858ec6165f082f52ded1fc9b817d
[ "MIT" ]
32
2020-08-31T01:28:29.000Z
2022-03-19T05:40:05.000Z
pretraining/model_ensemble.py
VITA-Group/Adv-SS-Pretraining
4ffbebea582f858ec6165f082f52ded1fc9b817d
[ "MIT" ]
null
null
null
pretraining/model_ensemble.py
VITA-Group/Adv-SS-Pretraining
4ffbebea582f858ec6165f082f52ded1fc9b817d
[ "MIT" ]
7
2020-09-19T14:03:47.000Z
2020-12-10T00:42:08.000Z
'''
model ensemble for cifar10 // input size(32,32)
'''

import torch
import torchvision
import copy
import torch.nn as nn

from resnetv2 import ResNet50 as resnet50v2


def split_resnet50(model):
    return nn.Sequential(
        model.conv1,
        model.layer1,
        model.layer2,
        model.layer3
    )


class PretrainEnsembleModel(nn.Module):

    def __init__(self):
        super(PretrainEnsembleModel, self).__init__()
        self.blocks = split_resnet50(resnet50v2())
        self.layer4_rotation = resnet50v2().layer4
        self.layer4_jigsaw = resnet50v2().layer4
        self.fc_rotation = nn.Linear(2048, 4)
        self.fc_jigsaw = nn.Linear(2048, 31)
        self.avgpool1 = nn.AdaptiveAvgPool2d((1, 1))
        self.avgpool2 = nn.AdaptiveAvgPool2d((1, 1))
        self.avgpool3 = nn.AdaptiveAvgPool2d((1, 1))

    def _Normal(self, x):
        mean = torch.Tensor([0.485, 0.456, 0.406])
        mean = mean[None, :, None, None].cuda()
        std = torch.Tensor([0.229, 0.224, 0.225])
        std = std[None, :, None, None].cuda()
        return x.sub(mean).div(std)

    def forward(self, x):
        feature_map = self.blocks(self._Normal(x))
        return feature_map
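A forward pass through the shared trunk can be smoke-tested as in the sketch below. It assumes the repository's resnetv2 module is importable and that a CUDA device is available, since _Normal() moves the normalization constants onto the GPU.

import torch

model = PretrainEnsembleModel().cuda().eval()
x = torch.rand(2, 3, 32, 32).cuda()   # CIFAR-10 sized input batch
with torch.no_grad():
    feats = model(x)                  # features from conv1 + layer1-3
print(feats.shape)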
23.54717
54
0.604167
905
0.72516
0
0
0
0
0
0
58
0.046474
a3fa8d70909c40648b30be357bd41df712b21d5f
1,519
py
Python
scripts/ccdf.py
glciampaglia/HoaxyBots
db8d2b7d9927d5d4d94ded125f9785590dace906
[ "MIT" ]
null
null
null
scripts/ccdf.py
glciampaglia/HoaxyBots
db8d2b7d9927d5d4d94ded125f9785590dace906
[ "MIT" ]
null
null
null
scripts/ccdf.py
glciampaglia/HoaxyBots
db8d2b7d9927d5d4d94ded125f9785590dace906
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Functions that implement the Complementary Cumulative Distribution
Function (CCDF).
"""
#
# written by Chengcheng Shao <[email protected]>

import numpy as np
import pandas as pd


def ccdf(s):
    """
    Parameters:
        `s`, series, the values of `s` are the variable to be handled
    Return:
        a new series `s`, index of s will be X axis (number), value of
        s will be Y axis (probability)
    """
    s = s.copy()
    s = s.sort_values(ascending=True, inplace=False)
    s.reset_index(drop=True, inplace=True)
    n = len(s)
    s.drop_duplicates(keep='first', inplace=True)
    X = s.values
    Y = [n - i for i in s.index]
    return pd.Series(data=Y, index=X) / n


def sum_cdf(s):
    s = s.copy()
    s = s.value_counts()
    s = s.sort_index(ascending=True)
    cumulative = []
    for i in range(len(s)):
        s0 = s.iloc[:i + 1]
        cumulative.append(np.inner(s0.index, s0.values))
    s = pd.Series(cumulative, index=s.index)
    return s / s.max()


def sum_ccdf(s):
    """
    Parameters:
        `s`, series, the values of `s` are the variable to be handled
    Return:
        a new series `s`, index of s will be X axis (number), values
        will be Y axis (sum(X>=x))
    """
    s = s.copy()
    s = s.value_counts()
    s = s.sort_index(ascending=True)
    cumulative = []
    for i in range(len(s)):
        s1 = s.iloc[i:]
        cumulative.append(np.inner(s1.index, s1.values))
    return pd.Series(cumulative, index=s.index)
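ccdf() therefore returns, for each distinct value x in the input, the empirical probability that an observation is greater than or equal to x. A small, hypothetical usage example:

import pandas as pd

degrees = pd.Series([1, 1, 2, 3, 3, 3, 10])
print(ccdf(degrees))
# index 1  -> 7/7   (every observation is >= 1)
# index 2  -> 5/7
# index 3  -> 4/7
# index 10 -> 1/7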
25.745763
72
0.601712
0
0
0
0
0
0
0
0
613
0.403555
a3fac0d6ce92c1972de436f2ce748bbe19017407
5,335
py
Python
lifelines/fitters/kaplan_meier_fitter.py
eliracho37/lifelines
b1c6c2732d1ccfc2ae08f7178371d0f95ae3027b
[ "MIT" ]
null
null
null
lifelines/fitters/kaplan_meier_fitter.py
eliracho37/lifelines
b1c6c2732d1ccfc2ae08f7178371d0f95ae3027b
[ "MIT" ]
null
null
null
lifelines/fitters/kaplan_meier_fitter.py
eliracho37/lifelines
b1c6c2732d1ccfc2ae08f7178371d0f95ae3027b
[ "MIT" ]
1
2020-05-06T14:46:25.000Z
2020-05-06T14:46:25.000Z
# -*- coding: utf-8 -*-
from __future__ import print_function

import numpy as np
import pandas as pd

from lifelines.fitters import UnivariateFitter
from lifelines.utils import _preprocess_inputs, _additive_estimate, StatError, inv_normal_cdf,\
    median_survival_times
from lifelines.plotting import plot_loglogs


class KaplanMeierFitter(UnivariateFitter):

    """
    Class for fitting the Kaplan-Meier estimate for the survival function.

    KaplanMeierFitter(alpha=0.95)

    alpha: The alpha value associated with the confidence intervals.
    """

    def fit(self, durations, event_observed=None, timeline=None, entry=None, label='KM_estimate',
            alpha=None, left_censorship=False, ci_labels=None):
        """
        Parameters:
          duration: an array, or pd.Series, of length n -- duration subject was observed for
          timeline: return the best estimate at the values in timelines (positively increasing)
          event_observed: an array, or pd.Series, of length n -- True if the death was observed, False if the event
             was lost (right-censored). Defaults all True if event_observed==None
          entry: an array, or pd.Series, of length n -- relative time when a subject entered the study. This is
             useful for left-truncated (not left-censored) observations. If None, all members of the population
             were born at time 0.
          label: a string to name the column of the estimate.
          alpha: the alpha value in the confidence intervals. Overrides the initializing
             alpha for this call to fit only.
          left_censorship: True if durations and event_observed refer to left censorship events. Default False
          ci_labels: add custom column names to the generated confidence intervals
                as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha>

        Returns:
          self, with new properties like 'survival_function_'.
        """
        # if the user is interested in left-censorship, we return the cumulative_density_, not survival_function_
        estimate_name = 'survival_function_' if not left_censorship else 'cumulative_density_'

        v = _preprocess_inputs(durations, event_observed, timeline, entry)
        self.durations, self.event_observed, self.timeline, self.entry, self.event_table = v

        self._label = label
        alpha = alpha if alpha else self.alpha
        log_survival_function, cumulative_sq_ = _additive_estimate(self.event_table, self.timeline,
                                                                   self._additive_f, self._additive_var,
                                                                   left_censorship)

        if entry is not None:
            # a serious problem with KM is that when the sample size is small and there are too few early
            # truncation times, it may happen that the number of patients at risk and the number of deaths are the same.
            # we adjust for this using the Breslow-Fleming-Harrington estimator
            n = self.event_table.shape[0]
            net_population = (self.event_table['entrance'] - self.event_table['removed']).cumsum()
            if net_population.iloc[:int(n / 2)].min() == 0:
                ix = net_population.iloc[:int(n / 2)].argmin()
                raise StatError("""There are too few early truncation times and too many events. S(t)==0 for all t>%.1f.
Recommend BreslowFlemingHarringtonFitter.""" % ix)

        # estimation
        setattr(self, estimate_name, pd.DataFrame(np.exp(log_survival_function), columns=[self._label]))
        self.__estimate = getattr(self, estimate_name)
        self.confidence_interval_ = self._bounds(cumulative_sq_[:, None], alpha, ci_labels)
        self.median_ = median_survival_times(self.__estimate, left_censorship=left_censorship)

        # estimation methods
        self.predict = self._predict(estimate_name, label)
        self.subtract = self._subtract(estimate_name)
        self.divide = self._divide(estimate_name)

        # plotting functions
        self.plot = self._plot_estimate(estimate_name)
        setattr(self, "plot_" + estimate_name, self.plot)
        self.plot_loglogs = plot_loglogs(self)
        return self

    def _bounds(self, cumulative_sq_, alpha, ci_labels):
        # See http://courses.nus.edu.sg/course/stacar/internet/st3242/handouts/notes2.pdf
        alpha2 = inv_normal_cdf((1. + alpha) / 2.)
        df = pd.DataFrame(index=self.timeline)
        v = np.log(self.__estimate.values)

        if ci_labels is None:
            ci_labels = ["%s_upper_%.2f" % (self._label, alpha), "%s_lower_%.2f" % (self._label, alpha)]
        assert len(ci_labels) == 2, "ci_labels should be a length 2 array."

        df[ci_labels[0]] = np.exp(-np.exp(np.log(-v) + alpha2 * np.sqrt(cumulative_sq_) / v))
        df[ci_labels[1]] = np.exp(-np.exp(np.log(-v) - alpha2 * np.sqrt(cumulative_sq_) / v))
        return df

    def _additive_f(self, population, deaths):
        np.seterr(invalid='ignore', divide='ignore')
        return (np.log(population - deaths) - np.log(population))

    def _additive_var(self, population, deaths):
        np.seterr(divide='ignore')
        return (1. * deaths / (population * (population - deaths))).replace([np.inf], 0)
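Typical usage of the fitter, assuming a lifelines release matching the code above is installed; the durations and censoring flags below are made-up toy data.

import numpy as np
from lifelines import KaplanMeierFitter

durations      = np.array([5, 6, 6, 2.5, 4, 4, 9, 12])   # observed times
event_observed = np.array([1, 0, 1, 1,   1, 0, 1, 0])    # 1 = death seen, 0 = right-censored

kmf = KaplanMeierFitter()
kmf.fit(durations, event_observed=event_observed, label='toy_cohort')

print(kmf.survival_function_)     # step-function estimate of S(t)
print(kmf.median_)                # median survival time
print(kmf.confidence_interval_)   # bounds produced by _bounds() above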
51.298077
171
0.662418
5,017
0.940394
0
0
0
0
0
0
2,296
0.430366
a3fad8e543716f6089f3ea4419938d3d14b1b941
8,498
py
Python
pydmfet/qcwrap/pyscf_rhf.py
fishjojo/pydmfe
93cfc655314933d3531b5733521a1f95a044f6cb
[ "MIT" ]
3
2021-02-26T06:26:00.000Z
2022-02-20T08:58:20.000Z
pydmfet/qcwrap/pyscf_rhf.py
fishjojo/pydmfet
93cfc655314933d3531b5733521a1f95a044f6cb
[ "MIT" ]
null
null
null
pydmfet/qcwrap/pyscf_rhf.py
fishjojo/pydmfet
93cfc655314933d3531b5733521a1f95a044f6cb
[ "MIT" ]
null
null
null
import numpy as np from pydmfet import tools from .fermi import find_efermi, entropy_corr from pyscf import ao2mo, gto, scf, dft, lib from pydmfet.qcwrap import fermi import time from functools import reduce def scf_oei( OEI, Norb, Nelec, smear_sigma = 0.0): OEI = 0.5*(OEI.T + OEI) eigenvals, eigenvecs = np.linalg.eigh( OEI ) idx = np.argmax(abs(eigenvecs), axis=0) eigenvecs[:,eigenvecs[ idx, np.arange(len(eigenvals)) ]<0] *= -1 Nocc = Nelec//2 #closed shell e_homo = eigenvals[Nocc-1] e_lumo = eigenvals[Nocc] print ('HOMO: ', e_homo, 'LUMO: ', e_lumo) print ("mo_energy:") print (eigenvals[:Nocc+5]) e_fermi = e_homo mo_occ = np.zeros((Norb)) if(smear_sigma < 1e-8): #T=0 mo_occ[:Nocc] = 1.0 else: #finite T e_fermi, mo_occ = find_efermi(eigenvals, smear_sigma, Nocc, Norb) mo_occ*=2.0 #closed shell Ne_error = np.sum(mo_occ) - Nelec if(Ne_error > 1e-8): print ('Ne error = ', Ne_error) print ("fermi energy: ", e_fermi) np.set_printoptions(precision=4) flag = mo_occ > 1e-4 print (mo_occ[flag]) np.set_printoptions() RDM1 = reduce(np.dot, (eigenvecs, np.diag(mo_occ), eigenvecs.T)) RDM1 = (RDM1.T + RDM1)/2.0 energy = np.trace(np.dot(RDM1,OEI)) es = entropy_corr(mo_occ, smear_sigma) print ('entropy correction: ', es) energy += es print ('e_tot = ', energy) return ( energy, RDM1, eigenvecs, eigenvals, mo_occ ) # The following is deprecated! class scf_pyscf(): ''' subspace scf wrapper for scf module of pyscf ''' def __init__(self, Ne, Norb, mol=None, oei=None, tei=None, ovlp=1, dm0=None, coredm=0, ao2sub=None, mf_method='HF'): self.mol = mol self.Ne = Ne self.Norb = Norb self.method = mf_method self.oei = oei self.tei = tei self.ovlp = ovlp self.dm0 = dm0 self.coredm = coredm self.ao2sub = ao2sub self.method = mf_method.lower() self.mf = None if(self.mol is None): #what molecule does not matter self.mol = gto.Mole() self.mol.build( verbose=0 ) self.mol.atom.append(('C', (0, 0, 0))) #adjust number of electrons self.mol.nelectron = Ne if(self.tei is not None): self.mol.incore_anyway = True if(self.method == 'hf'): self.mf = scf.RHF(self.mol) self.prep_rhf() else: self.mf = scf.RKS(self.mol) self.mf.xc = self.method self.prep_rhf() self.prep_rks() self.elec_energy = 0.0 self.rdm1 = None self.mo_coeff = None self.mo_energy = None self.mo_occ = None def prep_rhf(self): if(self.ovlp == 1): self.mf.get_ovlp = lambda *args: np.eye( self.Norb ) if(self.oei is not None): self.mf.get_hcore = lambda *args: self.oei if(self.tei is not None): self.mf._eri = ao2mo.restore(8, self.tei, self.Norb) def prep_rks(self): if(self.ao2sub is None): return #overload dft.rks.get_veff if necessary self.mf.get_veff = get_veff_rks_decorator(self.ao2sub, self.coredm) def kernel(self): self.mf.kernel(self.dm0) if ( self.mf.converged == False ): raise Exception("scf not converged!") rdm1 = self.mf.make_rdm1() self.rdm1 = 0.5*(rdm1.T + rdm1) self.elec_energy = self.mf.energy_elec(self.rdm1)[0] self.mo_coeff = self.mf.mo_coeff self.mo_energy = self.mf.mo_energy self.mo_occ = self.mf.mo_occ def get_veff_rks_decorator(ao2sub, coredm): def get_veff(ks, mol=None, dm=None, dm_last=0, vhf_last=0, hermi=1): if mol is None: mol = ks.mol if dm is None: dm = ks.make_rdm1() dm_sub = np.asarray(dm) + coredm dm_ao = tools.dm_sub2ao(dm_sub, ao2sub) if hasattr(dm, 'mo_coeff'): mo_coeff_sub = dm.mo_coeff mo_occ_sub = dm.mo_occ mo_coeff_ao = tools.mo_sub2ao(mo_coeff_sub, ao2sub) mo_occ_ao = mo_occ_sub dm_ao = lib.tag_array(dm_ao, mo_coeff=mo_coeff_ao, mo_occ=mo_occ_ao) n, exc, vxc_ao, hyb = get_vxc(ks, mol, dm_ao) vxc = tools.op_ao2sub(vxc_ao, ao2sub) 
vj = None vk = None if abs(hyb) < 1e-10: if (ks._eri is None and ks.direct_scf and getattr(vhf_last, 'vj', None) is not None): ddm = numpy.asarray(dm) - numpy.asarray(dm_last) vj = ks.get_jk(mol, ddm, hermi)[0] vj += vhf_last.vj else: vj = ks.get_jk(mol, dm, hermi)[0] vxc += vj else: if (ks._eri is None and ks.direct_scf and getattr(vhf_last, 'vk', None) is not None): ddm = numpy.asarray(dm) - numpy.asarray(dm_last) vj, vk = ks.get_jk(mol, ddm, hermi) vj += vhf_last.vj vk += vhf_last.vk else: vj, vk = ks.get_jk(mol, dm, hermi) vxc += vj - vk * (hyb * .5) exc -= np.einsum('ij,ji', dm, vk) * .5 * hyb*.5 ecoul = np.einsum('ij,ji', dm, vj) * .5 vxc = lib.tag_array(vxc, ecoul=ecoul, exc=exc, vj=vj, vk=vk) return vxc return get_veff def get_vxc(ks, mol, dm, hermi=1): ground_state = (isinstance(dm, numpy.ndarray) and dm.ndim == 2) if(not ground_state): raise Exception("fatal error") if ks.grids.coords is None: ks.grids.build(with_non0tab=True) if ks.small_rho_cutoff > 1e-20 and ground_state: # Filter grids the first time setup grids t0 = (time.clock(), time.time()) ks.grids = dft.rks.prune_small_rho_grids_(ks, mol, dm, ks.grids) t1 = tools.timer("prune grid",t0) if hermi == 2: # because rho = 0 n, exc, vxc = 0, 0, 0 else: n, exc, vxc = ks._numint.nr_rks(mol, ks.grids, ks.xc, dm) hyb = ks._numint.hybrid_coeff(ks.xc, spin=mol.spin) return n, exc, vxc, hyb ''' def rhf(mol, OEI, TEI, Norb, Nelec, OneDM0=None ): # Get the RHF solution OEI = 0.5*(OEI.T + OEI) #mol = gto.Mole() #mol.max_memory = 8000 #mol.build( verbose=0 ) #mol.atom.append(('C', (0, 0, 0))) mol.nelectron = Nelec mol.incore_anyway = True mf = pyscf_scf.RHF( mol ) mf.get_hcore = lambda *args: OEI mf.get_ovlp = lambda *args: np.eye( Norb ) mf._eri = ao2mo.restore(8, TEI, Norb) mf.max_cycle = 100 #mf.conv_tol = 1e-8 #adiis = pyscf_scf.diis.ADIIS() #mf.diis = adiis #mf.verbose = 5 mf.kernel(OneDM0) if ( mf.converged == False ): #RDM1 = mf.make_rdm1() #cdiis = pyscf_scf.diis.SCF_DIIS() #mf.diis = cdiis #mf.max_cycle = 200 #mf.kernel(RDM1) if ( mf.converged == False ): raise Exception(" rhf not converged!") return mf def rks(mol, OEI, TEI, Norb, Nelec, xcfunc, OneDM0=None ): # Get the RKS solution OEI = 0.5*(OEI.T + OEI) #mol = gto.Mole() #mol.build( verbose=5 ) #mol.atom.append(('C', (0, 0, 0))) mol.nelectron = Nelec # mol.incore_anyway = True mf = pyscf_scf.RKS( mol ) mf.xc = xcfunc.lower() # mf.get_hcore = lambda *args: OEI # mf.get_ovlp = lambda *args: np.eye( Norb ) # mf._eri = ao2mo.restore(8, TEI, Norb) OneDM0 = None mf.kernel( OneDM0 ) if ( mf.converged == False ): raise Exception(" rks not converged!") return mf def scf(mol, OEI, TEI, Norb, Nelec, OneDM0=None, mf_method = 'HF' ): # Get the mean-field solution if(mf_method.lower() == 'hf'): mf = rhf(mol, OEI, TEI, Norb, Nelec, OneDM0 ) else: mf = rks(mol, OEI, TEI, Norb, Nelec, mf_method ,OneDM0 ) RDM1 = mf.make_rdm1() RDM1 = 0.5*(RDM1.T + RDM1) mo_coeff = mf.mo_coeff mo_energy = mf.mo_energy energy = mf.energy_elec(RDM1)[0] mo = np.zeros([Norb,Norb+1],dtype=float) mo[:,:-1] = mo_coeff mo[:,-1] = mo_energy #print "mo energy" #print mf.mo_energy #tools.MatPrint(mf.get_fock(),"fock") #JK = mf.get_veff(None, dm=RDM1) #tools.MatPrint(JK,"JK") #tools.MatPrint(np.dot(mf.get_fock(), mf.mo_coeff),"test") #tools.MatPrint(mf.mo_coeff,"mo_coeff") return (energy, RDM1, mo) '''
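scf_oei() at the top of this file diagonalizes a one-electron Hamiltonian, fills the lowest Nelec/2 orbitals (optionally with Fermi smearing), and returns the energy together with the resulting density matrix. A hypothetical smoke test, assuming the pydmfet package (which provides find_efermi and entropy_corr) is importable:

import numpy as np

np.random.seed(0)
Norb, Nelec = 6, 4
A = np.random.rand(Norb, Norb)
OEI = 0.5 * (A + A.T)              # symmetric one-electron Hamiltonian
energy, rdm1, mo, mo_e, occ = scf_oei(OEI, Norb, Nelec, smear_sigma=0.0)
print(np.isclose(np.trace(rdm1), Nelec))   # True: the RDM traces to the electron count
print(np.allclose(rdm1, rdm1.T))           # True: the density matrix is symmetric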
27.324759
120
0.567075
2,198
0.258649
0
0
0
0
0
0
2,751
0.323723
a3fb5ed7db5f1e3f6fa910bcda6e0eacb2e1cb30
2,470
py
Python
backends/fortify/summarize-fortify.py
tautschnig/one-line-scan
24e1deedd595e3406eb8d5c69ff9629c5a87d0aa
[ "Apache-2.0" ]
16
2018-05-21T09:53:01.000Z
2022-02-15T08:48:11.000Z
backends/fortify/summarize-fortify.py
tautschnig/one-line-scan
24e1deedd595e3406eb8d5c69ff9629c5a87d0aa
[ "Apache-2.0" ]
6
2018-07-09T06:13:05.000Z
2020-11-11T14:49:35.000Z
backends/fortify/summarize-fortify.py
tautschnig/one-line-scan
24e1deedd595e3406eb8d5c69ff9629c5a87d0aa
[ "Apache-2.0" ]
5
2019-05-27T19:51:48.000Z
2021-12-14T13:04:49.000Z
#!/usr/bin/env python
#
#  Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License").
#  You may not use this file except in compliance with the License.
#  A copy of the License is located at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  or in the "license" file accompanying this file. This file is distributed
#  on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
#  express or implied. See the License for the specific language governing
#  permissions and limitations under the License.
#
# Parse the report.html of Fortify and create an ASCII summary

import os
import sys
from subprocess import call
from xml.etree import ElementTree

# print usage
if len(sys.argv) != 2:
    print "usage: summarize-fortify.py LOGDIR"
    sys.exit(1)

# get directory where the logs are placed
logdir = sys.argv[1]

# strip this part of the directory information of
workdirectory = os.getcwd() + '/'

# get the fortify report; first make it valid XML
filename = logdir + '/log/report.html'
call(['perl', '-p', '-i', '-e', 's#<((img|meta) [^>]+)>#<$1/>#', filename])
# make sure we can run this script multiple times on the same html file
call(['perl', '-p', '-i', '-e', 's#//>#/>#', filename])

# parse the html file and jump to the last table
data = ElementTree.parse(filename).getroot()
table = data.find('.//table')[-1]

# iterate over all rows and print their content in a more useable format
for data in table.iter('tr'):
    # handle only the rows that contain results
    if len(data) != 4:
        continue

    # extract file information, convert absolute path into relative one
    location = data[2].find('a')
    # header does not have <a ...>
    if location is None:
        continue

    filename = location.get('href')
    filename = filename.replace('file://', '')
    filename = filename.replace(workdirectory, '')

    severity = data[3].text
    if severity is None:
        severity = data[3].find('span').text

    # strip newline and space sequences
    problem = data[0].text.replace('\n', '').replace('\r', '')
    short = problem.replace('  ', ' ')
    while len(short) < len(problem):
        problem = short
        short = problem.replace('  ', ' ')

    column = ElementTree.tostring(data[2].findall("*")[0]).split(':')[2]

    printstring = filename + ':' + column.strip() + ', ' + \
        severity.strip() + ', ' + \
        problem
    if data[1].text is not None:
        printstring = printstring + ', ' + data[1].text

    print printstring
33.835616
77
0.681781
0
0
0
0
0
0
0
0
1,402
0.567611
a3fc78d36ccfb5728f04880a3739b99e0d64d7a7
91,209
py
Python
angr/procedures/definitions/win32_wsmsvc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_wsmsvc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_wsmsvc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
# pylint:disable=line-too-long import logging from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64 from .. import SIM_PROCEDURES as P from . import SimLibrary _l = logging.getLogger(name=__name__) lib = SimLibrary() lib.set_default_cc('X86', SimCCStdcall) lib.set_default_cc('AMD64', SimCCMicrosoftAMD64) lib.set_library_names("wsmsvc.dll") prototypes = \ { # 'WSManInitialize': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_API", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["flags", "apiHandle"]), # 'WSManDeinitialize': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_API", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["apiHandle", "flags"]), # 'WSManGetErrorMessage': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_API", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["apiHandle", "flags", "languageCode", "errorCode", "messageLength", "message", "messageLengthUsed"]), # 'WSManCreateSession': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_API", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"authenticationMechanism": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"userAccount": SimStruct({"username": SimTypePointer(SimTypeChar(label="Char"), offset=0), "password": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_USERNAME_PASSWORD_CREDS", pack=False, align=None), "certificateThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="<anon>", label="None")}, name="WSMAN_AUTHENTICATION_CREDENTIALS", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"accessType": SimTypeInt(signed=False, label="UInt32"), "authenticationCredentials": SimStruct({"authenticationMechanism": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"userAccount": SimStruct({"username": SimTypePointer(SimTypeChar(label="Char"), offset=0), "password": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_USERNAME_PASSWORD_CREDS", pack=False, align=None), "certificateThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="<anon>", label="None")}, name="WSMAN_AUTHENTICATION_CREDENTIALS", pack=False, align=None)}, name="WSMAN_PROXY_INFO", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["apiHandle", "connection", "flags", "serverAuthenticationCredentials", "proxyInfo", "session"]), # 'WSManCloseSession': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, 
label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["session", "flags"]), # 'WSManSetSessionOption': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="WSManSessionOption"), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["session", "option", "data"]), # 'WSManGetSessionOptionAsDword': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="WSManSessionOption"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["session", "option", "value"]), # 'WSManGetSessionOptionAsString': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="WSManSessionOption"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["session", "option", "stringLength", "string", "stringLengthUsed"]), # 'WSManCloseOperation': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["operationHandle", "flags"]), # 'WSManCreateShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_wsman_L665_C48": SimStruct({"inputStreamSet": SimTypePointer(SimStruct({"streamIDsCount": SimTypeInt(signed=False, label="UInt32"), "streamIDs": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_STREAM_ID_SET", pack=False, align=None), offset=0), "outputStreamSet": SimTypePointer(SimStruct({"streamIDsCount": SimTypeInt(signed=False, label="UInt32"), "streamIDs": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_STREAM_ID_SET", pack=False, align=None), offset=0), "idleTimeoutMs": SimTypeInt(signed=False, label="UInt32"), "workingDirectory": SimTypePointer(SimTypeChar(label="Char"), offset=0), "variableSet": SimTypePointer(SimStruct({"varsCount": SimTypeInt(signed=False, label="UInt32"), "vars": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ENVIRONMENT_VARIABLE", pack=False, align=None), offset=0)}, name="WSMAN_ENVIRONMENT_VARIABLE_SET", pack=False, align=None), offset=0)}, name="WSMAN_SHELL_STARTUP_INFO_V10", pack=False, align=None), "name": SimTypePointer(SimTypeChar(label="Char"), 
offset=0)}, name="WSMAN_SHELL_STARTUP_INFO_V11", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": 
SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["session", "flags", "resourceUri", "startupInfo", "options", "createXml", "async", "shell"]), # 'WSManRunShellCommand': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"argsCount": SimTypeInt(signed=False, label="UInt32"), "args": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_COMMAND_ARG_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": 
SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "flags", "commandLine", "args", "options", "async", "command"]), # 'WSManSignalShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, 
name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "command", "flags", "code", "async", "signalOperation"]), # 'WSManReceiveShellOutput': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"streamIDsCount": SimTypeInt(signed=False, label="UInt32"), "streamIDs": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_STREAM_ID_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": 
SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "command", "flags", "desiredStreamSet", "async", 
"receiveOperation"]), # 'WSManSendShellInput': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": 
SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "command", "flags", "streamId", "streamData", "endOfStream", "async", "sendOperation"]), # 'WSManCloseCommand': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), 
"data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["commandHandle", "flags", "async"]), # 'WSManCloseShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": 
SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["shellHandle", "flags", "async"]), # 'WSManCreateShellEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_wsman_L665_C48": SimStruct({"inputStreamSet": SimTypePointer(SimStruct({"streamIDsCount": SimTypeInt(signed=False, label="UInt32"), "streamIDs": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_STREAM_ID_SET", pack=False, align=None), offset=0), "outputStreamSet": SimTypePointer(SimStruct({"streamIDsCount": SimTypeInt(signed=False, label="UInt32"), "streamIDs": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_STREAM_ID_SET", pack=False, align=None), offset=0), "idleTimeoutMs": SimTypeInt(signed=False, label="UInt32"), "workingDirectory": SimTypePointer(SimTypeChar(label="Char"), offset=0), "variableSet": SimTypePointer(SimStruct({"varsCount": SimTypeInt(signed=False, label="UInt32"), "vars": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ENVIRONMENT_VARIABLE", pack=False, align=None), offset=0)}, name="WSMAN_ENVIRONMENT_VARIABLE_SET", pack=False, align=None), offset=0)}, name="WSMAN_SHELL_STARTUP_INFO_V10", pack=False, align=None), "name": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SHELL_STARTUP_INFO_V11", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, 
align=None), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, 
label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["session", "flags", "resourceUri", "shellId", "startupInfo", "options", "createXml", "async", "shell"]), # 'WSManRunShellCommandEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"argsCount": SimTypeInt(signed=False, label="UInt32"), "args": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="WSMAN_COMMAND_ARG_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": 
SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "flags", "commandId", "commandLine", "args", "options", "async", "command"]), # 'WSManDisconnectShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"idleTimeoutMs": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_SHELL_DISCONNECT_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": 
SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "flags", "disconnectInfo", "async"]), # 'WSManReconnectShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": 
SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "flags", "async"]), # 'WSManReconnectShellCommand': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, 
align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["commandHandle", "flags", "async"]), # 'WSManConnectShell': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SESSION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": 
SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, 
label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["session", "flags", "resourceUri", "shellID", "options", "connectXml", "async", "shell"]), # 'WSManConnectShellCommand': SimTypeFunction([SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"operationContext": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "completionFunction": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"code": SimTypeInt(signed=False, label="UInt32"), "errorDetail": SimTypePointer(SimTypeChar(label="Char"), offset=0), "language": SimTypePointer(SimTypeChar(label="Char"), offset=0), "machineName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pluginName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_ERROR", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_SHELL", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="WSMAN_OPERATION", pack=False, align=None), offset=0), SimTypePointer(SimUnion({"receiveData": SimStruct({"streamId": SimTypePointer(SimTypeChar(label="Char"), offset=0), "streamData": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, 
name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), "commandState": SimTypePointer(SimTypeChar(label="Char"), offset=0), "exitCode": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_RECEIVE_DATA_RESULT", pack=False, align=None), "connectData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CONNECT_DATA", pack=False, align=None), "createData": SimStruct({"data": SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None)}, name="WSMAN_CREATE_SHELL_DATA", pack=False, align=None)}, name="<anon>", label="None"), offset=0)], SimTypeBottom(label="Void"), arg_names=["operationContext", "flags", "error", "shell", "command", "operationHandle", "data"]), offset=0)}, name="WSMAN_SHELL_ASYNC", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="WSMAN_COMMAND", pack=False, align=None), offset=0), offset=0)], SimTypeBottom(label="Void"), arg_names=["shell", "flags", "commandID", "options", "connectXml", "async", "command"]), # 'WSManPluginReportContext': SimTypeFunction([SimTypePointer(SimStruct({"senderDetails": SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), "locale": SimTypePointer(SimTypeChar(label="Char"), offset=0), "resourceUri": SimTypePointer(SimTypeChar(label="Char"), offset=0), "operationInfo": SimTypePointer(SimStruct({"fragment": SimStruct({"path": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FRAGMENT", pack=False, align=None), "filter": SimStruct({"filter": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FILTER", pack=False, 
align=None), "selectorSet": SimStruct({"numberKeys": SimTypeInt(signed=False, label="UInt32"), "keys": SimTypePointer(SimStruct({"key": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_KEY", pack=False, align=None), offset=0)}, name="WSMAN_SELECTOR_SET", pack=False, align=None), "optionSet": SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), "reserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "version": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_OPERATION_INFO", pack=False, align=None), offset=0), "shutdownNotification": SimTypeInt(signed=True, label="Int32"), "shutdownNotificationHandle": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "dataLocale": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_PLUGIN_REQUEST", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["requestDetails", "flags", "context"]), # 'WSManPluginReceiveResult': SimTypeFunction([SimTypePointer(SimStruct({"senderDetails": SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), "locale": SimTypePointer(SimTypeChar(label="Char"), offset=0), "resourceUri": SimTypePointer(SimTypeChar(label="Char"), offset=0), "operationInfo": SimTypePointer(SimStruct({"fragment": SimStruct({"path": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FRAGMENT", pack=False, align=None), "filter": SimStruct({"filter": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FILTER", pack=False, align=None), "selectorSet": SimStruct({"numberKeys": SimTypeInt(signed=False, label="UInt32"), "keys": SimTypePointer(SimStruct({"key": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_KEY", pack=False, align=None), offset=0)}, name="WSMAN_SELECTOR_SET", pack=False, align=None), "optionSet": SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), 
"mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), "reserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "version": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_OPERATION_INFO", pack=False, align=None), offset=0), "shutdownNotification": SimTypeInt(signed=True, label="Int32"), "shutdownNotificationHandle": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "dataLocale": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_PLUGIN_REQUEST", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["requestDetails", "flags", "stream", "streamResult", "commandState", "exitCode"]), # 'WSManPluginOperationComplete': SimTypeFunction([SimTypePointer(SimStruct({"senderDetails": SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), "locale": SimTypePointer(SimTypeChar(label="Char"), offset=0), "resourceUri": SimTypePointer(SimTypeChar(label="Char"), offset=0), "operationInfo": SimTypePointer(SimStruct({"fragment": SimStruct({"path": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FRAGMENT", pack=False, align=None), "filter": SimStruct({"filter": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FILTER", pack=False, align=None), "selectorSet": SimStruct({"numberKeys": SimTypeInt(signed=False, label="UInt32"), "keys": SimTypePointer(SimStruct({"key": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_KEY", pack=False, align=None), offset=0)}, name="WSMAN_SELECTOR_SET", pack=False, align=None), "optionSet": SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), 
"options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), "reserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "version": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_OPERATION_INFO", pack=False, align=None), offset=0), "shutdownNotification": SimTypeInt(signed=True, label="Int32"), "shutdownNotificationHandle": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "dataLocale": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_PLUGIN_REQUEST", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["requestDetails", "flags", "errorCode", "extendedInformation"]), # 'WSManPluginGetOperationParameters': SimTypeFunction([SimTypePointer(SimStruct({"senderDetails": SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), "locale": SimTypePointer(SimTypeChar(label="Char"), offset=0), "resourceUri": SimTypePointer(SimTypeChar(label="Char"), offset=0), "operationInfo": SimTypePointer(SimStruct({"fragment": SimStruct({"path": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FRAGMENT", pack=False, align=None), "filter": SimStruct({"filter": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FILTER", pack=False, align=None), "selectorSet": SimStruct({"numberKeys": SimTypeInt(signed=False, label="UInt32"), "keys": SimTypePointer(SimStruct({"key": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_KEY", pack=False, align=None), offset=0)}, name="WSMAN_SELECTOR_SET", pack=False, align=None), "optionSet": SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), "reserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "version": SimTypeInt(signed=False, label="UInt32")}, 
name="WSMAN_OPERATION_INFO", pack=False, align=None), offset=0), "shutdownNotification": SimTypeInt(signed=True, label="Int32"), "shutdownNotificationHandle": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "dataLocale": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_PLUGIN_REQUEST", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["requestDetails", "flags", "data"]), # 'WSManPluginGetConfiguration': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"type": SimTypeInt(signed=False, label="WSManDataType"), "Anonymous": SimUnion({"text": SimStruct({"bufferLength": SimTypeInt(signed=False, label="UInt32"), "buffer": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_DATA_TEXT", pack=False, align=None), "binaryData": SimStruct({"dataLength": SimTypeInt(signed=False, label="UInt32"), "data": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="WSMAN_DATA_BINARY", pack=False, align=None), "number": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None")}, name="WSMAN_DATA", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pluginContext", "flags", "data"]), # 'WSManPluginReportCompletion': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["pluginContext", "flags"]), # 'WSManPluginFreeRequestDetails': SimTypeFunction([SimTypePointer(SimStruct({"senderDetails": SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), "locale": SimTypePointer(SimTypeChar(label="Char"), offset=0), "resourceUri": SimTypePointer(SimTypeChar(label="Char"), offset=0), "operationInfo": SimTypePointer(SimStruct({"fragment": SimStruct({"path": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FRAGMENT", pack=False, align=None), "filter": SimStruct({"filter": 
SimTypePointer(SimTypeChar(label="Char"), offset=0), "dialect": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_FILTER", pack=False, align=None), "selectorSet": SimStruct({"numberKeys": SimTypeInt(signed=False, label="UInt32"), "keys": SimTypePointer(SimStruct({"key": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_KEY", pack=False, align=None), offset=0)}, name="WSMAN_SELECTOR_SET", pack=False, align=None), "optionSet": SimStruct({"optionsCount": SimTypeInt(signed=False, label="UInt32"), "options": SimTypePointer(SimStruct({"name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "value": SimTypePointer(SimTypeChar(label="Char"), offset=0), "mustComply": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION", pack=False, align=None), offset=0), "optionsMustUnderstand": SimTypeInt(signed=True, label="Int32")}, name="WSMAN_OPTION_SET", pack=False, align=None), "reserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "version": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_OPERATION_INFO", pack=False, align=None), offset=0), "shutdownNotification": SimTypeInt(signed=True, label="Int32"), "shutdownNotificationHandle": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "dataLocale": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_PLUGIN_REQUEST", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["requestDetails"]), # 'WSManPluginAuthzUserComplete': SimTypeFunction([SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["senderDetails", "flags", "userAuthorizationContext", "impersonationToken", "userIsAdministrator", "errorCode", "extendedErrorInformation"]), # 'WSManPluginAuthzOperationComplete': SimTypeFunction([SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": 
SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["senderDetails", "flags", "userAuthorizationContext", "errorCode", "extendedErrorInformation"]), # 'WSManPluginAuthzQueryQuotaComplete': SimTypeFunction([SimTypePointer(SimStruct({"senderName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "authenticationMechanism": SimTypePointer(SimTypeChar(label="Char"), offset=0), "certificateDetails": SimTypePointer(SimStruct({"subject": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "issuerThumbprint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "subjectName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_CERTIFICATE_DETAILS", pack=False, align=None), offset=0), "clientToken": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "httpURL": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="WSMAN_SENDER_DETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"maxAllowedConcurrentShells": SimTypeInt(signed=False, label="UInt32"), "maxAllowedConcurrentOperations": SimTypeInt(signed=False, label="UInt32"), "timeslotSize": SimTypeInt(signed=False, label="UInt32"), "maxAllowedOperationsPerTimeslot": SimTypeInt(signed=False, label="UInt32")}, name="WSMAN_AUTHZ_QUOTA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["senderDetails", "flags", "quota", "errorCode", "extendedErrorInformation"]), } lib.set_prototypes(prototypes)
1,036.465909
6,433
0.744082
0
0
0
0
0
0
0
0
28,165
0.308796
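Editorial aside on the record above: its content is an angr-style type-library file that registers WSMan API prototypes and ends with lib.set_prototypes(prototypes). As a hedged illustration of the constructor pattern repeated throughout that dictionary, the sketch below builds a single prototype by hand. It assumes angr is installed and that these classes are importable from angr.sim_type; the signature shown (a wide string plus a flags word, returning void) is invented for illustration and is not one of the real WSMan entries.

from angr.sim_type import (
    SimTypeFunction, SimTypeInt, SimTypePointer, SimTypeChar, SimTypeBottom)

# One hand-built prototype in the same style as the record above:
# void f(PCWSTR resourceUri, UINT32 flags) -- an invented example signature.
proto = SimTypeFunction(
    [SimTypePointer(SimTypeChar(label="Char"), offset=0),  # wide-string argument
     SimTypeInt(signed=False, label="UInt32")],            # flags argument
    SimTypeBottom(label="Void"),                           # no return value
    arg_names=["resourceUri", "flags"])

print(proto)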
a3fc7e9736f8ff7c6e4924c0d8a73afdf2dd7f02
81
py
Python
aiolookin/__init__.py
bachya/aiolookin
553731047b6910b1cb74667fbb343faf9b8656ac
[ "MIT" ]
null
null
null
aiolookin/__init__.py
bachya/aiolookin
553731047b6910b1cb74667fbb343faf9b8656ac
[ "MIT" ]
3
2021-08-16T21:32:30.000Z
2021-10-05T00:30:03.000Z
aiolookin/__init__.py
bachya/aiolookin
553731047b6910b1cb74667fbb343faf9b8656ac
[ "MIT" ]
null
null
null
"""Define the aiolookin package.""" from .device import async_get_device # noqa
27
44
0.753086
0
0
0
0
0
0
0
0
41
0.506173
a3fcab0b445701487209c1560ead48389dc4c643
2,067
py
Python
odepy/collision_space.py
yuemingl/ode-python-1
a9a12d9d3b7e611874a8d30f6a5c0b83b6087f86
[ "MIT" ]
9
2020-05-31T09:22:40.000Z
2021-09-15T18:15:15.000Z
odepy/collision_space.py
yuemingl/ode-python-1
a9a12d9d3b7e611874a8d30f6a5c0b83b6087f86
[ "MIT" ]
1
2020-11-15T11:38:45.000Z
2020-11-15T11:38:45.000Z
odepy/collision_space.py
yuemingl/ode-python-1
a9a12d9d3b7e611874a8d30f6a5c0b83b6087f86
[ "MIT" ]
2
2020-11-14T21:47:01.000Z
2021-08-03T02:28:10.000Z
# -*- coding: utf-8 -*-

from .common import loadOde
from .common import dGeomID
from .common import dSpaceID
from .common import dVector3

from ctypes import POINTER
from ctypes import CFUNCTYPE
from ctypes import c_void_p
from ctypes import c_int32

dNearCallback = CFUNCTYPE(None, c_void_p, dGeomID, dGeomID)


def dSimpleSpaceCreate(space):
    if isinstance(space, int):
        return loadOde('dSimpleSpaceCreate', dSpaceID, c_int32)(space)
    else:
        return loadOde('dSimpleSpaceCreate', dSpaceID, dSpaceID)(space)


def dHashSpaceCreate(space):
    if isinstance(space, int):
        return loadOde('dHashSpaceCreate', dSpaceID, c_int32)(space)
    else:
        return loadOde('dHashSpaceCreate', dSpaceID, dSpaceID)(space)


dQuadTreeSpaceCreate = loadOde('dQuadTreeSpaceCreate', dSpaceID, dSpaceID, dVector3, dVector3, c_int32)
dSweepAndPruneSpaceCreate = loadOde('dSweepAndPruneSpaceCreate', dSpaceID, dSpaceID, c_int32)
dSpaceDestroy = loadOde('dSpaceDestroy', None, dSpaceID)
dHashSpaceSetLevels = loadOde('dHashSpaceSetLevels', None, dSpaceID, c_int32, c_int32)
dHashSpaceGetLevels = loadOde('dHashSpaceGetLevels', None, dSpaceID, POINTER(c_int32), POINTER(c_int32))
dSpaceSetCleanup = loadOde('dSpaceSetCleanup', None, dSpaceID, c_int32)
dSpaceGetCleanup = loadOde('dSpaceGetCleanup', c_int32, dSpaceID)
dSpaceSetSublevel = loadOde('dSpaceSetSublevel', None, dSpaceID, c_int32)
dSpaceGetSublevel = loadOde('dSpaceGetSublevel', c_int32, dSpaceID)
dSpaceSetManualCleanup = loadOde('dSpaceSetManualCleanup', None, dSpaceID, c_int32)
dSpaceGetManualCleanup = loadOde('dSpaceGetManualCleanup', c_int32, dSpaceID)
dSpaceAdd = loadOde('dSpaceAdd', None, dSpaceID, dGeomID)
dSpaceRemove = loadOde('dSpaceRemove', None, dSpaceID, dGeomID)
dSpaceQuery = loadOde('dSpaceQuery', c_int32, dSpaceID, dGeomID)
dSpaceClean = loadOde('dSpaceClean', None, dSpaceID)
dSpaceGetNumGeoms = loadOde('dSpaceGetNumGeoms', c_int32, dSpaceID)
dSpaceGetGeom = loadOde('dSpaceGetGeom', dGeomID, dSpaceID, c_int32)
dSpaceGetClass = loadOde('dSpaceGetClass', c_int32, dSpaceID)
45.933333
104
0.786164
0
0
0
0
0
0
0
0
428
0.207063
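Editorial aside on the odepy record above: collision_space.py wraps ODE's space-management API with ctypes. The sketch below shows one plausible way to exercise those wrappers; it assumes the odepy package and the native ODE shared library are installed, and it passes 0 as the parent argument to create a top-level space (the int path routed through c_int32 in dHashSpaceCreate).

from odepy.collision_space import (
    dHashSpaceCreate, dSpaceSetCleanup, dSpaceGetNumGeoms, dSpaceDestroy)

space = dHashSpaceCreate(0)      # top-level hash space (no parent space)
dSpaceSetCleanup(space, 1)       # destroy contained geoms together with the space
print(dSpaceGetNumGeoms(space))  # 0 -- no geoms have been added yet
dSpaceDestroy(space)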
a3fcd7848a2f35d426c874d1ab768f72fe615409
1,563
py
Python
bst.py
phildavis17/DS_A
2d950a62b1ea36dff5dcd2c17266ddf088719472
[ "MIT" ]
null
null
null
bst.py
phildavis17/DS_A
2d950a62b1ea36dff5dcd2c17266ddf088719472
[ "MIT" ]
null
null
null
bst.py
phildavis17/DS_A
2d950a62b1ea36dff5dcd2c17266ddf088719472
[ "MIT" ]
null
null
null
class BSTNode:
    def __init__(self, data=None) -> None:
        self.data = data
        self.left = None
        self.right = None

    def __repr__(self) -> str:
        return f"BSTNode({self.data})"

    def __str__(self) -> str:
        return str(self.data)

    def __eq__(self, o: object) -> bool:
        pass

    def __hash__(self) -> int:
        pass


class BST:
    def __init__(self) -> None:
        pass

    def insert(self, item: int) -> None:
        pass

    def remove(self, item: int) -> int:
        pass

    def swap_nodes(self, item_a: int, item_b: int) -> None:
        pass

    def rebalance(self) -> None:
        pass

    def get_min_value(self) -> int:
        pass

    def get_max_value(self) -> int:
        pass

    def clear(self) -> None:
        pass

    def get_depth(self) -> int:
        """Returns the current depth of the tree."""
        pass

    def is_bst(self) -> bool:
        """Returns True if the tree is a properly configured BST."""
        pass

    def is_balanced(self) -> bool:
        """Returns True if the tree is balanced."""
        pass

    def is_perfect(self) -> bool:
        """Returns True if the tree is perfect."""
        pass

    def in_order(self):
        """Returns an iterable of the nodes in the tree."""
        pass

    def pre_order(self):
        """Returns an iterable of the nodes in the tree."""
        pass

    def post_order(self):
        """Returns an iterable of the nodes in the tree."""
        pass
20.565789
66
0.525912
1,559
0.997441
0
0
0
0
0
0
397
0.253999
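Editorial aside on the bst.py record above: the BST methods are unimplemented stubs. Purely as an illustration (not the author's implementation), the sketch below fills in an iterative insert as a standalone helper built on the BSTNode class defined in that file; the helper name bst_insert and the idea of threading the root explicitly are assumptions of this sketch.

from bst import BSTNode  # assumes the bst.py file above is importable

def bst_insert(root, item):
    """Insert item into the subtree rooted at root and return the (new) root."""
    new_node = BSTNode(item)
    if root is None:
        return new_node
    node = root
    while True:
        if item < node.data:
            if node.left is None:
                node.left = new_node
                return root
            node = node.left
        else:
            if node.right is None:
                node.right = new_node
                return root
            node = node.right

# Example usage: build a small tree and inspect it.
root = None
for value in (8, 3, 10, 1, 6):
    root = bst_insert(root, value)
print(root, root.left, root.right)  # 8 3 10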
a3fdeb4319b73c8b5241edc4f4a1fca818eec403
4,279
py
Python
pctest/test_publish.py
DaveWK/pyth-client
4332ef3287f584be46ec38ddd800cae8d4e7b792
[ "Apache-2.0" ]
null
null
null
pctest/test_publish.py
DaveWK/pyth-client
4332ef3287f584be46ec38ddd800cae8d4e7b792
[ "Apache-2.0" ]
null
null
null
pctest/test_publish.py
DaveWK/pyth-client
4332ef3287f584be46ec38ddd800cae8d4e7b792
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python3
# pip3 install websockets
import asyncio
import websockets
import json
import datetime
import sys


class test_publish:
    idnum = 1

    def __init__( self, sym, price, spread ):
        self.symbol = sym
        self.pidnum = test_publish.idnum
        test_publish.idnum += 1
        self.sidnum = test_publish.idnum
        test_publish.idnum += 1
        self.psubid = -1
        self.ssubid = -1
        self.price = price
        self.spread = spread

    def gen_subscribe_price(self):
        req = {
            'jsonrpc': '2.0',
            'method' : 'subscribe_price',
            'params' : {
                'account': self.account,
                'price_type' : 'price'
            },
            'id': self.sidnum
        }
        return json.dumps( req )

    def gen_subscribe_price_sched(self):
        req = {
            'jsonrpc': '2.0',
            'method' : 'subscribe_price_sched',
            'params' : {
                'account': self.account,
                'price_type' : 'price'
            },
            'id': self.pidnum
        }
        return json.dumps( req )

    def gen_update_price(self):
        req = {
            'jsonrpc': '2.0',
            'method': 'update_price',
            'params': {
                'account': self.account,
                'price_type': 'price',
                'status': 'trading',
                'price': self.price,
                'conf': self.spread
            },
            'id': None
        }
        self.price += self.spread
        return json.dumps( req )

    def parse_reply( self, msg, allsub ):
        # parse subscription replies
        subid = msg['result']['subscription']
        allsub[subid] = self
        if msg['id'] == self.pidnum:
            self.psubid = subid
        else:
            self.ssubid = subid

    async def parse_notify( self, ws, msg ):
        # parse subscription notification messages
        subid = msg['params']['subscription']
        ts = datetime.datetime.utcnow().isoformat()
        if subid == self.ssubid:
            # aggregate price update
            res = msg['params']['result']
            price = res['price']
            spread = res['conf']
            status = res['status']
            print( f'{ts} received aggregate price update symbol=' + self.symbol +
                   f',price={price}, spread={spread}, status={status}' )
        else:
            # request to submit price
            print( f'{ts} submit price to block-chain symbol=' + self.symbol +
                   f',price={self.price}, spread={self.spread}, subscription={subid}')
            await ws.send( self.gen_update_price() )

    async def subscribe( self, acct, ws, allids ):
        # submit initial subscriptions
        self.account = acct
        allids[self.pidnum] = self
        allids[self.sidnum] = self
        await ws.send( self.gen_subscribe_price() )
        await ws.send( self.gen_subscribe_price_sched() )


# websocket event loop
async def poll( uri ):
    # connect to pythd
    ws = await websockets.connect(uri)

    # submit subscriptions to pythd
    allids = {}
    allsub = {}
    allsym = {}
    sym1 = test_publish( 'SYMBOL1/USD', 10000, 100 )
    sym2 = test_publish( 'SYMBOL2/USD', 2000000, 20000 )
    allsym[sym1.symbol] = sym1
    allsym[sym2.symbol] = sym2

    # lookup accounts by symbol and subscribe
    req = { 'jsonrpc': '2.0', 'method': 'get_product_list', 'id': None }
    await ws.send( json.dumps( req ) )
    msg = json.loads( await ws.recv() )
    for prod in msg['result']:
        sym = prod['attr_dict']['symbol']
        for px in prod['price']:
            if sym in allsym and px['price_type'] == 'price':
                await allsym[sym].subscribe( px['account'], ws, allids )

    # poll for updates from pythd
    while True:
        msg = json.loads( await ws.recv() )
        # print(msg)
        if 'error' in msg:
            ts = datetime.datetime.utcnow().isoformat()
            code = msg['error']['code']
            emsg = msg['error']['message']
            print( f'{ts} error code: {code} msg: {emsg}' )
            sys.exit(1)
        elif 'result' in msg:
            msgid = msg['id']
            if msgid in allids:
                allids[msgid].parse_reply( msg, allsub )
        else:
            subid = msg['params']['subscription']
            if subid in allsub:
                await allsub[subid].parse_notify( ws, msg )


# connect to pythd, subscribe to and start publishing on two symbols
if __name__ == '__main__':
    uri = 'ws://localhost:8910'
    eloop = asyncio.get_event_loop()
    try:
        eloop.run_until_complete( poll( uri ) )
    except ConnectionRefusedError:
        print( f'connection refused uri={uri}' )
        sys.exit(1)
28.151316
77
0.597803
2,522
0.58939
0
0
0
0
2,315
0.541014
1,280
0.299135
a3fdf673713dab6653653e81925c27451e5f5544
1,866
py
Python
Python/other/merge_interval.py
TechSpiritSS/NeoAlgo
08f559b56081a191db6c6b1339ef37311da9e986
[ "MIT" ]
897
2020-06-25T00:12:52.000Z
2022-03-24T00:49:31.000Z
Python/other/merge_interval.py
AnshikaAgrawal5501/NeoAlgo
d66d0915d8392c2573ba05d5528e00af52b0b996
[ "MIT" ]
5,707
2020-06-24T17:53:28.000Z
2022-01-22T05:03:15.000Z
Python/other/merge_interval.py
AnshikaAgrawal5501/NeoAlgo
d66d0915d8392c2573ba05d5528e00af52b0b996
[ "MIT" ]
1,817
2020-06-25T03:51:05.000Z
2022-03-29T05:14:07.000Z
'''
Given an array of intervals, merge all overlapping intervals,
and return an array of the non-overlapping intervals
that cover all the intervals in the input.

Input: intervals = [[1,3],[2,6],[8,10],[15,18]]
Output: [[1,6],[8,10],[15,18]]
Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6].
'''

def merge(intervals):
    # sort the array
    intervals.sort()
    # take another empty list
    intervals_stack = []
    for pair in intervals:
        if len(intervals_stack) == 0:
            intervals_stack.append(pair)  # adding all the number in intervals elements in empty list
        # check number is equal or greater and less than pop elements
        else:
            current_pair = intervals_stack[-1]
            if current_pair[1] >= pair[0]:
                intervals_stack.pop()
                if current_pair[1] < pair[1]:
                    new_pair = [current_pair[0], pair[1]]
                    intervals_stack.append(new_pair)
                else:
                    new_pair = [current_pair[0], current_pair[1]]
                    intervals_stack.append(new_pair)
            else:
                intervals_stack.append(pair)
    # result
    return intervals_stack

if __name__ == '__main__':
    R = int(input("Enter the number of rows:"))
    C = int(input("Enter the number of columns:"))
    interval = [[int(input("Enter the elements: ")) for x in range(C)] for y in range(R)]
    print("Overlapping interval: ", interval)
    print("Non-overlapping intervals: ", merge(interval))

"""
Time complexity : O(n^2)
Space complexity : O(n^2)

INPUT:-
Enter the number of rows:4
Enter the number of columns:2
Enter the elements: 1
Enter the elements: 3
Enter the elements: 2
Enter the elements: 6
Enter the elements: 8
Enter the elements: 10
Enter the elements: 15
Enter the elements: 18

OUTPUT:-
Overlapping interval:  [[1, 3], [2, 6], [8, 10], [15, 18]]
Non-overlapping intervals:  [[1, 6], [8, 10], [15, 18]]
"""
29.619048
95
0.659164
0
0
0
0
0
0
0
0
1,079
0.578242
a3fe08a8aed62efc53ccd8e1fe4f7bf8c6183509
9,708
py
Python
tests/test_all.py
InnovativeTravel/humilis-lambdautils
344e13efb68d19f61f0be8178eb6cc2219913fb0
[ "MIT" ]
null
null
null
tests/test_all.py
InnovativeTravel/humilis-lambdautils
344e13efb68d19f61f0be8178eb6cc2219913fb0
[ "MIT" ]
null
null
null
tests/test_all.py
InnovativeTravel/humilis-lambdautils
344e13efb68d19f61f0be8178eb6cc2219913fb0
[ "MIT" ]
null
null
null
"""Unit tests.""" import inspect import json from mock import Mock import os import sys import uuid import pytest # Add the lambda directory to the python library search path lambda_dir = os.path.join( os.path.dirname(inspect.getfile(inspect.currentframe())), '..') sys.path.append(lambda_dir) import lambdautils.utils @pytest.mark.parametrize( "key,environment,stage,namespace,table,nkey", [ ("k", "e", "s", None, "e-s-secrets", "k"), ("k", "e", None, None, "e-dummystage-secrets", "k"), ("k", "e", None, "n", "e-dummystage-secrets", "n:k"), ("k", "e", "s", "n", "e-s-secrets", "n:k")]) def test_get_secret_from_vault(key, environment, stage, namespace, table, nkey, boto3_resource, boto3_client, monkeypatch): """Gets a secret from the DynamoDB secrets vault.""" # Call to the DynamoDB client to retrieve the encrypted secret monkeypatch.setattr("boto3.resource", boto3_resource) monkeypatch.setattr("boto3.client", boto3_client) secret = lambdautils.utils.get_secret(key, namespace=namespace, environment=environment, stage=stage) assert secret == "dummy" boto3_client("dynamodb").get_item.assert_called_with( TableName=table, Key={"id": {"S": nkey}}) # Call to the KMS client to decrypt the secret boto3_client('kms').decrypt.assert_called_with(CiphertextBlob="encrypted") def test_get_secret_from_env(monkeypatch): """Get a secret from an (encrypted) environment variable.""" key = str(uuid.uuid4()).replace('-', '.') value = str(uuid.uuid4()) monkeypatch.setenv(key.replace('.', '_').upper(), value) secret = lambdautils.utils.get_secret(key) assert secret == value def test_get_setting(monkeypatch): """Should be an alias for get_secret.""" resp = str(uuid.uuid4()) arg = str(uuid.uuid4()) kwarg = str(uuid.uuid4()) get_secret = Mock(return_value=resp) monkeypatch.setattr("lambdautils.state.get_secret", get_secret) resp2 = lambdautils.state.get_setting(arg, kwarg=kwarg) assert resp2 == resp get_secret.assert_called_with(arg, kwarg=kwarg) @pytest.mark.parametrize( "key,environment,layer,stage,shard_id,namespace,table,consistent,nkey", [ ("k", "e", "l", "s", None, None, "e-l-s-state", False, "k"), ("k", "e", "l", "s", None, "n", "e-l-s-state", False, "n:k"), ("k", "e", "l", "s", "s-012", "n", "e-l-s-state", True, "s-012:n:k"), ("k", "e", "l", "s", "s-0001", None, "e-l-s-state", True, "s-0001:k")]) def test_get_state(boto3_resource, monkeypatch, key, environment, layer, stage, shard_id, namespace, table, consistent, nkey): """Get a state value from DynamoDB.""" monkeypatch.setattr("boto3.resource", boto3_resource) lambdautils.utils.get_state(key, environment=environment, layer=layer, stage=stage, shard_id=shard_id, namespace=namespace, consistent=consistent) boto3_resource("dynamodb").Table.assert_called_with(table) if consistent is None: # The default setting: use consistent reads consistent = True boto3_resource("dynamodb").Table().get_item.assert_called_with( Key={"id": nkey}, ConsistentRead=consistent) def test_no_state_table(boto3_resource, monkeypatch): """Test accessing state variable without having a state table.""" monkeypatch.setattr("boto3.resource", boto3_resource) monkeypatch.delenv("HUMILIS_ENVIRONMENT") with pytest.raises(lambdautils.state.StateTableError): lambdautils.utils.set_state("sample_state_key", "sample_state_value") with pytest.raises(lambdautils.state.StateTableError): lambdautils.utils.delete_state("sample_state_key") with pytest.raises(lambdautils.state.StateTableError): lambdautils.utils.get_state("sample_state_key") @pytest.mark.parametrize( 
"key,value,environment,layer,stage,shard_id,namespace,table,nkey", [ ("k", "v", "e", "l", "s", None, None, "e-l-s-state", "k"), ("k", "v", "e", "l", "s", None, "n", "e-l-s-state", "n:k"), ("k", "v", "e", "l", "s", "s1", "n", "e-l-s-state", "s1:n:k"), ("k", "v", "e", "l", "s", "s2", None, "e-l-s-state", "s2:k")]) def test_set_state(boto3_resource, monkeypatch, key, value, environment, layer, stage, shard_id, namespace, table, nkey): """Tests setting a state variable.""" monkeypatch.setattr("boto3.resource", boto3_resource) lambdautils.utils.set_state(key, value, environment=environment, layer=layer, stage=stage, shard_id=shard_id, namespace=namespace) boto3_resource("dynamodb").Table.assert_called_with(table) boto3_resource("dynamodb").Table().put_item.assert_called_with( Item={"id": nkey, "value": json.dumps(value)}) @pytest.mark.parametrize( "key,environment,layer,stage,shard_id,namespace,table,nkey", [ ("k", "e", "l", "s", None, None, "e-l-s-state", "k"), ("k", "e", "l", "s", None, "n", "e-l-s-state", "n:k"), ("k", "e", "l", "s", "s1", "n", "e-l-s-state", "s1:n:k"), ("k", "e", "l", "s", "s2", None, "e-l-s-state", "s2:k")]) def test_delete_state(boto3_resource, monkeypatch, key, environment, layer, stage, shard_id, namespace, table, nkey): """Tests setting a state variable.""" monkeypatch.setattr("boto3.resource", boto3_resource) lambdautils.utils.delete_state(key, environment=environment, layer=layer, stage=stage, shard_id=shard_id, namespace=namespace) boto3_resource("dynamodb").Table.assert_called_with(table) boto3_resource("dynamodb").Table().delete_item.assert_called_with( Key={"id": nkey}) def test_sentry_monitor_bad_client(boto3_client, raven_client, context, monkeypatch): """Test that sentry_monitor handles raven client errors gracefully.""" class ClientError(Exception): pass def raise_error(dsn): raise ClientError monkeypatch.setattr("raven.Client", Mock(side_effect=raise_error)) monkeypatch.setattr("boto3.client", boto3_client) @lambdautils.utils.sentry_monitor(environment="dummyenv", stage="dummystage") def lambda_handler(event, context): pass lambda_handler(None, context) raven_client.captureException.assert_not_called() @pytest.mark.parametrize( "kstream, fstream, rcalls, kcalls, fcalls, ev", [ ("a", "b", 1, 0, 0, {"Records": [{}]}), (None, "b", 1, 0, 0, {"Records": [{}]}), (None, None, 1, 0, 0, None), (None, None, 1, 0, 0, None), ("a", "b", 1, 0, 0, None), ("a", None, 1, 0, 0, None)]) def test_sentry_monitor_exception( kstream, fstream, rcalls, kcalls, fcalls, ev, boto3_client, raven_client, context, kinesis_event, monkeypatch): """Tests the sentry_monitor decorator when throwing an exception and lacking an error stream where to dump the errors.""" if ev is None: # Default to a Kinesis event ev = kinesis_event monkeypatch.setattr("boto3.client", boto3_client) monkeypatch.setattr("raven.Client", Mock(return_value=raven_client)) monkeypatch.setattr("lambdautils.monitor.SentryHandler", Mock()) monkeypatch.setattr("lambdautils.utils.get_secret", Mock(return_value="dummydsn")) error_stream = { "kinesis_stream": kstream, "firehose_delivery_stream": fstream} @lambdautils.utils.sentry_monitor(error_stream=error_stream) def lambda_handler(event, context): """Raise an error.""" raise KeyError with pytest.raises(KeyError): lambda_handler(ev, context) # Should have captured only 1 error: # * The original KeyError assert raven_client.captureException.call_count == rcalls # And should have send the events to the Kinesis and FH error streams assert 
boto3_client("kinesis").put_records.call_count == kcalls assert boto3_client("firehose").put_record_batch.call_count == fcalls def test_send_to_kinesis_stream(search_events, boto3_client, monkeypatch): """Tests sending events to a Kinesis stream.""" monkeypatch.setattr("boto3.client", boto3_client) lambdautils.utils.send_to_kinesis_stream(search_events, "dummy_stream") boto3_client("kinesis").put_records.call_count == 1 def test_send_to_delivery_stream(search_events, boto3_client, monkeypatch): """Tests sending events to a Firehose delivery stream.""" monkeypatch.setattr("boto3.client", boto3_client) lambdautils.utils.send_to_delivery_stream(search_events, "dummy_stream") boto3_client("firehose").put_record_batch.call_count == 1 @pytest.mark.parametrize("deserializer, embed_ts", [ [json.loads, False], [json.loads, "kinesis_timestamp"], [None, False]]) def test_unpack_kinesis_event(kinesis_event, deserializer, embed_ts): """Extracts json-serialized events from a Kinesis events.""" events, shard_id = lambdautils.utils.unpack_kinesis_event( kinesis_event, deserializer=deserializer, embed_timestamp=embed_ts) # There should be one event per kinesis record assert len(events) == len(kinesis_event["Records"]) assert shard_id == kinesis_event["Records"][0]["eventID"].split(":")[0] if embed_ts: assert all(embed_ts in ev for ev in events)
41.844828
79
0.642872
42
0.004326
0
0
6,857
0.706325
0
0
2,733
0.28152
a3ff284c249c767a8e6d1b66a73bf03b2d790a9e
366
py
Python
packages/starcheck/post_regress.py
sot/ska_testr
dd84b89d0b5ebf6158c6cda4c1df432138044e20
[ "MIT" ]
null
null
null
packages/starcheck/post_regress.py
sot/ska_testr
dd84b89d0b5ebf6158c6cda4c1df432138044e20
[ "MIT" ]
27
2016-10-19T19:39:46.000Z
2022-03-04T14:56:40.000Z
packages/starcheck/post_regress.py
sot/ska_testr
dd84b89d0b5ebf6158c6cda4c1df432138044e20
[ "MIT" ]
null
null
null
import os

from testr.packages import make_regress_files

regress_files = ['starcheck.txt',
                 'starcheck/pcad_att_check.txt']

clean = {'starcheck.txt': [(r'\s*Run on.*[\n\r]*', ''),
                           (os.environ['SKA'], '')],
         'starcheck/pcad_att_check.txt': [(os.environ['SKA'], '')]}

make_regress_files(regress_files, clean=clean)
30.5
67
0.592896
0
0
0
0
0
0
0
0
127
0.346995
430006e2469bed3f7e4c977ba2de4f246799468c
1,714
py
Python
testsite/wsgi.py
stungkit/djaodjin-saas
93c8631509ffd5b0fb91283cd4a4aeaf9826e97e
[ "BSD-2-Clause" ]
null
null
null
testsite/wsgi.py
stungkit/djaodjin-saas
93c8631509ffd5b0fb91283cd4a4aeaf9826e97e
[ "BSD-2-Clause" ]
null
null
null
testsite/wsgi.py
stungkit/djaodjin-saas
93c8631509ffd5b0fb91283cd4a4aeaf9826e97e
[ "BSD-2-Clause" ]
null
null
null
""" WSGI config for testsite project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os, signal #pylint: disable=invalid-name def save_coverage(*args, **kwargs): #pylint:disable=unused-argument sys.stderr.write("saving coverage\n") cov.stop() cov.save() if os.getenv('DJANGO_COVERAGE'): import atexit, sys import coverage cov = coverage.coverage(data_file=os.path.join(os.getenv('DJANGO_COVERAGE'), ".coverage.%d" % os.getpid())) cov.start() atexit.register(save_coverage) try: signal.signal(signal.SIGTERM, save_coverage) except ValueError as e: # trapping signals does not work with manage # trying to do so fails with # ValueError: signal only works in main thread pass os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testsite.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application #pylint: disable=invalid-name application = get_wsgi_application()
34.979592
80
0.748541
0
0
0
0
0
0
0
0
1,149
0.670362
4300329d7c7ed5214d1d6d7a95fd6dd634fbc6d1
11,269
py
Python
authserver/maildaemons/forwarder/server.py
jdelic/authserver
e800664436b252fcdf224a9af46a1122c87be3ca
[ "MIT" ]
8
2017-07-04T10:07:32.000Z
2022-01-02T10:31:43.000Z
authserver/maildaemons/forwarder/server.py
jdelic/authserver
e800664436b252fcdf224a9af46a1122c87be3ca
[ "MIT" ]
14
2020-02-11T21:42:38.000Z
2022-03-28T16:00:55.000Z
authserver/maildaemons/forwarder/server.py
jdelic/authserver
e800664436b252fcdf224a9af46a1122c87be3ca
[ "MIT" ]
1
2020-03-01T10:39:28.000Z
2020-03-01T10:39:28.000Z
#!/usr/bin/env python3 -u
# -* encoding: utf-8 *-
import argparse
import asyncore
import json
import logging
import signal
import sys
import os

from types import FrameType
from typing import Tuple, Sequence, Any, Union, Optional, List, Dict
from concurrent.futures import ThreadPoolExecutor as Pool

import daemon
from django.db.utils import OperationalError

import authserver
from maildaemons.utils import SMTPWrapper, PatchedSMTPChannel, SaneSMTPServer

_log = logging.getLogger(__name__)
pool = Pool()


class ForwarderServer(SaneSMTPServer):
    def __init__(self, remote_relay_ip: str, remote_relay_port: int, local_delivery_ip: str,
                 local_delivery_port: int, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.smtp = SMTPWrapper(
            external_ip=remote_relay_ip, external_port=remote_relay_port,
            error_relay_ip=local_delivery_ip, error_relay_port=local_delivery_port
        )

    # ** must be thread-safe, don't modify shared state,
    # _log should be thread-safe as stated by the docs. Django ORM should be as well.
    def _process_message(self, peer: Tuple[str, int], mailfrom: str, rcpttos: Sequence[str], data: bytes, *,
                         channel: PatchedSMTPChannel,
                         **kwargs: Any) -> Optional[str]:
        # we can't import the Domain model before Django has been initialized
        from mailauth.models import EmailAlias, Domain

        data = self.add_received_header(peer, data, channel)

        remaining_rcpttos = list(rcpttos)  # ensure that new_rcpttos is a mutable list
        combined_rcptto = {}  # type: Dict[str, List[str]]  # { new_mailfrom: [recipients] }

        def add_rcptto(mfrom: str, rcpt: Union[str, List]) -> None:
            if mfrom in combined_rcptto:
                if isinstance(rcpt, list):
                    combined_rcptto[mfrom] += rcpt
                else:
                    combined_rcptto[mfrom].append(rcpt)
            else:
                if isinstance(rcpt, list):
                    combined_rcptto[mfrom] = rcpt
                else:
                    combined_rcptto[mfrom] = [rcpt]

        # we're going to modify remaining_rcpttos so we start from its end
        for ix in range(len(remaining_rcpttos) - 1, -1, -1):
            rcptto = rcpttos[ix].lower()
            rcptuser, rcptdomain = rcptto.split("@", 1)

            # implement domain catch-all redirect
            domain = None  # type: Optional[Domain]
            try:
                domain = Domain.objects.get(name=rcptdomain)
            except Domain.DoesNotExist:
                pass
            except OperationalError:
                _log.exception("Database unavailable.")
                return "421 Processing problem. Please try again later."

            if domain:
                if domain.redirect_to:
                    _log.debug("ix: %s - rcptto: %s - remaining rcpttos: %s", ix, rcptto, remaining_rcpttos)
                    del remaining_rcpttos[ix]
                    new_rcptto = "%s@%s" % (rcptuser, domain.redirect_to)
                    _log.info("%sForwarding email from <%s> to <%s> to domain @%s",
                              "(Retry) " if "retry" in kwargs and kwargs["retry"] else "",
                              mailfrom, rcptto, domain.redirect_to)
                    add_rcptto(mailfrom, new_rcptto)
                    continue

            # follow the same path like the stored procedure authserver_resolve_alias(...)
            if "-" in rcptuser:
                # convert the first - to a +
                user_mailprefix = "%s+%s" % tuple(rcptuser.split("-", 1))  # type: ignore
            else:
                user_mailprefix = rcptuser

            if "+" in user_mailprefix:
                # if we had a dashext, or a plusext, we're left with just the prefix after this
                user_mailprefix = user_mailprefix.split("+", 1)[0]

            try:
                alias = EmailAlias.objects.get(mailprefix__iexact=user_mailprefix,
                                               domain__name__iexact=rcptdomain)  # type: EmailAlias
            except EmailAlias.DoesNotExist:
                # OpenSMTPD shouldn't even call us for invalid addresses if we're configured correctly
                _log.error("%sUnknown mail address: %s (from: %s, prefix: %s)",
                           "(Retry) " if "retry" in kwargs and kwargs["retry"] else "",
                           rcptto, mailfrom, user_mailprefix)
                continue
            except OperationalError:
                _log.exception("Database unavailable.")
                return "421 Processing problem. Please try again later."

            if alias.forward_to is not None:
                # it's a mailing list, forward the email to all connected addresses
                del remaining_rcpttos[ix]  # remove this recipient from the list
                _newmf = mailfrom
                if alias.forward_to.new_mailfrom != "":
                    _newmf = alias.forward_to.new_mailfrom
                _log.info("%sForwarding email from <%s> with new sender <%s> to <%s>",
                          "(Retry) " if "retry" in kwargs and kwargs["retry"] else "",
                          mailfrom, _newmf, alias.forward_to.addresses)
                add_rcptto(_newmf, alias.forward_to.addresses)

        # if there are any remaining non-list/non-forward recipients, we inject them back to OpenSMTPD here
        if len(remaining_rcpttos) > 0:
            _log.info("%sDelivering email from <%s> to remaining recipients <%s>",
                      "(Retry) " if "retry" in kwargs and kwargs["retry"] else "",
                      mailfrom, remaining_rcpttos)
            add_rcptto(mailfrom, remaining_rcpttos)

        if len(combined_rcptto.keys()) == 1:
            _log.debug("Only one mail envelope sender, forwarding is atomic")

        results = {k: "unsent" for k in combined_rcptto.keys()}  # type: Dict[str, str]
        for new_mailfrom in combined_rcptto.keys():
            _log.debug("Injecting email from <%s> to <%s>", new_mailfrom, combined_rcptto[new_mailfrom])
            ret = self.smtp.sendmail(new_mailfrom, combined_rcptto[new_mailfrom], data)
            if ret is not None:
                results[new_mailfrom] = "failure"
                if len(combined_rcptto.keys()) > 1:
                    _log.error("Non-atomic mail sending failed from <%s> in dict(%s)", combined_rcptto.keys(),
                               json.dumps(results))
                return ret
            results[new_mailfrom] = "success"

        # TODO: log results
        _log.debug("Done processing.")
        return None

    def process_message(self, *args: Any, **kwargs: Any) -> Optional[str]:
        future = pool.submit(ForwarderServer._process_message, self, *args, **kwargs)
        return future.result()


def run(_args: argparse.Namespace) -> None:
    server = ForwarderServer(_args.remote_relay_ip, _args.remote_relay_port,
                             _args.local_delivery_ip, _args.local_delivery_port,
                             (_args.input_ip, _args.input_port), None, decode_data=False,
                             daemon_name="mailforwarder")
    asyncore.loop()


def _sigint_handler(sig: int, frame: FrameType) -> None:
    print("CTRL+C exiting")
    pool.shutdown(wait=False)
    sys.exit(1)


def _main() -> None:
    signal.signal(signal.SIGINT, _sigint_handler)

    parser = argparse.ArgumentParser(
        description="This is a SMTP daemon that is used through OpenSMTPD configuration "
                    "to check whether incoming emails are addressed to a forwarding email alias "
                    "and if they are, inject emails to all list delivery addresses / expand the alias."
    )

    grp_daemon = parser.add_argument_group("Daemon options")
    grp_daemon.add_argument("-p", "--pidfile", dest="pidfile", default="./mailforwarder-server.pid",
                            help="Path to a pidfile")
    grp_daemon.add_argument("-u", "--user", dest="user", default=None, help="Drop privileges and switch to this user")
    grp_daemon.add_argument("-g", "--group", dest="group", default=None,
                            help="Drop privileges and switch to this group")
    grp_daemon.add_argument("-d", "--daemonize", dest="daemonize", default=False, action="store_true",
                            help="If set, fork into background")
    grp_daemon.add_argument("-v", "--verbose", dest="verbose", default=False, action="store_true",
                            help="Output extra logging (not implemented right now)")
    grp_daemon.add_argument("-C", "--chdir", dest="chdir", default=".",
                            help="Change working directory to the provided value")

    grp_network = parser.add_argument_group("Network options")
    grp_network.add_argument("--input-ip", dest="input_ip", default="127.0.0.1", help="The network address to bind to")
    grp_network.add_argument("--input-port", dest="input_port", metavar="PORT", type=int, default=10046,
                             help="The port to bind to")
    grp_network.add_argument("--local-delivery-ip", dest="local_delivery_ip", default="127.0.0.1",
                             help="The OpenSMTPD instance IP for local email to be delivered.")
    grp_network.add_argument("--local-delivery-port", dest="local_delivery_port", metavar="PORT", type=int,
                             default=10045, help="The port where OpenSMTPD listens for local email to be delivered")
    grp_network.add_argument("--remote-relay-ip", dest="remote_relay_ip", default="127.0.0.1",
                             help="The OpenSMTPD instance IP that accepts mail for relay to external domains.")
    grp_network.add_argument("--remote-relay-port", dest="remote_relay_port", default=10045,
                             help="The port where OpenSMTPD listens for mail to relay.")

    grp_django = parser.add_argument_group("Django options")
    grp_django.add_argument("--settings", dest="django_settings", default="authserver.settings",
                            help="The Django settings module to use for authserver database access (default: "
                                 "authserver.settings)")

    _args = parser.parse_args()

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", _args.django_settings)

    # noinspection PyUnresolvedReferences
    from django.conf import settings

    # initialize Django
    import django
    django.setup()

    _log.info("mailforwarder v%s: Forwarding Alias Service starting" % authserver.version)
    _log.info("Django ORM initialized")

    pidfile = open(_args.pidfile, "w")

    ctx = daemon.DaemonContext(
        working_directory=_args.chdir,
        pidfile=pidfile,
        uid=_args.user,
        gid=_args.group,
        detach_process=_args.daemonize,
        files_preserve=[1, 2, 3, pidfile],
        stdin=sys.stdin,
        stdout=sys.stdout,
        stderr=sys.stderr,
    )

    with ctx:
        run(_args)


def main() -> None:
    try:
        _main()
    except Exception as e:
        _log.critical("Unhandled exception", exc_info=True)
        sys.exit(1)


if __name__ == "__main__":
    main()
45.623482
119
0.604401
6,406
0.568462
0
0
0
0
0
0
3,390
0.300825
430070d2630f84dfb573574ae265484cbf0ee429
2,074
py
Python
services/backend/project/api/sites.py
kzkaneoka/custom-job-search
ca6054aee979cb1eff701dc5ba0cf56fb92baf44
[ "MIT" ]
null
null
null
services/backend/project/api/sites.py
kzkaneoka/custom-job-search
ca6054aee979cb1eff701dc5ba0cf56fb92baf44
[ "MIT" ]
1
2021-06-02T00:51:06.000Z
2021-06-02T00:51:06.000Z
services/backend/project/api/sites.py
kzkaneoka/custom-job-search
ca6054aee979cb1eff701dc5ba0cf56fb92baf44
[ "MIT" ]
null
null
null
import requests
from bs4 import BeautifulSoup, element


class Indeed:
    def __init__(self, words, location, offset):
        self.url = "https://www.indeed.com/jobs?as_and={}&l={}&sort=date&start={}".format(
            "+".join(set(d.strip().lower() for d in words.split(",") if d)),
            "+".join(list(d.lower() for d in location.split(" ") if d)),
            int(offset),
        )

    def extract(self, soup):
        if not soup:
            return []
        jobs = []
        for tag in soup.find_all(name="div", attrs={"class": "jobsearch-SerpJobCard"}):
            job = {}
            for child in tag.children:
                if child and type(child) == element.Tag and child.attrs:
                    if child.attrs["class"][0] == "title":
                        job["title"] = child.get_text().strip()
                        for grandchild in child.find_all(name="a"):
                            if grandchild.has_attr("href"):
                                job["link"] = (
                                    "https://www.indeed.com" + grandchild["href"]
                                )
                    elif child.attrs["class"][0] == "sjcl":
                        lines = child.get_text().strip().split("\n")
                        job["company"] = lines[0]
                        job["location"] = lines[-1]
                    elif child.attrs["class"][0] == "jobsearch-SerpJobCard-footer":
                        job["date"] = "n/a"
                        for grandchild in child.find_all(
                            name="span", attrs={"class": "date"}
                        ):
                            job["date"] = grandchild.get_text()
            jobs.append(job)
        return jobs

    def fetch(self):
        soup = None
        try:
            r = requests.get(self.url)
            r.raise_for_status()
            soup = BeautifulSoup(r.text, "html.parser")
        finally:
            return soup

    def search(self):
        soup = self.fetch()
        jobs = self.extract(soup)
        return jobs
38.407407
90
0.450338
2,016
0.972035
0
0
0
0
0
0
298
0.143684
4301cf37bd9ece6b54456c22562dfc5aa8e8a7cb
748
py
Python
product_details/utils.py
gene1wood/django-product-details
53f245d76fa11d073ba686e0ece7b0293ec21942
[ "BSD-3-Clause" ]
null
null
null
product_details/utils.py
gene1wood/django-product-details
53f245d76fa11d073ba686e0ece7b0293ec21942
[ "BSD-3-Clause" ]
null
null
null
product_details/utils.py
gene1wood/django-product-details
53f245d76fa11d073ba686e0ece7b0293ec21942
[ "BSD-3-Clause" ]
null
null
null
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

from product_details import settings_defaults


def settings_fallback(key):
    """Grab user-defined settings, or fall back to default."""
    try:
        return getattr(settings, key)
    except (AttributeError, ImportError, ImproperlyConfigured):
        return getattr(settings_defaults, key)


def get_django_cache(cache_name):
    try:
        from django.core.cache import caches  # django 1.7+
        return caches[cache_name]
    except ImportError:
        from django.core.cache import get_cache
        return get_cache(cache_name)
    except ImproperlyConfigured:
        # dance to get around not-setup-django at import time
        return {}
29.92
63
0.720588
0
0
0
0
0
0
0
0
124
0.165775
4302245408f7928b493623fbaac5ca5daff6a97c
229
py
Python
kattis/Soda Slurper.py
jaredliw/python-question-bank
9c8c246623d8d171f875700b57772df0afcbdcdf
[ "MIT" ]
1
2021-04-08T07:49:15.000Z
2021-04-08T07:49:15.000Z
kattis/Soda Slurper.py
jaredliw/leetcode-solutions
9c8c246623d8d171f875700b57772df0afcbdcdf
[ "MIT" ]
null
null
null
kattis/Soda Slurper.py
jaredliw/leetcode-solutions
9c8c246623d8d171f875700b57772df0afcbdcdf
[ "MIT" ]
1
2022-01-23T02:12:24.000Z
2022-01-23T02:12:24.000Z
# CPU: 0.06 s

possessed, found, condition = map(int, input().split())
possessed += found
count = 0
while possessed >= condition:
    div, mod = divmod(possessed, condition)
    count += div
    possessed = div + mod
print(count)
22.9
55
0.663755
0
0
0
0
0
0
0
0
13
0.056769
430328989c54e456016360148c864a60ebe10a5f
6,405
py
Python
efficientdet/dataset/csv_.py
HyunjiEllenPak/automl
fedf04adf12c5fd11045ea06e2f5c11a5a5490c4
[ "Apache-2.0" ]
null
null
null
efficientdet/dataset/csv_.py
HyunjiEllenPak/automl
fedf04adf12c5fd11045ea06e2f5c11a5a5490c4
[ "Apache-2.0" ]
null
null
null
efficientdet/dataset/csv_.py
HyunjiEllenPak/automl
fedf04adf12c5fd11045ea06e2f5c11a5a5490c4
[ "Apache-2.0" ]
null
null
null
""" Copyright 2017-2018 yhenon (https://github.com/yhenon/) Copyright 2017-2018 Fizyr (https://fizyr.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # from generators.common import Generator import cv2 import numpy as np from PIL import Image from six import raise_from import csv import sys import os.path as osp from collections import OrderedDict import os def _parse(value, function, fmt): """ Parse a string into a value, and format a nice ValueError if it fails. Returns `function(value)`. Any `ValueError` raised is catched and a new `ValueError` is raised with message `fmt.format(e)`, where `e` is the caught `ValueError`. """ try: return function(value) except ValueError as e: raise_from(ValueError(fmt.format(e)), None) def _read_classes(csv_reader): """ Parse the classes file given by csv_reader. """ result = OrderedDict() for line, row in enumerate(csv_reader): line += 1 try: class_name, class_id = row except ValueError: raise_from(ValueError('line {}: format should be \'class_name,class_id\''.format(line)), None) class_id = _parse(class_id, int, 'line {}: malformed class ID: {{}}'.format(line)) if class_name in result: raise ValueError('line {}: duplicate class name: \'{}\''.format(line, class_name)) result[class_name] = class_id return result def _read_quadrangle_annotations(csv_reader, classes, detect_text=False): """ Read annotations from the csv_reader. Args: csv_reader: csv reader of args.annotations_path classes: list[str] all the class names read from args.classes_path Returns: result: dict, dict is like {image_path: [{'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'x3': x3, 'y3': y3, 'x4': x4, 'y4': y4, 'class': class_name}]} """ result = OrderedDict() for line, row in enumerate(csv_reader, 1): try: img_file, x1, y1, x2, y2, x3, y3, x4, y4, class_name = row[:10] if img_file not in result: result[img_file] = [] # If a row contains only an image path, it's an image without annotations. 
if (x1, y1, x2, y2, x3, y3, x4, y4, class_name) == ('', '', '', '', '', '', '', '', ''): continue x1 = _parse(x1, int, 'line {}: malformed x1: {{}}'.format(line)) y1 = _parse(y1, int, 'line {}: malformed y1: {{}}'.format(line)) x2 = _parse(x2, int, 'line {}: malformed x2: {{}}'.format(line)) y2 = _parse(y2, int, 'line {}: malformed y2: {{}}'.format(line)) x3 = _parse(x3, int, 'line {}: malformed x3: {{}}'.format(line)) y3 = _parse(y3, int, 'line {}: malformed y3: {{}}'.format(line)) x4 = _parse(x4, int, 'line {}: malformed x4: {{}}'.format(line)) y4 = _parse(y4, int, 'line {}: malformed y4: {{}}'.format(line)) # check if the current class name is correctly present if detect_text: if class_name == '###': continue else: class_name = 'text' if class_name not in classes: raise ValueError(f'line {line}: unknown class name: \'{class_name}\' (classes: {classes})') result[img_file].append({'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'x3': x3, 'y3': y3, 'x4': x4, 'y4': y4, 'class': class_name}) except ValueError: raise_from(ValueError( f'line {line}: format should be \'img_file,x1,y1,x2,y2,x3,y3,x4,y4,class_name\' or \'img_file,,,,,\''), None) return result def _read_annotations(csv_reader, classes, base_dir): """ Read annotations from the csv_reader. Args: csv_reader: csv reader of args.annotations_path classes: list[str] all the class names read from args.classes_path Returns: result: dict, dict is like {image_path: [{'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'class': class_name}]} """ result = OrderedDict() for line, row in enumerate(csv_reader, 1): try: img_file, x1, y1, x2, y2 = row[:5] class_name = img_file.split("/")[0] if img_file not in result: result[img_file] = [] # If a row contains only an image path, it's an image without annotations. if (x1, y1, x2, y2, class_name) == ('', '', '', '', ''): continue x1 = _parse(x1, int, 'line {}: malformed x1: {{}}'.format(line)) y1 = _parse(y1, int, 'line {}: malformed y1: {{}}'.format(line)) x2 = _parse(x2, int, 'line {}: malformed x2: {{}}'.format(line)) y2 = _parse(y2, int, 'line {}: malformed y2: {{}}'.format(line)) if class_name not in classes: raise ValueError(f'line {line}: unknown class name: \'{class_name}\' (classes: {classes})') result[img_file].append({'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2, 'class': class_name, 'filename':img_file}) except ValueError: raise_from(ValueError( f'line {line}: format should be \'img_file,x1,y1,x2,y2,class_name\' or \'img_file,,,,,\''), None) return result def _open_for_csv(path): """ Open a file with flags suitable for csv.reader. This is different for python2 it means with mode 'rb', for python3 this means 'r' with "universal newlines". """ if sys.version_info[0] < 3: return open(path, 'rb') else: return open(path, 'r', newline='') def load_image(path): """ Load an image at the image_index. """ image = cv2.imread(path) image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) return image
37.023121
119
0.577518
0
0
0
0
0
0
0
0
3,079
0.480718