Dataset columns (each record below lists these fields in this order):

    max_stars_repo_path    string    length 3 to 269
    max_stars_repo_name    string    length 4 to 119
    max_stars_count        int64     0 to 191k
    id                     string    length 1 to 7
    content                string    length 6 to 1.05M
    score                  float64   0.23 to 5.13
    int_score              int64     0 to 5
src/api/storage/smart.py
ScottDay/DFN-Maintenance-GUI-Backend
2
12793051
<filename>src/api/storage/smart.py
from time import sleep

import src.wrappers as wrappers
from src.console import console

from .partitions import disk_partitions


@wrappers.jwt
@wrappers.endpoint
@wrappers.stats
@wrappers.injector
def get(handler, log):
    partitions = disk_partitions()
    devices = []

    log.info('Starting smart tests.')
    for partition in partitions:
        console('smartctl -X {}'.format(partition['device']))
        console('smartctl -t short {}'.format(partition['device']))
        devices.append(partition['device'])

    log.info('Waiting for smart test completion (2 minutes).')
    sleep(120)

    partitions = disk_partitions()

    log.info('Checking smart test results.')
    for partition in partitions:
        if partition['device'] in devices:
            result = console('smartctl -a {}'.format(partition['device']))

            if 'No Errors Logged' in result:
                partition['smart'] = 'Passed'
            else:
                partition['smart'] = 'Failed'

    handler.add({'partitions': partitions})
2.125
2
link2clip.py
searchsam/link2clip
0
12793052
<reponame>searchsam/link2clip<gh_stars>0
#!/usr/bin/python3

# Import libraries
import re
import bs4
import sys
import time
import progress.bar
import requests
import pyperclip


def main(url, page):
    # Connect to the URL
    response = requests.get(url)
    f = open(page + ".txt", "a+")

    # Parse HTML and save to BeautifulSoup object
    soup = bs4.BeautifulSoup(response.text, "html.parser")
    patron = re.compile(
        r"(http://|https://)[a-z0-9\-\.\/\?=\&]*(" + page + "[.a-z/?#!A-Z0-9-_]*)"
    )

    # To download the whole data set, let's do a for loop through all a tags
    bar1 = progress.bar.Bar("Procesando:", max=len(soup.findAll("a")))
    for i in range(len(soup.findAll("a"))):
        # 'a' tags are for links
        one_a_tag = soup.findAll("a")[i]
        try:
            link = one_a_tag["href"]
            if page in link:
                p = patron.match(link)
                urlink = p.group(1) + p.group(2)
                f.write(urlink + "\n")
                pyperclip.copy(urlink)
                pyperclip.paste()
                time.sleep(1)
        except (KeyError):
            pass
        bar1.next()
    bar1.finish()


if __name__ == "__main__":
    # Set the URL you want to webscrape from
    url = sys.argv[1]
    page = sys.argv[2]
    main(url, page)
3.328125
3
tests/conftest.py
mattip/numpy-threading-extensions
0
12793053
import numpy as np
import pytest
from packaging.utils import Version

import fast_numpy_loops

old_numpy = Version(np.__version__) < Version('1.18')


@pytest.fixture(scope='session')
def initialize_fast_numpy_loops():
    fast_numpy_loops.initialize()


@pytest.fixture(scope='function')
def rng():
    if old_numpy:
        class OldRNG(np.random.RandomState):
            pass
        rng = OldRNG(1234)
        rng.random = rng.random_sample
        rng.integers = rng.randint
        return rng
    else:
        return np.random.default_rng(1234)
2.140625
2
quavl/lib/expressions/rqbit.py
fabianbauermarquart/quavl
0
12793054
<reponame>fabianbauermarquart/quavl<filename>quavl/lib/expressions/rqbit.py<gh_stars>0
from typing import List

import numpy as np
from z3 import Bool, BoolRef, And, Or

from quavl.lib.expressions.qbit import QbitVal


class RQbitVal(QbitVal):
    def __init__(self,
                 z0: BoolRef = False,
                 z1: BoolRef = False,
                 h0: BoolRef = False,
                 h1: BoolRef = False,
                 zm0: BoolRef = False,
                 zm1: BoolRef = False,
                 hm0: BoolRef = False,
                 hm1: BoolRef = False,
                 v0: BoolRef = False,
                 v1: BoolRef = False):
        """
        Constructor for a reduced state space qbit.
        :param z0: (1, 0) (Computational basis)
        :param z1: (0, 1)
        :param h0: (1, 1)/sqrt(2) (Hadamard basis)
        :param h1: (1, -1)/sqrt(2)
        :param zm0: (-1, 0)
        :param zm1: (0, -1)
        :param hm0: (-1, -1)/sqrt(2)
        :param hm1: (-1, 1)/sqrt(2)
        :param v0: ((1+1j)/2, (1-1j)/2)
        :param v1: ((1-1j)/2, (1+1j)/2)
        """
        super().__init__()

        if not isinstance(z0, BoolRef) \
                and sum(np.array([z0, z1, h0, h1, zm0, zm1, hm0, hm1, v0, v1], dtype=int)) != 1:
            raise ValueError('Exactly one parameter has to be one.')

        self.z0 = z0
        self.z1 = z1
        self.h0 = h0
        self.h1 = h1
        self.zm0 = zm0
        self.zm1 = zm1
        self.hm0 = hm0
        self.hm1 = hm1
        self.v0 = v0
        self.v1 = v1

    def __eq__(self, other: 'RQbitVal'):
        return self.z0 == other.z0 and self.z1 == other.z1 and \
               self.h0 == other.h0 and self.h1 == other.h1 and \
               self.zm0 == other.zm0 and self.zm1 == other.zm1 and \
               self.hm0 == other.hm0 and self.hm1 == other.hm1 and \
               self.v0 == other.v0 and self.v1 == other.v1

    def __neq__(self, other: 'RQbitVal'):
        return self.z0 != other.z0 or self.z1 != other.z1 or \
               self.h0 != other.h0 or self.h1 != other.h1 or \
               self.zm0 != other.zm0 or self.zm1 != other.zm1 or \
               self.hm0 != other.hm0 or self.hm1 != other.hm1 or \
               self.v0 != other.v0 or self.v1 != other.v1

    def __neg__(self):
        return RQbitVal(z0=self.z1, z1=self.z0,
                        h0=self.h1, h1=self.h0,
                        zm0=self.zm1, zm1=self.zm0,
                        hm0=self.hm1, hm1=self.hm0,
                        v0=self.v1, v1=self.v0)

    def __repr__(self):
        return self.get_identifier()

    def get_constraints(self, computational_basis_only: bool = False):
        if computational_basis_only:
            attributes = ['z0', 'z1']
        else:
            attributes = ['z0', 'z1', 'v0', 'v1']
            # ['z0', 'z1', 'h0', 'h1', 'zm0', 'zm1', 'hm0', 'hm1', 'v0', 'v1']

        conjunctions = []

        for attribute in attributes:
            conjunction = And([getattr(self, v) == (v == attribute) for v in attributes])
            conjunctions.append(conjunction)

        return Or(conjunctions)

    def get_identifier(self):
        """
        :return: Identifier.
        """
        return str(self.z0).split('.')[0]


def RQbit(identifier: str) -> RQbitVal:
    """
    Generate a named rqbit.
    :param identifier: chosen identifier.
    :return: RQbit.
    """
    return RQbitVal(Bool('%s.z0' % identifier),
                    Bool('%s.z1' % identifier),
                    Bool('%s.h0' % identifier),
                    Bool('%s.h1' % identifier),
                    Bool('%s.zm0' % identifier),
                    Bool('%s.zm1' % identifier),
                    Bool('%s.hm0' % identifier),
                    Bool('%s.hm1' % identifier),
                    Bool('%s.v0' % identifier),
                    Bool('%s.v1' % identifier))


def RQbits(identifiers: List[str]) -> List[RQbitVal]:
    """
    Generate many named rqbits.
    :param identifiers: chosen identifiers.
    :return: List of rqbits.
    """
    return [RQbit(identifier) for identifier in identifiers]
2.703125
3
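The record above encodes basis states as one-hot z3 Booleans. A minimal sketch of the one-hot constraint pattern used by `RQbitVal.get_constraints`, written in plain z3 with no `quavl` dependency (names are illustrative):

import z3

attributes = ['z0', 'z1', 'v0', 'v1']
bits = {a: z3.Bool('q.' + a) for a in attributes}

# Exactly one attribute is true: for each candidate, assert it is true and
# every other attribute false, then take the disjunction over candidates.
one_hot = z3.Or([z3.And([bits[v] == (v == a) for v in attributes])
                 for a in attributes])

s = z3.Solver()
s.add(one_hot, bits['z0'])
assert s.check() == z3.sat
print(s.model())  # z0 true, the rest forced false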
ingest/country_functions.py
csmets/travel-wish-list
0
12793055
#!/usr/bin/python3

"""
Bunch of country related functions to help get extended data.
"""

import json
import requests

# Load country codes file and store into variable
countries_file = open('assets/country-codes.json')
countries = json.load(countries_file)
countries_file.close()

# Load country lat and long file and store into variable
country_latlng_file = open('assets/countrycode-latlong.json')
country_latlng = json.load(country_latlng_file)
country_latlng_file.close()


def get_country_latlng(cc):
    """ Using country code, find lat and long """
    cc_key = cc.lower()
    lat = country_latlng[cc_key]['lat']
    lng = country_latlng[cc_key]['long']
    return lat, lng


def get_country_code(country_name):
    """ With country name, find the country code """
    for country in countries:
        if country['name'].lower() == country_name.lower():
            return country['code'].upper()


def get_country_name(country_code):
    """ Using the country code, find the name """
    for country in countries:
        if country['code'].lower() == country_code.lower():
            return country['name'].capitalize()


def get_wikipedia_description(search):
    """
    Use the Wikipedia API to fetch descriptions.

    Wikipedia's API turned out to be slow enough that ingesting the data
    takes a long time, so for now this is deactivated to get things up and
    running quickly. Descriptions will have to be fetched via the app
    (front-end) instead.
    """
    disable = True

    if disable is False:
        wiki_req = requests.get(
            'https://en.wikipedia.org/w/api.php' +
            '?format=json' +
            '&action=query' +
            '&prop=extracts' +
            '&exintro=' +
            '&explaintext=' +
            '&titles={query}'.format(query=search))

        response = wiki_req.json()
        pages = response['query']['pages']

        description = ""
        for value in pages.values():
            if 'extract' in value:
                description = value['extract']
            else:
                description = ""
            break
    else:
        description = ""

    return description


def insert_countries(db_func):
    for country in countries:
        record = dict()
        record['name'] = country['name']
        record['code'] = country['code']
        record['lat'], record['lng'] = get_country_latlng(country['code'])
        record['description'] = get_wikipedia_description(country['name'])
        db_func(record)


def insert_cities(db_func):
    with open('assets/cities.json') as cities_file:
        cities = json.load(cities_file)

    for city in cities:
        record = dict()
        record['name'] = city['name']
        record['code'] = city['country']
        record['country'] = get_country_name(city['country'])
        record['lat'] = city['lat']
        record['lng'] = city['lng']
        record['description'] = get_wikipedia_description(city['name'])
        db_func(record)
3.53125
4
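A hedged usage sketch for the lookup helpers above, assuming the two asset JSON files are present and contain the country in question (the example country and code are illustrative):

# Assumes the functions above are in scope and assets/ is populated.
code = get_country_code('Australia')       # e.g. 'AU', if present in the file
lat, lng = get_country_latlng(code)
print(code, lat, lng, get_country_name(code))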
python/pyarmnn/test/test_deserializer.py
Project-Xtended/external_armnn
856
12793056
<gh_stars>100-1000
# Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
# SPDX-License-Identifier: MIT
import os

import pytest
import pyarmnn as ann
import numpy as np


@pytest.fixture()
def parser(shared_data_folder):
    """
    Parse and setup the test network to be used for the tests below
    """
    parser = ann.IDeserializer()
    parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn'))
    yield parser


def test_deserializer_swig_destroy():
    assert ann.IDeserializer.__swig_destroy__, "There is a swig python destructor defined"
    assert ann.IDeserializer.__swig_destroy__.__name__ == "delete_IDeserializer"


def test_check_deserializer_swig_ownership(parser):
    # Check to see that SWIG has ownership for parser. This instructs SWIG to take
    # ownership of the return value. This allows the value to be automatically
    # garbage-collected when it is no longer in use
    assert parser.thisown


def test_deserializer_get_network_input_binding_info(parser):
    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    input_name = 'input_1'

    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)

    tensor = input_binding_info[1]
    assert tensor.GetDataType() == 2
    assert tensor.GetNumDimensions() == 4
    assert tensor.GetNumElements() == 784
    assert tensor.GetQuantizationOffset() == 128
    assert tensor.GetQuantizationScale() == 0.007843137718737125


def test_deserializer_get_network_output_binding_info(parser):
    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    output_name = "dense/Softmax"

    output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name)

    # Check the tensor info retrieved from GetNetworkOutputBindingInfo
    tensor1 = output_binding_info1[1]
    assert tensor1.GetDataType() == 2
    assert tensor1.GetNumDimensions() == 2
    assert tensor1.GetNumElements() == 10
    assert tensor1.GetQuantizationOffset() == 0
    assert tensor1.GetQuantizationScale() == 0.00390625


def test_deserializer_filenotfound_exception(shared_data_folder):
    parser = ann.IDeserializer()

    with pytest.raises(RuntimeError) as err:
        parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn'))

    # Only check for part of the exception since the exception returns
    # absolute path which will change on different machines.
    assert 'Cannot read the file' in str(err.value)


def test_deserializer_end_to_end(shared_data_folder):
    parser = ann.IDeserializer()

    network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, "mock_model.armnn"))

    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    input_name = 'input_1'
    output_name = 'dense/Softmax'

    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)

    preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]

    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
    assert 0 == len(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert "" == messages

    # Load test image data stored in input_lite.npy
    input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy'))
    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])

    output_tensors = []
    out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name)
    out_tensor_info = out_bind_info[1]
    out_tensor_id = out_bind_info[0]
    output_tensors.append((out_tensor_id, ann.Tensor(out_tensor_info)))

    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    output_vectors = []
    for index, out_tensor in enumerate(output_tensors):
        output_vectors.append(out_tensor[1].get_memory_area())

    # Load golden output file for result comparison.
    expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy'))

    # Check that output matches golden output
    assert (expected_outputs == output_vectors[0]).all()
1.960938
2
examples/personzju/run.py
zjuchenyuan/EasyLogin
33
12793057
<reponame>zjuchenyuan/EasyLogin<filename>examples/personzju/run.py
from EasyLogin import EasyLogin, mymd5
from mpms import MPMS
import threading
import time

thread_data = threading.local()

myprint = lambda s: print("[{showtime}] {s}".format(showtime=time.strftime("%Y-%m-%d %H:%M:%S"), s=s))

a = EasyLogin(cachedir="cache")


def sign(timestamp, url, params):
    t = "1f11192bd9d14a09b29fc59d556e24e3"
    o = [i[0] + str(i[1]) for i in sorted(params.items())]
    s = t + url + "".join(o) + str(timestamp) + " " + t
    return mymd5(s)


def get(url, params, **kwargs):
    global a
    t = int(time.time()) * 1000
    a.s.headers.update({
        "appKey": "50634610756a4c0e82d5a13bb692e257",
        "timestamp": str(t),
        "sign": sign(t, url, params),
    })
    x = a.get("https://person.zju.edu.cn/server" + url + "?" +
              "&".join(k + "=" + str(v) for (k, v) in sorted(params.items())),
              o=True, result=False, **kwargs)
    return x.json()


def tprint(*args):
    print("\t".join([str(i) for i in args]))


def worker(item):
    global thread_data
    a = thread_data.__dict__.get("a")
    if not a:
        a = EasyLogin(cachedir="cache")
        thread_data.__dict__["a"] = a
    html = a.get("https://person.zju.edu.cn/" + item[3], result=False)
    uid = html.split("getQRcode.php?uid=", 2)[1].split("&", 2)[0]
    item.append(uid)
    return item


def handler(meta, item):
    meta["fp"].write("\t".join([str(i) for i in item]) + "\n")


if __name__ == "__main__":
    # print(sign(1579490640000, "/api/front/psons/search", {"size": 12, "page": 0, "lang": "cn"}))
    meta = {"fp": open("personzju.txt", "w", encoding="utf-8")}
    m = MPMS(worker, handler, 2, 2, meta=meta)
    m.start()
    for t in get("/api/front/psons/search", {"size": 10000, "page": 0, "lang": "cn"}, cache=True)["data"]["content"]:
        # tprint(t["cn_name"], t["college_name"], t["work_title"], t["mapping_name"], t["access_count"])
        m.put([t["cn_name"], t["college_name"], t["work_title"], t["mapping_name"], t["access_count"]])
    while len(m) > 10:
        myprint("Remaining " + str(len(m)))
        time.sleep(2)
    m.join()
    myprint("Done!")
2.390625
2
map-reduce with caching on HDFS/serverHDFS.py
Tianyi6679/mincemeatpy
3
12793058
<reponame>Tianyi6679/mincemeatpy
import socket
import pickle
import os
import re


class ProcessData:
    messageType = ""
    fileName = ""
    data = ""
    dataNodeAddressList = []


serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serv.bind(('192.168.56.103', 12347))
serv.listen(5)
# serv.settimeout(0.0)
# serv.setblocking(0)

while True:
    conn, addr = serv.accept()
    # serv.setblocking(0)
    # serv.settimeout(1.0)
    from_client = ''
    try:
        data = b""
        while True:
            packet = conn.recv(4096)
            if not packet:
                break
            if packet[-4:] == "FINI".encode():
                data += packet[:-4]
                break
            data += packet
            # print(packet)
    except:
        print("recv error")
    print(data)
    data_variable = pickle.loads(data)

    if data_variable.messageType == 4:
        command = "$HADOOP_HOME/bin/hdfs dfs -copyToLocal /mapreduce/" + data_variable.fileName + " /root/hdfsTemp2/"
        os.system(command)
        file = open("/root/hdfsTemp2/" + data_variable.fileName, 'rb')
        # file = open("/root/hdfsTemp2/fileName453", 'rb')
        baseFileName = os.path.basename(file.name)
        sendData = ProcessData()
        sendData.fileName = baseFileName
        sendData.data = pickle.load(file)
        data_string = pickle.dumps(sendData)
        conn.send(data_string)
        conn.send("FINI".encode())
    elif data_variable.messageType == 2:
        file = open("/root/hdfsTemp/" + data_variable.fileName, 'wb')
        pickle.dump(data_variable.data, file)
        file.close()
        print(data_variable.fileName)
        print(data_variable.data)
        print("OS COMMAND CALLED")
        fileName1 = "$HADOOP_HOME/bin/hdfs dfs -copyFromLocal /root/hdfsTemp/" + data_variable.fileName + " /mapreduce/"
        os.system(fileName1)
        conn.send("I am SERVER\n".encode())
    elif data_variable.messageType == 3:
        command = "$HADOOP_HOME/bin/hdfs fsck /mapreduce/" + data_variable.fileName + " -files -blocks -locations | grep 'Data' | sed 's/^.*: //'e"
        result = os.popen(command).read()
        r1 = re.findall(r"\b(?:\d{1,3}\.){3}\d{1,3}\b", result)
        print(r1)
        sendData = ProcessData()
        sendData.dataNodeAddressList = r1
        data_string = pickle.dumps(sendData)
        conn.send(data_string)

    conn.close()
    print('client disconnected')
2.75
3
research/cv/PAMTRI/MultiTaskNet/src/loss/hard_mine_triplet_loss.py
mindspore-ai/models
77
12793059
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Triplet loss with hard positive/negative mining"""
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore.numpy as np
import mindspore.common.dtype as mstype
from mindspore import Tensor


class MarginRankingLoss(nn.Cell):
    """function MarginRankingLoss"""
    def __init__(self, margin=0.0, reduction='mean'):
        super(MarginRankingLoss, self).__init__()
        self.reduction = reduction
        self.margin = margin
        self.sum = ops.ReduceSum(keep_dims=False)

    def construct(self, input1, input2, target):
        output = np.maximum(0, -target * (input1 - input2) + self.margin)
        if self.reduction == 'mean':
            output = np.mean(output)
        elif self.reduction == 'sum':
            output = self.sum(output, 0)

        return output


class addmm(nn.Cell):
    """function _addmm"""
    def construct(self, mat, alpha, beta, mat1, mat2):
        out = ops.matmul(mat1, mat2)
        return mat * alpha + out * beta


class TripletLoss(nn.Cell):
    """Triplet loss with hard positive/negative mining"""
    def __init__(self, batch_size, margin=0.3):
        super(TripletLoss, self).__init__()
        self.addmm = addmm()
        self.pow = ops.Pow()
        self.equal = ops.Equal()
        self.cast = ops.Cast()
        self.select = ops.Select()
        self.reducemax = ops.ReduceMax()
        self.reducemin = ops.ReduceMin()
        self.sum = ops.ReduceSum(keep_dims=True)
        self.ranking_loss = MarginRankingLoss(margin=margin)
        self.expand = ops.BroadcastTo((batch_size, batch_size))
        self.zeros = Tensor(np.zeros((batch_size, batch_size)).astype(np.float32))
        self.maxs = Tensor(np.full((batch_size, batch_size), 65535).astype(np.float32))

    def construct(self, inputs, targets):
        """TripletLoss construct"""
        inputs_ = self.pow(inputs, 2)
        inputs_ = self.sum(inputs_, 1)
        dist = self.expand(inputs_)  # (32, 32)
        dist = dist + dist.T
        dist = self.addmm(dist, 1, -2, inputs, inputs.T)
        dist = np.sqrt(np.clip(dist, xmin=1e-12, xmax=np.amax(dist)), dtype=dist.dtype)

        targets = self.cast(targets, mstype.float32)
        mask = self.equal(self.expand(targets), self.expand(targets).T)
        dist_ap = self.select(mask, dist, self.zeros)
        mask_zeros = self.equal(self.cast(mask, mstype.int32), self.zeros)
        dist_an = self.select(mask_zeros, dist, self.maxs)
        dist_ap = self.reducemax(dist_ap, 1)
        dist_an = self.reducemin(dist_an, 1)

        y = np.ones_like((dist_an))
        loss = self.ranking_loss(dist_an, dist_ap, y)

        return loss
2.46875
2
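The `TripletLoss` record above builds its pairwise distance matrix through the identity ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b: the `addmm` cell computes dist * 1 + (inputs @ inputs.T) * (-2). A small NumPy sketch of that step, independent of MindSpore:

import numpy as np

rng = np.random.default_rng(0)
inputs = rng.normal(size=(4, 8)).astype(np.float32)  # 4 embeddings, dim 8

# ||a||^2 broadcast over rows and columns, then the -2ab cross term.
sq = (inputs ** 2).sum(axis=1, keepdims=True)        # (4, 1)
dist_sq = sq + sq.T - 2.0 * inputs @ inputs.T        # (4, 4)
dist = np.sqrt(np.clip(dist_sq, 1e-12, None))

# Matches direct computation of pairwise Euclidean distances.
direct = np.linalg.norm(inputs[:, None, :] - inputs[None, :, :], axis=-1)
assert np.allclose(dist, direct, atol=1e-3)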
test/test_io.py
thomasleese/fitbod-analysis
0
12793060
from pathlib import Path
from unittest import TestCase

from gains.io import FitbodLoader


class TestFitbodLoader(TestCase):
    data_filename = Path(__file__).parent / "data.csv"

    def test_loading_data(self):
        loader = FitbodLoader(self.data_filename)
        analysis = loader.analysis
        exercises = analysis.exercises

        self.assertEqual(len(exercises), 890)
        self.assertEqual(exercises[3].average_weight, 38.5)
2.40625
2
django_enum_choices/tests/test_form_fields.py
okapies/django-enum-choices
73
12793061
from django.test import TestCase

from django_enum_choices.forms import EnumChoiceField

from .testapp.enumerations import CharTestEnum


class FormFieldTests(TestCase):
    def test_field_instance_creates_choices_correctly(self):
        instance = EnumChoiceField(CharTestEnum)

        choices = instance.build_choices()

        self.assertEqual(
            choices,
            [('first', 'first'),
             ('second', 'second'),
             ('third', 'third')]
        )

    def test_field_instance_creates_choices_correctly_with_custom_choice_builder(self):
        def choice_builder(choice):
            return 'Custom_' + choice.value, choice.value

        instance = EnumChoiceField(CharTestEnum, choice_builder=choice_builder)

        choices = instance.build_choices()

        self.assertEqual(
            choices,
            [('Custom_first', 'first'),
             ('Custom_second', 'second'),
             ('Custom_third', 'third')]
        )
2.46875
2
secret_handshake/crypto.py
ProgVal/PySecretHandshake
22
12793062
# Copyright (c) 2017 PySecretHandshake contributors (see AUTHORS for more details)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import hashlib
import hmac
from base64 import b64decode

from nacl.bindings import (crypto_box_afternm, crypto_box_open_afternm,
                           crypto_scalarmult)
from nacl.exceptions import CryptoError
from nacl.public import PrivateKey
from nacl.signing import VerifyKey

# The base64-encoded application key was redacted in this record ('<KEY>');
# the call is closed here so the module parses, with the placeholder kept.
APPLICATION_KEY = b64decode('<KEY>')


class SHSError(Exception):
    """A SHS exception."""
    pass


class SHSCryptoBase(object):
    def __init__(self, local_key, ephemeral_key=None, application_key=None):
        self.local_key = local_key
        self.application_key = application_key or APPLICATION_KEY
        self._reset_keys(ephemeral_key or PrivateKey.generate())

    def _reset_keys(self, ephemeral_key):
        self.local_ephemeral_key = ephemeral_key
        self.local_app_hmac = (hmac.new(self.application_key, bytes(ephemeral_key.public_key),
                                        digestmod='sha512')
                               .digest()[:32])

    def generate_challenge(self):
        """Generate and return a challenge to be sent to the server."""
        return self.local_app_hmac + bytes(self.local_ephemeral_key.public_key)

    def verify_challenge(self, data):
        """Verify the correctness of challenge sent from the client."""
        assert len(data) == 64
        sent_hmac, remote_ephemeral_key = data[:32], data[32:]

        h = hmac.new(self.application_key, remote_ephemeral_key, digestmod='sha512')
        self.remote_app_hmac = h.digest()[:32]
        ok = self.remote_app_hmac == sent_hmac

        if ok:
            # this is (a * b)
            self.shared_secret = crypto_scalarmult(bytes(self.local_ephemeral_key), remote_ephemeral_key)
            self.remote_ephemeral_key = remote_ephemeral_key
            # this is hash(a * b)
            self.shared_hash = hashlib.sha256(self.shared_secret).digest()

        return ok

    def clean(self, new_ephemeral_key=None):
        self._reset_keys(new_ephemeral_key or PrivateKey.generate())
        self.shared_secret = None
        self.shared_hash = None
        self.remote_ephemeral_key = None

    def get_box_keys(self):
        shared_secret = hashlib.sha256(self.box_secret).digest()
        return {
            'shared_secret': shared_secret,
            'encrypt_key': hashlib.sha256(shared_secret + bytes(self.remote_pub_key)).digest(),
            'decrypt_key': hashlib.sha256(shared_secret + bytes(self.local_key.verify_key)).digest(),
            'encrypt_nonce': self.remote_app_hmac[:24],
            'decrypt_nonce': self.local_app_hmac[:24]
        }


class SHSServerCrypto(SHSCryptoBase):
    def verify_client_auth(self, data):
        assert len(data) == 112
        a_bob = crypto_scalarmult(bytes(self.local_key.to_curve25519_private_key()), self.remote_ephemeral_key)
        box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest()
        self.hello = crypto_box_open_afternm(data, b'\x00' * 24, box_secret)
        signature, public_key = self.hello[:64], self.hello[64:]
        signed = self.application_key + bytes(self.local_key.verify_key) + self.shared_hash
        pkey = VerifyKey(public_key)

        # will raise an exception if verification fails
        pkey.verify(signed, signature)
        self.remote_pub_key = pkey
        b_alice = crypto_scalarmult(bytes(self.local_ephemeral_key),
                                    bytes(self.remote_pub_key.to_curve25519_public_key()))
        self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob + b_alice).digest()[:32]
        return True

    def generate_accept(self):
        okay = self.local_key.sign(self.application_key + self.hello + self.shared_hash).signature
        d = crypto_box_afternm(okay, b'\x00' * 24, self.box_secret)
        return d

    def clean(self, new_ephemeral_key=None):
        super(SHSServerCrypto, self).clean(new_ephemeral_key=new_ephemeral_key)
        self.hello = None
        self.b_alice = None


class SHSClientCrypto(SHSCryptoBase):
    """An object that encapsulates all the SHS client-side crypto.

    :param local_key: the keypair used by the client (:class:`nacl.public.PrivateKey` object)
    :param server_pub_key: the server's public key (``byte`` string)
    :param ephemeral_key: a fresh local :class:`nacl.public.PrivateKey`
    :param application_key: the unique application key (``byte`` string), defaults to SSB's
    """

    def __init__(self, local_key, server_pub_key, ephemeral_key, application_key=None):
        super(SHSClientCrypto, self).__init__(local_key, ephemeral_key, application_key)
        self.remote_pub_key = VerifyKey(server_pub_key)

    def verify_server_challenge(self, data):
        """Verify the correctness of challenge sent from the server."""
        assert super(SHSClientCrypto, self).verify_challenge(data)
        curve_pkey = self.remote_pub_key.to_curve25519_public_key()

        # a_bob is (a * B)
        a_bob = crypto_scalarmult(bytes(self.local_ephemeral_key), bytes(curve_pkey))
        self.a_bob = a_bob
        # this shall be hash(K | a * b | a * B)
        self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + a_bob).digest()

        # and message_to_box will correspond to H = sign(A)[K | Bp | hash(a * b)] | Ap
        signed_message = self.local_key.sign(self.application_key + bytes(self.remote_pub_key) + self.shared_hash)
        message_to_box = signed_message.signature + bytes(self.local_key.verify_key)
        self.hello = message_to_box
        return True

    def generate_client_auth(self):
        """Generate box[K|a*b|a*B](H)"""
        nonce = b"\x00" * 24
        # return box(K | a * b | a * B)[H]
        return crypto_box_afternm(self.hello, nonce, self.box_secret)

    def verify_server_accept(self, data):
        """Verify that the server's accept message is sane"""
        curve_lkey = self.local_key.to_curve25519_private_key()
        # b_alice is (A * b)
        b_alice = crypto_scalarmult(bytes(curve_lkey), self.remote_ephemeral_key)
        self.b_alice = b_alice
        # this is hash(K | a * b | a * B | A * b)
        self.box_secret = hashlib.sha256(self.application_key + self.shared_secret + self.a_bob + b_alice).digest()

        nonce = b"\x00" * 24

        try:
            # let's use the box secret to unbox our encrypted message
            signature = crypto_box_open_afternm(data, nonce, self.box_secret)
        except CryptoError:
            raise SHSError('Error decrypting server acceptance message')

        # we should have received sign(B)[K | H | hash(a * b)]
        # let's see if that signature can verify the reconstructed data on our side
        self.remote_pub_key.verify(self.application_key + self.hello + self.shared_hash, signature)
        return True

    def clean(self, new_ephemeral_key=None):
        super(SHSClientCrypto, self).clean(new_ephemeral_key=new_ephemeral_key)
        self.a_bob = None
        self.b_alice = None
1.765625
2
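A hedged end-to-end sketch of driving the handshake classes above, assuming the module is importable as `secret_handshake.crypto`; the application key is a made-up 32-byte test value, not SSB's real network key:

from nacl.public import PrivateKey
from nacl.signing import SigningKey
from secret_handshake.crypto import SHSClientCrypto, SHSServerCrypto

app_key = b'\x01' * 32                    # test-only application key
server_key = SigningKey.generate()        # server's long-term keypair
client_key = SigningKey.generate()        # client's long-term keypair

server = SHSServerCrypto(server_key, application_key=app_key)
client = SHSClientCrypto(client_key, bytes(server_key.verify_key),
                         PrivateKey.generate(), application_key=app_key)

# The four-step challenge/auth/accept exchange, in protocol order.
assert server.verify_challenge(client.generate_challenge())
assert client.verify_server_challenge(server.generate_challenge())
assert server.verify_client_auth(client.generate_client_auth())
assert client.verify_server_accept(server.generate_accept())
print(client.get_box_keys().keys())       # derived box-stream keys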
thyme/trajectories.py
nw13slx/thyme
0
12793063
<reponame>nw13slx/thyme
"""
Data structure that contains a collection of trajectory objects

<NAME> (Harvard University)
2020
"""

from copy import deepcopy

import logging
import numpy as np

from .trajectory import Trajectory
from thyme.utils.atomic_symbols import species_to_order_label
from thyme.utils.savenload import save_file, load_file


def dummy_comp(trj1, trj2):
    return True


class Trajectories:
    def __init__(self):
        self.nframes = 0
        self.ntrjs = 0
        self.alltrjs = {}
        self._iter_index = 0
        self.per_frame_attrs = []
        self.trj_id = None
        self.in_trj_id = None
        self.global_id = None

    def __repr__(self) -> str:
        return f"Trajectories with {len(self.alltrjs)} trj"

    def __str__(self):
        s = f"{len(self.alltrjs)} trajectories with {len(self)} frames\n"
        for name in self.alltrjs:
            s += f"----{name}----\n"
            s += f"{self.alltrjs[name]}\n"
        return s

    def __len__(self):
        return self.nframes

    def construct_id_list(self, force_run=False):
        if self.trj_id is not None and not force_run:
            max_trj = np.max(self.trj_id)
            max_frame = np.max(self.global_id)
            if max_trj == (self.ntrjs - 1) and max_frame == (self.nframes - 1):
                return

        self.trj_id = np.zeros(self.nframes, dtype=int)
        self.in_trj_id = np.zeros(self.nframes, dtype=int)
        self.global_id = np.arange(self.nframes)

        count = 0
        for id_trj, trj in enumerate(self.alltrjs.values()):
            nframes = trj.nframes
            self.trj_id[count : count + nframes] += id_trj
            self.in_trj_id[count : count + nframes] += np.arange(nframes)
            count += nframes

    def __getitem__(self, key):
        return self.alltrjs[key]

    def __iter__(self):
        return self

    def __next__(self):
        self._iter_index = getattr(self, "_iter_index", 0)
        if self._iter_index >= len(self):
            raise StopIteration
        self._iter_index += 1
        return self.get_frame(self._iter_index - 1)

    def get_frame(self, idx, keys=None):
        n_attrs = len(self)
        if idx >= n_attrs:
            raise ValueError(f"frame index overflow {n_attrs}")

        trj_id = self.trj_id[idx]
        frame_id = self.in_trj_id[idx]
        trj = list(self.alltrjs.values())[trj_id]
        trj_name = list(self.alltrjs.keys())[trj_id]
        return dict(name=trj_name, **trj.get_frame(frame_id, keys=keys))

    def get_attrs(self, key):
        self.construct_id_list()

        for id_trj, trj in enumerate(self.alltrjs.values()):
            attr = getattr(trj, key, None)
            if attr is None:
                raise ValueError(f"not all trjs has attr {attr}")

        array = []
        for id_trj, trj in enumerate(self.alltrjs.values()):
            sub_array = trj.get_attr(key)
            array += [sub_array]

        if len(array[0].shape) <= 1:
            array = np.hstack(array)
        else:
            array = np.vstack(array)
        return array[self.global_id]

    def include_frames(self, accept_id=None):
        if accept_id is None:
            return
        self.construct_id_list()
        self.trj_id = self.trj_id[accept_id]
        self.in_trj_id = self.in_trj_id[accept_id]
        self.global_id = self.global_id[accept_id]

    def save(self, name: str, format: str = None):
        if format in ["pickle", "npz"] or format is None:
            save_file(
                self.to_dict(),
                supported_formats={"npz": "npz", "pickle": "pickle"},
                filename=name,
                enforced_format=format,
            )
        elif format == "xyz":
            for trj in self.alltrjs.values():
                trj.save(f"{trj.name}_{name}", format)
        elif format == "poscar":
            for trj in self.alltrjs.values():
                trj.save(f"{trj.name}_{name}", format)
        else:
            raise NotImplementedError(
                f"Output format {format} not supported:"
                f" try from pickle, xyz, poscar"
            )
        logging.info(f"save as {name}")

    def to_dict(self):
        return {name: trj.to_dict() for name, trj in self.alltrjs.items()}

    @classmethod
    def from_file(cls, name: str, format: str = None, preserve_order: bool = False):
        """
        pickle format: previous objects saved as pickle format
        """
        obj = load_file(
            supported_formats={"npz": "npz", "pickle": "pickle"},
            filename=name,
            enforced_format=format,
        )
        if isinstance(obj, Trajectories):
            return obj
        return cls.from_dict(dict(obj))

    @staticmethod
    def from_dict(dictionary: dict, merge=False, preserve_order=False):
        """
        convert dictionary to a Trajectory instance
        """
        trjs = Trajectories()
        for name, trj_dict in dictionary.items():
            if not isinstance(trj_dict, dict):
                trj_dict = trj_dict.item()
            trj = Trajectory.from_dict(trj_dict)
            trjs.add_trj(trj, name=name, merge=merge, preserve_order=preserve_order)
        trjs.construct_id_list()
        return trjs

    def pop_trj(self, name):
        trj = self.alltrjs.pop(name, None)
        if trj is not None:
            self.nframes -= trj.nframes
            self.ntrjs -= 1

    def add_trj(
        self,
        trj,
        name=None,
        merge=False,
        preserve_order=False,
        metadata_compare=dummy_comp,
        save_mode=True,
    ):
        if len(self.alltrjs) == 0:
            self.per_frame_attrs = deepcopy(trj.per_frame_attrs)
        elif save_mode:
            nterms = len(self.per_frame_attrs)
            intersection = set(self.per_frame_attrs).intersection(trj.per_frame_attrs)
            if len(intersection) != nterms:
                print(self.per_frame_attrs)
                print(trj.per_frame_attrs)
                raise RuntimeError(f"not enough per_frame_attrs")

        if not merge:
            if name in self.alltrjs:
                name = f"{name}_{len(self.alltrjs)}"
            elif name is None and trj.name not in self.alltrjs:
                name = trj.name
            elif name is None:
                name = f"{len(self.alltrjs)}"
            self.alltrjs[name] = trj
            self.nframes += trj.nframes
            self.ntrjs += 1
            return

        # order trj by element
        order, label = species_to_order_label(trj.species)
        if name is None:
            stored_label = None
            for _label, oldtrj in self.alltrjs.items():
                old_order, old_label = species_to_order_label(oldtrj.species)
                if metadata_compare(trj, oldtrj) and old_label == label:
                    stored_label = _label
                    break
            if stored_label is None:
                stored_label = label
        else:
            stored_label = name + "_" + label
            label = name + "_" + label

        if stored_label not in self.alltrjs:
            newtrj = Trajectory()
            newtrj.name = np.copy(stored_label)
            self.alltrjs[stored_label] = newtrj
        else:
            oldtrj = self.alltrjs[stored_label]
            if metadata_compare(trj, oldtrj):
                logging.debug(
                    f"! Metadata is exactly the same. Merge to {stored_label}"
                )
            else:
                stored_label, last_label = obtain_store_label(
                    last_label="NA0",
                    label=label,
                    alldata=self.alltrjs,
                    preserve_order=True,
                )
                self.alltrjs[stored_label] = Trajectory()
                logging.debug(
                    f"! Metadata is not the same. Not merge. Build {stored_label}"
                )

        self.alltrjs[stored_label].add_trj(trj, save_mode=False, order=order)
        self.nframes += trj.nframes
        self.ntrjs += 1
        return stored_label

    def add_trjs(
        self,
        trjs,
        merge=False,
        preserve_order=False,
        metadata_compare=dummy_comp,
    ):
        nterms = len(self.per_frame_attrs)
        intersection = set(self.per_frame_attrs).intersection(trjs.per_frame_attrs)
        if len(intersection) != nterms:
            raise RuntimeError(f"not enough per_frame_attrs")

        for trj in trjs.alltrjs.values():
            self.add_trj(
                trj,
                name=None,
                merge=merge,
                preserve_order=preserve_order,
                metadata_compare=metadata_compare,
                save_mode=False,
            )

    def merge(self, preserve_order=False, metadata_compare=dummy_comp):
        trjs = self.remerge()
        del self.alltrjs
        self.alltrjs = trjs.alltrjs

    def remerge(self, preserve_order=False, metadata_compare=dummy_comp):
        trjs = Trajectories()
        for trj in self.alltrjs.values():
            trjs.add_trj(
                trj,
                name=None,
                merge=True,
                preserve_order=preserve_order,
                metadata_compare=metadata_compare,
            )
        return trjs


def obtain_store_label(last_label, label, alldata, preserve_order):
    stored_label = label
    if preserve_order:
        count = -1
        # find all the previous trajectories
        for l in alldata:
            if "_" in l:
                line_split = l.split("_")
            else:
                line_split = l
            if label == line_split[0]:
                _count = int(line_split[1])
                if _count > count:
                    count = _count
        if label != last_label:
            count += 1
            last_label = label
        stored_label = f"{label}_{count}"
    return stored_label, last_label
2.375
2
100DaysOfDays/Dia07/ex32.py
giselemanuel/programming-challenges
0
12793064
<gh_stars>0
"""
Exercício Python 32: Faça um programa que leia um ano qualquer e mostre se ele é bissexto.
"""
# (Exercise 32: read any year and report whether it is a leap year.)

print("-" * 40)
print(f'{"Ano Bissexto":^40}')
print("-" * 40)

ano = int(input("Insira um ano: "))

# Leap year: divisible by 4 but not by 100, or divisible by 400.
if ano % 4 == 0 and ano % 100 != 0 or ano % 400 == 0:
    print(f"O ano {ano} é BISSEXTO !!!")
else:
    print(f"O ano {ano} não é BISSEXTO !!!")
3.78125
4
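The corrected condition above depends on Python's operator precedence (`and` binds tighter than `or`), so it reads as "(divisible by 4 and not by 100) or divisible by 400". A quick sanity check against known leap and non-leap years:

def bissexto(ano):
    # (divisible by 4 and not by 100) or divisible by 400
    return ano % 4 == 0 and ano % 100 != 0 or ano % 400 == 0

assert bissexto(2000) and bissexto(2020)
assert not bissexto(1900) and not bissexto(2021)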
scripts/tif_to_netcdf.py
giancastro/mapshader
0
12793065
import xarray as xr

from mapshader.transforms import squeeze
from mapshader.transforms import cast
from mapshader.transforms import orient_array
from mapshader.transforms import flip_coords
from mapshader.transforms import reproject_raster


def run_float(input_file, output_file, chunks=(512, 512),
              name='data', scale_factor=0.1, fill_value=-9999):
    arr = xr.open_rasterio(input_file)
    arr = squeeze(arr, 'band')
    arr = cast(arr, dtype='float64')
    arr = orient_array(arr)
    arr = flip_coords(arr, dim='y')  # do we need this?
    arr = reproject_raster(arr, epsg=3857)

    dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))},
                         coords={'x': arr.coords['x'],
                                 'y': arr.coords['y']})
    dataset.attrs = dict(name=name)
    # Use the function parameters instead of repeating the literal values.
    dataset.to_netcdf(output_file,
                      encoding={'data': {'dtype': 'int16',
                                         'scale_factor': scale_factor,
                                         '_FillValue': fill_value}})


def run_int(input_file, output_file, chunks=(512, 512),
            name='data', fill_value=-9999):
    arr = xr.open_rasterio(input_file)
    arr = squeeze(arr, 'band')
    arr = orient_array(arr)
    arr = flip_coords(arr, dim='y')  # do we need this?
    arr = reproject_raster(arr, epsg=3857)

    dataset = xr.Dataset({name: (['y', 'x'], arr.chunk(chunks))},
                         coords={'x': arr.coords['x'],
                                 'y': arr.coords['y']})
    dataset.attrs = dict(name=name)
    dataset.to_netcdf(output_file,
                      encoding={'data': {'dtype': 'int16',
                                         '_FillValue': fill_value}})


if __name__ == '__main__':
    import sys
    from argparse import ArgumentParser
    from os import path

    parser = ArgumentParser()
    parser.add_argument('-i')
    parser.add_argument('-o')
    parser.add_argument('-f')
    parsed = parser.parse_args()

    input_file = path.abspath(path.expanduser(parsed.i))

    print(f'Converting {input_file} from TIFF to NetCDF File', file=sys.stdout)

    if not parsed.o:
        output_file = input_file.replace('.tif', '.nc')
    else:
        output_file = path.abspath(path.expanduser(parsed.o))

    if parsed.f:
        run_float(input_file, output_file)
    else:
        run_int(input_file, output_file)

    print(f'Conversion Complete: {output_file}', file=sys.stdout)
2.21875
2
deliver/functions_will/extract_feature_pixel.py
mariecpereira/Extracao-de-Caracteristicas-Corpo-Caloso
0
12793066
# -*- encoding: utf-8 -*-

def extract_feature_pixel(img, mask_1, mask_0=[], mask_2=[], mask_3=[],
                          mask_4=[], mask_5=[], dim_prof=0):
    import numpy as np

    # Reads the image and masks, and returns an array of pixels as attributes.
    n = img.shape[dim_prof]

    # Integer division: these counts are used as array dimensions below.
    t1 = img[mask_1].size // n
    t0 = img[mask_0].size // n
    t2 = img[mask_2].size // n
    t3 = img[mask_3].size // n
    t4 = img[mask_4].size // n
    t5 = img[mask_5].size // n

    ones = np.ones((t1, 1))
    eval1 = img[mask_1].reshape(n, -1).T
    atr_slice = np.concatenate((eval1, ones), axis=1)

    if mask_0 != []:
        zeros = np.zeros((t0, 1))
        eval0 = img[mask_0].reshape(n, -1).T
        atr0 = np.concatenate((eval0, zeros), axis=1)
        atr_slice = np.vstack([atr0, atr_slice])

    if mask_2 != []:
        twos = np.ones((t2, 1)) * 2
        eval2 = img[mask_2].reshape(n, -1).T
        atr2 = np.concatenate((eval2, twos), axis=1)
        atr_slice = np.vstack([atr_slice, atr2])

    if mask_3 != []:
        threes = np.ones((t3, 1)) * 3
        eval3 = img[mask_3].reshape(n, -1).T
        atr3 = np.concatenate((eval3, threes), axis=1)
        atr_slice = np.vstack([atr_slice, atr3])

    if mask_4 != []:
        fours = np.ones((t4, 1)) * 4
        eval4 = img[mask_4].reshape(n, -1).T
        atr4 = np.concatenate((eval4, fours), axis=1)
        atr_slice = np.vstack([atr_slice, atr4])

    if mask_5 != []:
        fives = np.ones((t5, 1)) * 5
        eval5 = img[mask_5].reshape(n, -1).T
        atr5 = np.concatenate((eval5, fives), axis=1)
        atr_slice = np.vstack([atr_slice, atr5])

    return atr_slice
2.9375
3
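A small usage sketch for the function above, assuming it has been imported; the synthetic image and mask names are illustrative:

import numpy as np

img = np.arange(3 * 4 * 4, dtype=float).reshape(3, 4, 4)  # 3 slices of 4x4
mask_fg = np.zeros((3, 4, 4), dtype=bool)
mask_bg = np.zeros((3, 4, 4), dtype=bool)
mask_fg[:, 1, 1] = True   # one foreground pixel per slice
mask_bg[:, 0, 0] = True   # one background pixel per slice

atr = extract_feature_pixel(img, mask_fg, mask_0=mask_bg)
print(atr.shape)  # (2, 4): one row per pixel, 3 slice values + label column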
schools/urls.py
suhailvs/stackschools
0
12793067
from django.urls import include, path, re_path
from . import views as school_views
from django.views.generic import TemplateView

app_name = 'schools'

urlpatterns = [
    # if blank show districts(Thissur, palakkad...)
    # path('', school_views.states, name='states'),

    # if digit, view school information
    re_path(r'^(?P<code>\d{5})/', school_views.school_view_kerala, name='school_view_kerala'),
    re_path(r'^(?P<code>\d{4}\w{7})/', school_views.school_view, name='school_view'),

    # if districts show districts(Thissur, palakkad...) of a state
    path('<slug:state>/', school_views.districts, name='districts'),

    # if character, show sub districts
    path('<slug:state>/<district>/', school_views.sub_districts, name='sub_districts'),
    path('<slug:state>/<district>/<sub_district>/', school_views.schools, name='schools'),
]
2.1875
2
cloudbio/deploy/plugins/galaxy.py
glebkuznetsov/cloudbiolinux
122
12793068
from cloudbio.galaxy.tools import _install_application


def install_tool(options):
    version = options.get("galaxy_tool_version")
    name = options.get("galaxy_tool_name")
    install_dir = options.get("galaxy_tool_dir", None)
    _install_application(name, version, tool_install_dir=install_dir)


configure_actions = {
    "install_galaxy_tool": install_tool,
}
1.609375
2
cli/core/path.py
morajlab/mjw-cli
0
12793069
import os


def getRootPath():
    return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))


def getProjectAbsPath(*path):
    return os.path.join(getRootPath(), *path)


def getCachePath(*path):
    return getProjectAbsPath(".cache", *path)


def getTemplatePath(*path):
    return getProjectAbsPath("cli", "templates", *path)


def getNodeBinPath(name):
    return getProjectAbsPath("node_modules", ".bin", name)


def getPipEnvBinPath(name):
    return getProjectAbsPath("env", "bin", name)


def getCurrentAbsPath(path="."):
    if os.path.isabs(path):
        return os.path.abspath(path)
    else:
        return getCurrentAbsPath(os.path.join(os.getcwd(), path))
2.265625
2
django_cte/__init__.py
arogazinsky/django-more
32
12793070
from importlib import reload, import_module

# Project imports
from patchy import patchy

default_app_config = 'django_cte.apps.DjangoCTEConfig'


def patch_cte():
    """ Apply CTE monkey patches to Django.

        At present these patches must be updated manually to conform with
        new CTE implementations, but this is only necessary to use new
        functionality.

        Order of patching *matters* due to namespace reload.
    """
    with patchy('django.db.models', 'django_cte') as p:
        p.mod('expressions').auto()
        p.mod('sql.compiler').auto()
        p.mod('sql.subqueries').auto()

        # Force reload so that new query types are imported into namespace
        reload(import_module('django.db.models.sql'))

        p.mod('query').auto()
        p.cls('manager.BaseManager').auto()
        p.cls('base.Model').auto()
2.34375
2
BlenderAddon/PhotonBlend/psdl/cmd.py
jasonoscar88/Photon-v2
88
12793071
<reponame>jasonoscar88/Photon-v2
from . import cmd_base

from abc import abstractmethod


class WorldCommand(cmd_base.SdlCommand):
    def __init__(self):
        super(WorldCommand, self).__init__()
        self.__clauses = {}
        self.__data_name = ""

    @abstractmethod
    def get_type_category(self):
        pass

    @abstractmethod
    def get_type_name(self):
        pass

    @abstractmethod
    def to_sdl(self, sdlconsole):
        pass

    def set_data_name(self, name):
        self.__data_name = name

    def update_clause(self, clause):
        clause_type_and_name = clause.get_type_name() + clause.get_name()
        self.__clauses[clause_type_and_name] = clause

    def get_data_name(self):
        return self.__data_name

    def get_clauses(self):
        return self.__clauses

    def _get_formated_data_name(self):
        return "\"@" + self.__data_name + "\""


class CreationCommand(WorldCommand):
    def __init__(self):
        super(CreationCommand, self).__init__()

    @abstractmethod
    def get_type_category(self):
        pass

    @abstractmethod
    def get_type_name(self):
        pass

    def to_sdl(self, sdlconsole):
        cmdstrs = ["-> ",
                   self.get_type_category(), "(", self.get_type_name(), ") ",
                   self._get_formated_data_name(), " "]
        for clause_name, clause in self.get_clauses().items():
            cmdstrs.append(clause.to_sdl_fragment())
        cmdstrs.append("\n")

        return "".join(cmdstrs)


class FunctionCommand(WorldCommand):
    def __init__(self):
        super(FunctionCommand, self).__init__()
        self.__func_name = ""

    @abstractmethod
    def get_type_category(self):
        pass

    @abstractmethod
    def get_type_name(self):
        pass

    def to_sdl(self, sdlconsole):
        cmdstrs = ["-> ",
                   self.get_type_category(), "(", self.get_type_name(), ") ",
                   self.get_func_name(), "(", self._get_formated_data_name(), ") "]
        for clause_name, clause in self.get_clauses().items():
            cmdstrs.append(clause.to_sdl_fragment())
        cmdstrs.append("\n")

        return "".join(cmdstrs)

    def set_func_name(self, name):
        self.__func_name = name

    def get_func_name(self):
        return self.__func_name


class RawCommand(cmd_base.SdlCommand):
    def __init__(self):
        super(RawCommand, self).__init__()
        self.__command_string = ""

    def append_string(self, string):
        self.__command_string += string

    def to_sdl(self, sdlconsole):
        return self.__command_string


# TODO: core commands
2.5
2
python-flask/mymodel.py
jirramounikapriyanka/sample-android-python-ml-app
0
12793072
<reponame>jirramounikapriyanka/sample-android-python-ml-app<gh_stars>0
import pandas as pd
import pickle

from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.preprocessing import StandardScaler, LabelEncoder
from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score

location = "C:/Users/91798/Downloads/red-wine-quality-cortez-et-al-2009/winequality-red.csv"
wine = pd.read_csv(location)

bins = (2, 6.5, 8)
group_names = ['bad', 'good']
wine['quality'] = pd.cut(wine['quality'], bins=bins, labels=group_names)

label_quality = LabelEncoder()

# Bad becomes 0 and good becomes 1
wine['quality'] = label_quality.fit_transform(wine['quality'])

X = wine.drop('quality', axis=1)
y = wine['quality']

# Train and Test splitting of data
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Applying Standard scaling to get optimized result
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
# Use the scaler fitted on the training set; refitting on the test set
# (as the original fit_transform call did) leaks test-set statistics.
X_test = sc.transform(X_test)

rfc = RandomForestClassifier(n_estimators=200)
rfc.fit(X_train, y_train)

# Saving model to disk
pickle.dump(rfc, open('model.pkl', 'wb'))

# Loading model to compare the results
# model = pickle.load(open('model.pkl', 'rb'))
# print(model.predict([[2, 9, 6]]))
2.640625
3
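The pickled model above expects the 11 physico-chemical feature columns of the red-wine CSV (so the commented-out 3-feature prediction would fail). A hedged reload sketch; the sample values are the first row of the public dataset, used for illustration:

import pickle
import numpy as np

with open('model.pkl', 'rb') as fh:
    model = pickle.load(fh)

# One sample with the 11 wine features, in CSV column order: fixed acidity,
# volatile acidity, citric acid, residual sugar, chlorides, free sulfur
# dioxide, total sulfur dioxide, density, pH, sulphates, alcohol.
sample = np.array([[7.4, 0.70, 0.00, 1.9, 0.076, 11.0, 34.0, 0.9978, 3.51, 0.56, 9.4]])

# NOTE: for a faithful prediction the StandardScaler fitted at training time
# should be applied to `sample` first; persisting it next to the model is
# advisable since the script above does not save it.
print(model.predict(sample))  # 0 = bad, 1 = good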
antlir/bzl/constants.bzl
facebookincubator/fs_image
9
12793073
<reponame>facebookincubator/fs_image
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# This combines configurable build-time constants (documented on REPO_CFG
# below), and non-configurable constants that are currently not namespaced.
#
# Note that there's no deep reason for this struct / non-struct split, so we
# could easily move everything into the struct.
#
load("//antlir/bzl:oss_shim.bzl", "config", "do_not_use_repo_cfg")
load("//antlir/bzl:sha256.bzl", "sha256_b64")
load("//antlir/bzl:shape.bzl", "shape")
load(":constants.shape.bzl", "bzl_const_t", "flavor_config_t", "nevra_t", "repo_config_t")
load(":snapshot_install_dir.bzl", "RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR", "snapshot_install_dir")
load(":target_helpers.bzl", "normalize_target")

DO_NOT_USE_BUILD_APPLIANCE = "__DO_NOT_USE_BUILD_APPLIANCE__"

CONFIG_KEY = "antlir"

BZL_CONST = shape.new(
    bzl_const_t,
    layer_feature_suffix = "__layer-feature",
    # Do NOT use this outside of Antlir internals. See "Why are `feature`s
    # forbidden as dependencies?" in `bzl/image/feature/new.bzl` for a
    # detailed explanation.
    PRIVATE_feature_suffix = "_IF_YOU_REFER_TO_THIS_RULE_YOUR_DEPENDENCIES_WILL_BE_BROKEN",
    version_set_allow_all_versions = "__VERSION_SET_ALLOW_ALL_VERSIONS__",
    # hostnames can't contain underscores.
    hostname_for_compiler_in_ba = "INTERNAL-ONLY-HOSTNAME-FOR-COMPILER-IN-BA",
)

def version_set_override_name(current_target):
    return "vset-override-" + sha256_b64(current_target)

# Use `_get_str_cfg` or `_get_str_list_cfg` instead.
def _do_not_use_directly_get_cfg(name, default = None):
    # Allow `buck -c` overrides from the command-line
    val = native.read_config(CONFIG_KEY, name)
    if val != None:
        return val

    val = do_not_use_repo_cfg.get(name)
    if val != None:
        return val

    return default

# We don't have "globally required" configs because code that requires a
# config will generally loudly fail on a config value that is None.
def _get_str_cfg(name, default = None, allow_none = False):
    ret = _do_not_use_directly_get_cfg(name, default = default)
    if not allow_none and ret == None:
        fail("Repo config must set key {}".format(name))
    return ret

# Defaults to the empty list if the config is not set.
#
# We use space to separate plurals because spaces are not allowed in target
# paths, and also because that's what `.buckconfig` is supposed to support
# for list configs (but does not, due to bugs).
def _get_str_list_cfg(name, separator = " ", default = None):
    s = _do_not_use_directly_get_cfg(name)
    return s.split(separator) if s else (default or [])

# Defaults to the empty list if the config is not set
def _get_version_set_to_path():
    lst = _get_str_list_cfg("version_set_to_path")
    vs_to_path = dict(zip(lst[::2], lst[1::2]))

    if 2 * len(vs_to_path) != len(lst):
        fail("antlir.version_set_to_path is a space-separated dict: k1 v1 k2 v2")

    # A layer can turn off version locking via
    # `version_set = BZL_CONST.version_set_allow_all_versions`.
    vs_to_path[BZL_CONST.version_set_allow_all_versions] = "TROLLING TROLLING TROLLING"
    return vs_to_path

# Defaults to the empty list if the config is not set
def _get_artifact_key_to_path():
    lst = _get_str_list_cfg("artifact_key_to_path")
    key_to_path = dict(zip(lst[::2], lst[1::2]))

    if 2 * len(key_to_path) != len(lst):
        fail("antlir.artifact_key_to_path is a space-separated dict: k1 v1 k2 v2")

    return key_to_path

def new_nevra(**kwargs):
    return shape.new(nevra_t, **kwargs)

def new_flavor_config(
        name,
        build_appliance,
        rpm_installer,
        rpm_repo_snapshot = None,
        rpm_version_set_overrides = None,
        version_set_path = BZL_CONST.version_set_allow_all_versions,
        unsafe_bypass_flavor_check = False):
    """
    Arguments

    - `name`: The name of the flavor
    - `build_appliance`: Path to a layer target of a build appliance,
    containing an installed `rpm_repo_snapshot()`, plus an OS image
    with other image build tools like `btrfs`, `dnf`, `yum`, `tar`, `ln`, ...
    - `rpm_installer`: The build appliance currently does not set a default
    package manager -- in non-default settings, this has to be chosen per
    image, since a BA can support multiple package managers. In the future,
    if specifying a non-default installer per image proves onerous when using
    non-default BAs, we could support a `default` symlink under
    `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`.
    - `rpm_repo_snapshot`: List of target or `/__antlir__` paths, see
    `snapshot_install_dir` doc. `None` uses the default determined by
    looking up `rpm_installer` in `RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR`.
    - `rpm_version_set_overrides`: List of `nevra` objects (see
    antlir/bzl/constants.bzl for definition). If an rpm with the given name
    is to be installed, the `nevra` defines its version.
    - `unsafe_bypass_flavor_check`: Do NOT use.
    """
    if build_appliance == None:
        fail(
            "Must be a target path, or a value from `constants.bzl`",
            "build_appliance",
        )

    if rpm_installer != "yum" and rpm_installer != "dnf":
        fail("Unsupported rpm_installer supplied in build_opts")

    # When building the BA itself, we need this constant to avoid a circular
    # dependency.
    #
    # This feature is exposed as a non-`None` magic constant so that callers
    # cannot get confused whether `None` refers to "no BA" or "default BA".
    if build_appliance == DO_NOT_USE_BUILD_APPLIANCE:
        build_appliance = None

    if build_appliance:
        build_appliance = normalize_target(build_appliance)

    return shape.new(
        flavor_config_t,
        name = name,
        build_appliance = build_appliance,
        rpm_installer = rpm_installer,
        rpm_repo_snapshot = (
            snapshot_install_dir(rpm_repo_snapshot) if rpm_repo_snapshot else "{}/{}".format(
                RPM_DEFAULT_SNAPSHOT_FOR_INSTALLER_DIR,
                rpm_installer,
            )
        ),
        rpm_version_set_overrides = rpm_version_set_overrides,
        version_set_path = version_set_path,
        unsafe_bypass_flavor_check = unsafe_bypass_flavor_check,
    )

def _get_flavor_to_config():
    flavor_to_config = {}
    for flavor, orig_flavor_config in do_not_use_repo_cfg.get("flavor_to_config", {}).items():
        flavor_config = {"name": flavor}
        flavor_config.update(orig_flavor_config)  # we'll mutate a copy

        # Apply `buck -c` overrides.
        #
        # Buck has a notion of flavors that is separate from Antlir's but
        # similar in spirit. It uses # as the delimiter for per-flavor
        # config options, so we follow that pattern.
        config_key = CONFIG_KEY + "#" + flavor
        for key, v in flavor_config.items():
            val = native.read_config(config_key, key, None)
            if val != None:
                flavor_config[key] = val

        flavor_to_config[flavor] = new_flavor_config(**flavor_config)

    return flavor_to_config

REPO_CFG = shape.new(
    repo_config_t,
    # This one is not using the access methods to provide the precedence order
    # because the way this is determined is *always* based on the build mode
    # provided, ie `@mode/opt` vs `@mode/dev`. And the build mode provided
    # determines the value of the `.buckconfig` properties used. There is no
    # way to override this value except to use a different build mode.
    artifacts_require_repo = (
        (native.read_config("defaults.cxx_library", "type") == "shared") or
        (native.read_config("python", "package_style") == "inplace")
    ) and native.read_config("antlir", "require_repo", "true") == "true",

    # This is a dictionary that allows for looking up configurable artifact
    # targets by a key.
    artifact = _get_artifact_key_to_path(),

    # At FB, the Antlir team tightly controls the usage of host mounts,
    # since they are a huge footgun, and are a terrible idea for almost
    # every application. To create an easy-to-review code bottleneck, any
    # feature target using a host-mount must be listed in this config.
    host_mounts_allowed_in_targets = _get_str_list_cfg("host_mounts_allowed_in_targets"),

    # Enumerates host mounts required to execute FB binaries in @mode/dev.
    #
    # This is turned into json and loaded by the python side of the
    # `nspawn_in_subvol` sub system. In the future this would be
    # implemented via a `Shape` so that the typing can be maintained across
    # bzl/python.
    host_mounts_for_repo_artifacts = _get_str_list_cfg(
        "host_mounts_for_repo_artifacts",
    ),
    flavor_available = _get_str_list_cfg("flavor_available"),
    stable_flavors = _get_str_list_cfg("stable_flavors"),
    flavor_default = _get_str_cfg("flavor_default"),
    flavor_to_config = _get_flavor_to_config(),
    # KEEP THIS DICTIONARY SMALL.
    #
    # For each `feature`, we have to emit as many targets as there are
    # elements in this list, because we do not know the version set that the
    # including `image.layer` will use. This would be fixable if Buck
    # supported providers like Bazel does.
    antlir_linux_flavor = _get_str_cfg("antlir_linux_flavor", allow_none = True),
    antlir_cell_name = config.get_antlir_cell_name(),
)
1.171875
1
btstock/db/db_mysql.py
huangxl-github/base-tushare-stock
5
12793074
<reponame>huangxl-github/base-tushare-stock<filename>btstock/db/db_mysql.py<gh_stars>1-10
#!/usr/bin/python
# -*- coding: UTF-8 -*-

import os
import sys  # used by the error handlers below; missing from the original

from dbutils.pooled_db import PooledDB
import pymysql

# NOTE: `prcc` (apparently a colored-print helper) is called in the error
# handlers but never imported here; it is presumably provided elsewhere in
# the project. The messages read "错误描述" (error description) and
# "错误行" (error line).


class Mysql(object):
    'Mysql element'

    def __init__(self):
        self.mysql_host = '127.0.0.1'
        self.user = 'root'
        self.password = '<PASSWORD>'
        self.db = 'tushare'
        self.conn_num = 5
        self.port = 3306
        self.pool = PooledDB(pymysql, self.conn_num, host=self.mysql_host,
                             user=self.user, passwd=self.password,
                             db=self.db, port=self.port)
        self.conn = self.pool.connection()
        self.cur = self.conn.cursor()

    def set_mysql_host(self, data):
        self.mysql_host = data

    def set_user(self, data):
        self.user = data

    def set_password(self, data):
        self.password = data

    def set_db(self, data):
        self.db = data

    def set_port(self, data):
        self.port = data

    def execute_sql_file(self, sql_file_path):
        try:
            with open(sql_file_path, 'r+', encoding='utf8') as f:
                print(f)
                sql_list = f.read().split(';')[:-1]
                sql_list = [x.replace('\n', ' ') if '\n' in x else x for x in sql_list]
                for sql_item in sql_list:
                    self.insert(sql_item)
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()

    def select(self, sql):
        try:
            self.cur.execute(sql)
            data = self.cur.fetchall()
            return data
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()
        else:
            pass
        finally:
            pass

    def select_one(self, sql):
        try:
            self.cur.execute(sql)
            data = self.cur.fetchone()
            return data
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()

    def insert(self, sql):
        try:
            self.cur.execute(sql)
            self.cur.fetchone()
            self.conn.commit()
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()

    def delete(self, sql):
        try:
            self.cur.execute(sql)
            self.cur.fetchone()
            self.conn.commit()
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()

    def update(self, sql):
        try:
            self.cur.execute(sql)
            self.conn.commit()
        except Exception as e:
            prcc("错误描述:" + str(e) + " 错误行:" + str(e.__traceback__.tb_lineno), 'r')
            sys.exit()
2.546875
3
test/sesqlite/avc/test.py
unibg-seclab/sesqlite
10
12793075
#!/usr/bin/env python
from subprocess import check_output
import numpy as np
from sys import argv


def get_output(command):
    return map(int, check_output(command.split()).split())


tests = argv[1] if len(argv) > 1 else 1000

avc_first = get_output("./avctest/avctest %s 1" % tests)
avc_other = get_output("./avctest/avctest %s 0" % tests)
uavc_first = get_output("./uavctest/uavctest %s 1" % tests)
uavc_other = get_output("./uavctest/uavctest %s 0" % tests)

mean_avc_first = np.mean(avc_first)
mean_avc_other = np.mean(avc_other)
mean_uavc_first = np.mean(uavc_first)
mean_uavc_other = np.mean(uavc_other)

std_avc_first = np.std(avc_first)
std_avc_other = np.std(avc_other)
std_uavc_first = np.std(uavc_first)
std_uavc_other = np.std(uavc_other)

print "=== AVC ==="
print "first: %f (std. %f)" % (mean_avc_first, std_avc_first)
print "other: %f (std. %f)" % (mean_avc_other, std_avc_other)
print "=== uAVC ==="
print "first: %f (std. %f)" % (mean_uavc_first, std_uavc_first)
print "other: %f (std. %f)" % (mean_uavc_other, std_uavc_other)
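A small follow-on sketch (Python 2, matching the script above) that turns the four means into speedup ratios; the variable names are taken directly from the script, the ratio interpretation is an assumption about what the benchmark is comparing.

# Derive speedup ratios from the means computed above.
print "cache speedup (AVC):  %.2fx" % (mean_avc_first / mean_avc_other)
print "cache speedup (uAVC): %.2fx" % (mean_uavc_first / mean_uavc_other)
print "uAVC vs AVC (cached): %.2fx" % (mean_avc_other / mean_uavc_other)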
2.53125
3
strategies/tools.py
logpy/strategies
13
12793076
<reponame>logpy/strategies
from __future__ import print_function, division

from .core import do_one, exhaust, switch


def typed(ruletypes):
    """ Apply rules based on the expression type

    inputs:
        ruletypes -- a dict mapping {Type: rule}
    """
    return switch(type, ruletypes)
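A hedged usage sketch for typed: it assumes switch dispatches on the key function's result (here, the argument's type) and that this is the library's usual contract; the rules dict below is made up for illustration.

# Hypothetical example: dispatch rewrite rules on the argument's type.
from strategies.tools import typed

rules = {
    int: lambda x: x + 1,      # rule applied to ints
    str: lambda s: s.upper(),  # rule applied to strings
}
bump = typed(rules)
print(bump(41))     # -> 42
print(bump("abc"))  # -> 'ABC'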
2.609375
3
app/pythonBackend/algorithm.py
russianambassador/durhack_team-repository
4
12793077
# Import the pickle library used to store the trained model
import pickle

# Open the trained model and assign it to a variable
with open('property_model_Bristle.pickle', 'rb') as file:
    lr = pickle.load(file)

# Predict price based on console input; useful for debugging
input_distance = float(input("Please enter the distance to the train station: "))
input_bedrooms = int(input("Please input the number of bedrooms of your property: "))
input_bathrooms = int(input("Please input the number of bathrooms of your property: "))

predicted_price = lr.predict([[input_distance, input_bedrooms, input_bathrooms]])
print(round(predicted_price[0], 2))
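The training step that produces property_model_Bristle.pickle is not part of this file; here is a minimal sketch of creating a compatible pickle, assuming a scikit-learn LinearRegression over (distance, bedrooms, bathrooms) features with made-up data.

# Hypothetical training script; feature values and prices are illustrative.
import pickle
from sklearn.linear_model import LinearRegression

X = [[0.5, 2, 1], [1.2, 3, 2], [3.0, 4, 2]]  # distance_km, bedrooms, bathrooms
y = [250000.0, 310000.0, 280000.0]           # sale prices

model = LinearRegression().fit(X, y)
with open('property_model_Bristle.pickle', 'wb') as f:
    pickle.dump(model, f)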
3.8125
4
src/offline/aws/awsscraper.py
vlab-cs-ucsb/quacky
1
12793078
# scrapes AWS resource type and action mapping from AWS docs
import json
import time

from selenium import webdriver
from selenium.common.exceptions import TimeoutException  # wait.until() raises this, not the builtin TimeoutError
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.chrome.options import Options
# from selenium.webdriver.opera.options import Options
from selenium.webdriver.firefox.options import Options  # only the Firefox Options is used below

options = Options()
options.add_argument('--headless')
options.add_argument('--disable-gpu')


# takes in a URL to an IAM service's actions, resources, and condition keys list and scrapes the tables
def get_tables(url):
    # browser = webdriver.Chrome(options = options)
    # browser = webdriver.Opera(options = options, executable_path = './operadriver')
    browser = webdriver.Firefox(options = options, executable_path = './geckodriver')
    browser.get(url)
    time.sleep(5)
    wait = WebDriverWait(browser, 10)

    try:
        # wait for all the page elements to load before scraping
        wait.until(EC.presence_of_element_located((By.TAG_NAME, 'awsdocs-view')))
    except TimeoutException:
        pass
    else:
        # get IAM service name and tables
        namespace = browser.find_elements_by_xpath("//div[@id='main-col-body']/p/code")[0].text
        tables = browser.find_elements_by_tag_name('table')

        if len(tables) > 0:
            # first table is the list of actions
            actions = tables[0].find_elements(By.TAG_NAME, 'tr')

        # second table is the list of resource types
        if len(tables) > 1:
            resources = tables[1].find_elements(By.TAG_NAME, 'tr')

        namespace_json = dict()

        if len(tables) > 0:
            previous_name = ''

            # store resource type -> actions mapping
            for action in actions:
                fields = list(action.find_elements_by_tag_name('td'))

                if len(fields) == 3:
                    resource_type = str(fields[0].text.replace('*', ''))

                    if resource_type in namespace_json:
                        namespace_json[resource_type].append(previous_name)
                    else:
                        namespace_json[resource_type] = [previous_name]

                elif len(fields) > 3:
                    resource_type = str(fields[3].text.replace('*', ''))
                    action_name = fields[0].text.replace(' [permission only]', '')
                    action_name = action_name.lower()

                    if resource_type in namespace_json:
                        namespace_json[resource_type].append(action_name)
                    else:
                        namespace_json[resource_type] = [action_name]

                    previous_name = action_name

            # save the constraints
            actions_json[namespace] = namespace_json

        namespace_json = dict()

        # if there is a resource type, scrape it and its ARN format
        if len(tables) > 1:
            for resource in resources:
                fields = list(resource.find_elements_by_tag_name('td'))

                if len(fields) > 1:
                    namespace_json[fields[0].text] = fields[1].text

            # save the constraints
            resources_json[namespace] = namespace_json

    finally:
        browser.close()


# browser = webdriver.Chrome(options = options)
# browser = webdriver.Opera(options = options, executable_path = './operadriver')
browser = webdriver.Firefox(options = options, executable_path = './geckodriver')

# open the general page listing the actions, resource types, and condition keys for all IAM services
aws_reference = 'https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_actions-resources-contextkeys.html'
browser.get(aws_reference)
wait = WebDriverWait(browser, 10)

try:
    # wait until the page has fully loaded all the elements
    wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'highlights')))
except:
    pass

actions_json = {}
resources_json = {}

# get list of all services
rows = browser.find_elements_by_xpath("//div[@id='main-col-body']/div[@class='highlights']/ul/li")

# iterate through services and scrape their tables
for row in rows:
    a_path = row.find_elements_by_tag_name('a')[0]
    url = a_path.get_attribute('href')
    get_tables(url)
    print('{}...done'.format(url))

browser.quit()

# dump constraints to files
file = open('actions.json', 'w')
file.write(json.dumps(actions_json, indent=4))
file.close()

file = open('resources.json', 'w')
file.write(json.dumps(resources_json, indent=4))
file.close()
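A small post-processing sketch over the actions.json file the scraper writes: it inverts the service -> resource-type -> actions mapping into action -> resource types. The file layout is taken from the code above; no extra fields are assumed.

# Invert the scraped mapping: map "service:action" to its resource types.
import json

with open('actions.json') as f:
    actions = json.load(f)

action_to_resources = {}
for service, by_resource in actions.items():
    for resource_type, action_names in by_resource.items():
        for name in action_names:
            action_to_resources.setdefault(service + ':' + name, []).append(resource_type)

print(json.dumps(action_to_resources, indent=4))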
2.59375
3
11_Sock_Merchant.py
waditya/HackerRank_Algorithms_Implementation_Challenges
0
12793079
#!/bin/python3

# Import Libraries
import sys


def sockMerchant(n, ar):
    # Sort the array
    ar.sort()
    br = ar
    br = list(set(br))
    # print(br)

    # Declare and initialize variables
    no_of_pairs = 0
    counter = 0

    for i in range(0, len(br), 1):
        for j in range(0, len(ar), 1):
            # print(ar[j], br[i])
            if ar[j] == br[i]:  # same colour (the original wrote int(ar[j]/br[i] == 1))
                # print("Match found")
                counter = counter + 1
                # print(counter)
        no_of_pairs = no_of_pairs + int(counter / 2)
        # print(no_of_pairs)
        counter = 0

    return no_of_pairs


n = int(input().strip())
ar = list(map(int, input().strip().split(' ')))
result = sockMerchant(n, ar)
print(result)
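For contrast, a linear-time sketch of the same sock-pairing count using collections.Counter; this is an alternative, not the submitted solution above.

# O(n) alternative: count each colour once, then sum whole pairs.
from collections import Counter

def sock_merchant_fast(ar):
    return sum(count // 2 for count in Counter(ar).values())

print(sock_merchant_fast([10, 20, 20, 10, 10, 30, 50, 10, 20]))  # -> 3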
3.796875
4
tests/util/test_requirements.py
pomes/valiant
2
12793080
<gh_stars>1-10 """Test for valiant.util.requirements. Copyright 2021 The Valiant Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from pathlib import Path import py # https://py.readthedocs.io/en/latest/index.html import pytest from valiant.util import ( RequirementEntry, parse_requirements_file, parse_requirements_entry, ) _dir = os.path.dirname(os.path.realpath(__file__)) REQ_DIR = py.path.local(_dir) / "req_data" _req_files = [os.path.join(REQ_DIR / j) for j in os.listdir(REQ_DIR) if j[-3:] == "txt"] ALL_REQ_FILES = pytest.mark.datafiles(*_req_files) tests = [ ( "packaging", RequirementEntry( package="packaging", versions=[], extras=[], environment_markers=None, hashes=None, ), ), ( "packaging==20.7", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=None, ), ), ( "packaging== 20.7", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=None, ), ), ( "packaging == 20.7", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=None, ), ), ( "packaging ==20.7", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=None, ), ), ( " packaging== 20.7 ", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=None, ), ), ( "packaging==20.7" " --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=[ ( "sha256", "a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", ) ], ), ), ( "packaging==20.7" " --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" " --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=None, hashes=[ ( "sha256", "a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", ), ( "sha256", "7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", ), ], ), ), ( "packaging[quux, strange]==20.7", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=["quux", "strange"], environment_markers=None, hashes=None, ), ), ( "packaging[quux, strange]==20.7" " --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" " --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=["quux", "strange"], environment_markers=None, hashes=[ ( "sha256", "a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", ), ( "sha256", "7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", ), ], ), ), ( "packaging==20.7; " 'python_version >= "3.5" and python_full_version < "3.0.0"' ' or python_full_version >= "3.4.0" and python_version >= "3.5"', RequirementEntry( package="packaging", 
versions=[("==", "20.7")], extras=[], environment_markers=( "or", ( "and", (">=", "python_version", "3.5"), ("<", "python_full_version", "3.0.0"), ), ( "and", (">=", "python_full_version", "3.4.0"), (">=", "python_version", "3.5"), ), ), hashes=None, ), ), ( "packaging==20.7; " 'python_version >= "3.5" and python_full_version < "3.0.0"' ' or python_full_version >= "3.4.0" and python_version >= "3.5"' " --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" " --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", RequirementEntry( package="packaging", versions=[("==", "20.7")], extras=[], environment_markers=( "or", ( "and", (">=", "python_version", "3.5"), ("<", "python_full_version", "3.0.0"), ), ( "and", (">=", "python_full_version", "3.4.0"), (">=", "python_version", "3.5"), ), ), hashes=[ ( "sha256", "a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", ), ( "sha256", "7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", ), ], ), ), ( "packaging[quux, strange]==99; os_name=='os2'" " and (python_version<'2.7' and platform_version=='2')" " --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", RequirementEntry( package="packaging", versions=[("==", "99")], extras=["quux", "strange"], environment_markers=( "and", ("==", "os_name", "os2"), ( "and", ("<", "python_version", "2.7"), ("==", "platform_version", "2"), ), ), hashes=[ ( "sha256", "7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", ) ], ), ), ] def test_parse_requirements_entry() -> None: """Work through different requirement scenarios.""" for entry in tests: req = parse_requirements_entry(entry[0]) assert req.package == entry[1].package assert req.versions == entry[1].versions assert req.extras == entry[1].extras assert req.environment_markers == entry[1].environment_markers assert req.hashes == entry[1].hashes def test_parse_requirements_file_exception() -> None: """Check exception raised for dodgy file path.""" with pytest.raises(ValueError): parse_requirements_file(Path("")) parse_requirements_file(Path("/i_dont_exist")) @ALL_REQ_FILES def test_parse_requirements_file(datafiles: py.path) -> None: """Perform a general parse of the test files.""" for data_file in datafiles.listdir(): assert len(parse_requirements_file(Path(data_file))) > 0
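For readers comparing against an off-the-shelf parser: the packaging library parses the same requirement strings; a hedged sketch follows, noting that its attribute names differ from the RequirementEntry fixtures above.

# Parse one of the fixture strings with packaging's Requirement class.
from packaging.requirements import Requirement

req = Requirement('packaging[quux, strange]==20.7; python_version >= "3.5"')
print(req.name)            # packaging
print(sorted(req.extras))  # ['quux', 'strange']
print(str(req.specifier))  # ==20.7
print(str(req.marker))     # python_version >= "3.5"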
1.921875
2
plot_error.py
h0uter/Sphero_SLAM_simulation
0
12793081
<filename>plot_error.py
import plotly.plotly as py
import plotly.graph_objs as go

# plotly
# import plotly
# plotly.tools.set_credentials_file(
#     username='houterm', api_key='putYaOwnKey')


def plot(sphero, step_count):
    if step_count == 40000:
        # if step_count == 1000:
        x_error_behaviour = go.Scatter(
            x=sphero.plot_time_list,
            y=sphero.plot_x_error_list,
            name='x direction'
        )
        y_error_behaviour = go.Scatter(
            x=sphero.plot_time_list,
            y=sphero.plot_y_error_list,
            name='y direction'
        )
        x_unfiltered_error_behaviour = go.Scatter(
            x=sphero.plot_time_list,
            y=sphero.plot_x_unfiltered_error_list,
            name='unfiltered x direction'
        )
        y_unfiltered_error_behaviour = go.Scatter(
            x=sphero.plot_time_list,
            y=sphero.plot_y_unfiltered_error_list,
            name='unfiltered y direction'
        )

        layout = go.Layout(
            title=go.layout.Title(
                # text='Error Behaviour 3: Maze',
                xref='paper',
                x=0
            ),
            xaxis=go.layout.XAxis(
                title=go.layout.xaxis.Title(
                    text='Time (s)',
                    font=dict(
                        family='Courier New, monospace',
                        size=28,
                        color='#000'
                    )
                ),
                # showticklabels=False
            ),
            yaxis=go.layout.YAxis(
                title=go.layout.yaxis.Title(
                    text='Error (pixels)',
                    font=dict(
                        family='Courier New, monospace',
                        size=28,
                        color='#000'
                    )
                ),
                # showticklabels=False
            ),
            legend=dict(
                # x=0,
                # y=0,
                x=1.1,
                y=1.2,
                traceorder='normal',
                font=dict(
                    family='sans-serif',
                    size=20,
                    color='#000'
                ),
                orientation="h",
                # bgcolor='#E2E2E2',
                # bordercolor='#FFFFFF',
                # borderwidth=2
            )
        )

        # print(sphero.plot_time_list)
        # print(sphero.plot_x_error_list)
        data = [x_error_behaviour, y_error_behaviour,
                x_unfiltered_error_behaviour, y_unfiltered_error_behaviour]
        fig = go.Figure(data=data, layout=layout)
        # py.iplot(fig, filename='styling-names')
        # py.plot(data, filename='error behaviour', auto_open=True)
        py.plot(fig, filename='unfiltered_error', auto_open=True)
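A small companion sketch that summarises the same error lists numerically instead of plotting; the sphero.plot_* attribute names are taken from the function above, everything else is illustrative.

# Summarise tracking error without the plotly dependency.
import numpy as np

def error_summary(sphero):
    x = np.asarray(sphero.plot_x_error_list)
    y = np.asarray(sphero.plot_y_error_list)
    rms = np.sqrt(np.mean(x ** 2 + y ** 2))  # combined RMS error in pixels
    return {'rms_px': rms,
            'max_x_px': np.max(np.abs(x)),
            'max_y_px': np.max(np.abs(y))}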
2.421875
2
tests/test_mc_flow_sim.py
sthagen/python-mc_flow_sim
1
12793082
# -*- coding: utf-8 -*-
# pylint: disable=missing-docstring,unused-import,reimported
import pytest  # type: ignore

import mc_flow_sim.mc_flow_sim as mc


def test_walk_ok_empty_string():
    empty = ''
    assert mc.walk(empty) is None


def test_walk_ok_empty_list():
    seq = []
    assert mc.walk(seq) is None


def test_walk_ok_empty_set():
    seq = {}  # note: {} is an empty dict literal, despite the test name
    assert mc.walk(seq) is None


def test_walk_ok_empty_dict():
    seq = dict()
    assert mc.walk(seq) is None


def test_walk_ok_empty_tuple():
    seq = tuple()
    assert mc.walk(seq) is None


def test_walk_ok_string():
    string = "abc"
    step = mc.walk(string)
    assert len(step) == 1
    assert step in string


def test_walk_ok_string_list():
    the_same = "a"
    seq = [the_same, the_same, the_same]
    assert mc.walk(seq) == the_same


def test_walk_ok_range():
    a_range = range(42)
    step = mc.walk(a_range)
    assert step in a_range
    assert isinstance(step, int)


def test_walk_ok_function_list():
    the_same = print
    seq = [the_same, the_same, the_same]
    assert mc.walk(seq) == the_same


def test_walk_ok_iterator_list():
    the_same = iter([1, 2, 3])
    seq = [the_same, the_same, the_same]
    assert mc.walk(seq) == the_same


def test_walk_ok_int_dict():
    seq = {0: "a", 1: "b"}
    assert mc.walk(seq) in seq.values()


def test_walk_nok_string_dict():
    seq = {"a": "b"}
    message = r"0"
    with pytest.raises(KeyError, match=message):
        mc.walk(seq)


def test_walk_nok_wrong_type_none():
    bad = None
    assert mc.walk(bad) is None


def test_walk_nok_wrong_type_object():
    bad = object
    message = r"object of type 'type' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(bad)


def test_walk_nok_wrong_type_int():
    bad = 42
    message = r"object of type 'int' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(bad)


def test_walk_nok_wrong_type_float():
    bad = 3.1415
    message = r"object of type 'float' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(bad)


def test_walk_nok_wrong_type_complex():
    bad = complex(1, -1)
    message = r"object of type 'complex' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(bad)


def test_walk_nok_wrong_type_generator_expression():
    message = r"object of type 'generator' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(n for n in range(1234))


def test_walk_nok_wrong_type_function():
    message = r"object of type 'builtin_function_or_method' has no len\(\)"
    with pytest.raises(TypeError, match=message):
        mc.walk(print)
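The tests pin down walk's contract: None for empty sized inputs, TypeError for unsized objects, and integer indexing (so string-keyed dicts raise KeyError). A minimal implementation consistent with them follows; it is a sketch, not necessarily the package's actual code.

# One implementation satisfying the tests above (hypothetical sketch).
import random

def walk(seq):
    if seq is None:
        return None
    n = len(seq)  # unsized objects raise TypeError here, as the tests expect
    if n == 0:
        return None
    # Integer indexing works for str/list/tuple/range and int-keyed dicts;
    # a str-keyed dict raises KeyError, matching test_walk_nok_string_dict.
    return seq[random.randrange(n)]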
2.46875
2
pygame_camera_module.py
Pjchardt/camera_node
0
12793083
<gh_stars>0
# simple class to grab webcam frames using pygame camera
import os

import pygame
import pygame.camera


class PygameCameraModule(object):
    # def __init__(self):

    def start(self):
        pygame.camera.init()
        cameras = pygame.camera.list_cameras()
        # Camera detected or not
        print(cameras)
        print("Using camera %s ..." % cameras[0])
        self.cam = pygame.camera.Camera(cameras[0], (640, 480))
        self.cam.start()

    def capture_image(self, name):
        img = self.cam.get_image()
        img = pygame.transform.rotate(img, -90)
        img = pygame.transform.smoothscale(img, (960, 1280))
        pygame.image.save(img, name)
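A short usage sketch for the class above; it assumes a webcam is attached and writes one frame to disk. The output filename is illustrative.

# Hypothetical driver for PygameCameraModule; requires camera hardware.
camera = PygameCameraModule()
camera.start()
camera.capture_image('frame_0001.png')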
3.0625
3
AppDB/cassandra/start_cassandra.py
eabyshev/appscale
2
12793084
""" A test script to start Cassandra. """ import logging import os import sys import cassandra_interface sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib")) import monit_interface def run(): """ Starts up cassandra. """ logging.warning("Starting Cassandra.") monit_interface.start(cassandra_interface.CASSANDRA_MONIT_WATCH_NAME, is_group=False) logging.warning("Done!") if __name__ == '__main__': run()
2.3125
2
models/python_cli/commands.py
andrei-papou/horn
2
12793085
<reponame>andrei-papou/horn<filename>models/python_cli/commands.py
import os
from time import time
from typing import List, Type

from horn import save_model, save_tensor

from common import Args, SubParsers, get_model_file_path, get_data_file_path, get_model, ModelSpec
from models import model_specs


def _extract_model_spec(args: Args) -> Type[ModelSpec]:
    model_spec = model_specs.get(args.model)
    if model_spec is None:
        raise ValueError(f'Invalid model name: {args.model}')
    return model_spec


class Command:
    name: str

    @classmethod
    def add_cli_parser(cls, sub_parsers: SubParsers):
        parser = sub_parsers.add_parser(name=cls.name)
        parser.set_defaults(func=cls.execute)
        cls._add_cli_arguments(parser)

    @classmethod
    def _add_cli_arguments(cls, parser):
        pass

    @classmethod
    def execute(cls, args: Args):
        raise NotImplementedError()


class SaveModelCommand(Command):
    name: str = 'save_model'

    @classmethod
    def execute(cls, args: Args):
        model_spec = _extract_model_spec(args)
        model = get_model(args.model, model_spec.get_model)
        model_file_path = get_model_file_path(model_name=args.model)
        save_model(model, model_file_path)
        print(f'Model has been saved to "{model_file_path}"')


class SaveDataCommand(Command):
    name: str = 'save_data'

    @classmethod
    def execute(cls, args: Args):
        model_spec = _extract_model_spec(args)
        xs_file_path = get_data_file_path(model_spec.name, 'x')
        ys_file_path = get_data_file_path(model_spec.name, 'y')
        if not os.path.isfile(xs_file_path):
            save_tensor(model_spec.xs, xs_file_path)
            print(f'Saved X data for model "{model_spec.name}" to "{xs_file_path}".')
        else:
            print(f'X data for model `{model_spec.name}` already exists at "{xs_file_path}"')
        if not os.path.isfile(ys_file_path):
            save_tensor(model_spec.ys, ys_file_path)
            print(f'Saved Y data for model "{model_spec.name}" to "{ys_file_path}".')
        else:
            print(f'Y data for model `{model_spec.name}` already exists at "{ys_file_path}"')


class TestCorrectnessCommand(Command):
    name: str = 'test_correctness'

    @classmethod
    def execute(cls, args: Args):
        model_spec = _extract_model_spec(args)
        model = get_model(args.model, model_spec.get_model)
        scores = model.evaluate(x=model_spec.xs, y=model_spec.ys, verbose=0)
        print(f'Keras accuracy for the model `{model_spec.name}` is {scores[1]}')


class TestPerformanceCommand(Command):
    name: str = 'test_performance'

    @classmethod
    def _add_cli_arguments(cls, parser):
        parser.add_argument(
            '-n', '--num-iterations',
            dest='num_iterations',
            default=1000,
            type=int,
            help='Number of performance measurement iterations')
        parser.add_argument(
            '--iteration-time',
            action='store_true',
            help='Print iteration time')

    @classmethod
    def execute(cls, args: Args):
        model_spec = _extract_model_spec(args)
        model = get_model(args.model, model_spec.get_model)
        cumulative_time = 0.0
        for i in range(args.num_iterations):
            start = time()
            model.predict(x=model_spec.xs, verbose=0)
            iteration_time = time() - start
            if args.iteration_time:
                print(f'Iteration {i} time: {iteration_time * 1e6}')
            cumulative_time += iteration_time
        print(f'Keras performance for model `{model_spec.name}` : {cumulative_time * 1e6}')


commands: List[Type[Command]] = [
    SaveModelCommand,
    SaveDataCommand,
    TestCorrectnessCommand,
    TestPerformanceCommand,
]
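A sketch of the CLI entry point this commands list plugs into; the add_cli_parser/set_defaults(func=...) pattern is taken from the file above, while the --model flag and the main() wiring are assumptions about the surrounding project.

# Hypothetical main() wiring the commands into argparse.
import argparse
from commands import commands  # assumed module layout

def main():
    parser = argparse.ArgumentParser(description='Model CLI')
    parser.add_argument('-m', '--model', required=True,
                        help='Model name (assumed flag; consumed by _extract_model_spec)')
    sub_parsers = parser.add_subparsers(dest='command', required=True)
    for command in commands:
        command.add_cli_parser(sub_parsers)
    args = parser.parse_args()
    args.func(args)  # dispatch via set_defaults(func=cls.execute)

if __name__ == '__main__':
    main()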
2.390625
2
release/stubs.min/Wms/RemotingImplementation/__init__.py
tranconbv/ironpython-stubs
0
12793086
# encoding: utf-8 # module Wms.RemotingImplementation calls itself RemotingImplementation # from Wms.RemotingImplementation,Version=1.23.1.0,Culture=neutral,PublicKeyToken=<PASSWORD> # by generator 1.145 # no doc # no important from System.Collections.Generic import * from ..__init__ import * # no functions # classes class AppHost(object): """ AppHost() """ def ZZZ(self): """hardcoded/mock instance of the class""" return AppHost() instance=ZZZ() """hardcoded/returns an instance of the class""" def CreateContainer(self): """ CreateContainer(self: AppHost) -> UnityContainer """ pass def Init(self,appSettings,authoritySystem): """ Init(self: AppHost,appSettings: IApplicationSettings,authoritySystem: ICentralAuthoritySystem) """ pass def RegisterQueues(self,container): """ RegisterQueues(self: AppHost,container: IUnityContainer) """ pass class BusinessLayerExtensions(object): # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return BusinessLayerExtensions() instance=ZZZ() """hardcoded/returns an instance of the class""" @staticmethod def ConvertTo(db,mapExpr): pass __all__=[ 'ConvertTo', ] class CallerContext(object): """ CallerContext() """ def ZZZ(self): """hardcoded/mock instance of the class""" return CallerContext() instance=ZZZ() """hardcoded/returns an instance of the class""" def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass def __repr__(self,*args): """ __repr__(self: object) -> str """ pass UserName=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: UserName(self: CallerContext) -> str """ class Constants(object): # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return Constants() instance=ZZZ() """hardcoded/returns an instance of the class""" AdminZoneId=2 AutoDisposeDeadObjectInterval=5 Batch='Batch' ItemIdType=None ItemMovementTasks=None LayoutsDirectory='C:\\Program Files (x86)\\TranCon\\BOXwisePro\\Server\\Layouts' MaxAllowedTimeDifference=None PurchaseOrderLineItemIdTokenFormat='PO:{0}{1}{2}' RefreshSettingsInterval=60.0 ReportsPackageSlipFile='PackageSlip.rdlc' ReportsPackingSlipFolder='C:\\Program Files (x86)\\TranCon\\BOXwisePro\\Server\\Layouts\\PackageSlip' ReportsPickListsConfigFile='Config.xml' ReportsPickListsFolder='C:\\Program Files (x86)\\TranCon\\BOXwisePro\\Server\\Layouts\\Picking' ReportsPurchaseReceiptFile='ReceivingSlip.rdlc' ReportsPurchaseReceiptFolder='C:\\Program Files (x86)\\TranCon\\BOXwisePro\\Server\\Layouts\\PurchaseReceipt' ReportsRmaReceiptFile='RmaReceipt.rdlc' ReportsRmaReceiptFolder='C:\\Program Files (x86)\\TranCon\\BOXwisePro\\Server\\Layouts\\RmaReceipt' RmaOrderLineItemIdTokenFormat='RMA:{0}{1}{2}' SalesOrderLineItemIdTokenFormat='SO:{0}{1}{2}' Serial='Serial' ServerClientName='__SERVER__TASK_{0}' ServerUsername='server' Shipping=None StartupSqlConnRetryAttempts=3 SupportedImages=None ThreadTimeoutGetDeviceInfo=1200 ThreadTimeoutGetScreenShot=1200 ThreadTimeoutSendBroadcastQuestion=10000 ThreadTimeoutVoidShipment=60000 TokenDelimiter='|' TraceCategoryDebug='Debug' __all__=[ 'AdminZoneId', 'AutoDisposeDeadObjectInterval', 'Batch', 'ItemIdType', 'ItemMovementTasks', 'MaxAllowedTimeDifference', 'PurchaseOrderLineItemIdTokenFormat', 'RefreshSettingsInterval', 'RmaOrderLineItemIdTokenFormat', 'SalesOrderLineItemIdTokenFormat', 'Serial', 'ServerClientName', 'ServerUsername', 'Shipping', 
'StartupSqlConnRetryAttempts', 'SupportedImages', 'ThreadTimeoutGetDeviceInfo', 'ThreadTimeoutGetScreenShot', 'ThreadTimeoutSendBroadcastQuestion', 'ThreadTimeoutVoidShipment', 'TokenDelimiter', 'TraceCategoryDebug', ] class DataSet(DataSet): """ DataSet() """ def ZZZ(self): """hardcoded/mock instance of the class""" return DataSet() instance=ZZZ() """hardcoded/returns an instance of the class""" def Clone(self): """ Clone(self: DataSet) -> DataSet """ pass def DetermineSchemaSerializationMode(self,*args): """ DetermineSchemaSerializationMode(self: DataSet,info: SerializationInfo,context: StreamingContext) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. System.Data.DataSet.#ctor(System.Runtime.Serialization.SerializationInfo,System.Runtime.Serialization.StreamingContext) is invoked with during deserialization in remoting scenarios. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the payload. DetermineSchemaSerializationMode(self: DataSet,reader: XmlReader) -> SchemaSerializationMode Determines the System.Data.DataSet.SchemaSerializationMode for a System.Data.DataSet. reader: The System.Xml.XmlReader instance that is passed during deserialization of the System.Data.DataSet. Returns: An System.Data.SchemaSerializationMode enumeration indicating whether schema information has been omitted from the payload. """ pass def Dispose(self): """ Dispose(self: MarshalByValueComponent,disposing: bool) Releases the unmanaged resources used by the System.ComponentModel.MarshalByValueComponent and optionally releases the managed resources. disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources. """ pass def GetSchemaSerializable(self,*args): """ GetSchemaSerializable(self: DataSet) -> XmlSchema """ pass def GetSerializationData(self,*args): """ GetSerializationData(self: DataSet,info: SerializationInfo,context: StreamingContext) Deserializes the table data from the binary or XML stream. info: The System.Runtime.Serialization.SerializationInfo instance. context: The streaming context. """ pass @staticmethod def GetTypedDataSetSchema(xs): """ GetTypedDataSetSchema(xs: XmlSchemaSet) -> XmlSchemaComplexType """ pass def InitializeDerivedDataSet(self,*args): """ InitializeDerivedDataSet(self: DataSet) """ pass def IsBinarySerialized(self,*args): """ IsBinarySerialized(self: DataSet,info: SerializationInfo,context: StreamingContext) -> bool Inspects the format of the serialized representation of the DataSet. info: The System.Runtime.Serialization.SerializationInfo object. context: The System.Runtime.Serialization.StreamingContext object. Returns: true if the specified System.Runtime.Serialization.SerializationInfo represents a DataSet serialized in its binary format,false otherwise. """ pass def OnPropertyChanging(self,*args): """ OnPropertyChanging(self: DataSet,pcevent: PropertyChangedEventArgs) Raises the System.Data.DataSet.OnPropertyChanging(System.ComponentModel.PropertyChangedEventArgs) event. pcevent: A System.ComponentModel.PropertyChangedEventArgs that contains the event data. 
""" pass def OnRemoveRelation(self,*args): """ OnRemoveRelation(self: DataSet,relation: DataRelation) Occurs when a System.Data.DataRelation object is removed from a System.Data.DataTable. relation: The System.Data.DataRelation being removed. """ pass def OnRemoveTable(self,*args): """ OnRemoveTable(self: DataSet,table: DataTable) Occurs when a System.Data.DataTable is removed from a System.Data.DataSet. table: The System.Data.DataTable being removed. """ pass def RaisePropertyChanging(self,*args): """ RaisePropertyChanging(self: DataSet,name: str) Sends a notification that the specified System.Data.DataSet property is about to change. name: The name of the property that is about to change. """ pass def ReadXmlSerializable(self,*args): """ ReadXmlSerializable(self: DataSet,reader: XmlReader) """ pass def ShouldSerializeRelations(self,*args): """ ShouldSerializeRelations(self: DataSet) -> bool """ pass def ShouldSerializeTables(self,*args): """ ShouldSerializeTables(self: DataSet) -> bool """ pass def __enter__(self,*args): """ __enter__(self: IDisposable) -> object """ pass def __exit__(self,*args): """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self): """ __new__(cls: type) __new__(cls: type,info: SerializationInfo,context: StreamingContext) """ pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Events=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets the list of event handlers that are attached to this component. 
""" PurchaseOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: PurchaseOrders_GetHistoryLines(self: DataSet) -> PurchaseOrders_GetHistoryLinesDataTable """ Relations=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: Relations(self: DataSet) -> DataRelationCollection """ RmaOrders_GetHistoryLines=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RmaOrders_GetHistoryLines(self: DataSet) -> RmaOrders_GetHistoryLinesDataTable """ SchemaSerializationMode=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: SchemaSerializationMode(self: DataSet) -> SchemaSerializationMode Set: SchemaSerializationMode(self: DataSet)=value """ Shipment_GetHistoryShipmentLines=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: Shipment_GetHistoryShipmentLines(self: DataSet) -> Shipment_GetHistoryShipmentLinesDataTable """ Tables=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: Tables(self: DataSet) -> DataTableCollection """ PurchaseOrders_GetHistoryLinesDataTable=None PurchaseOrders_GetHistoryLinesRow=None PurchaseOrders_GetHistoryLinesRowChangeEvent=None PurchaseOrders_GetHistoryLinesRowChangeEventHandler=None RmaOrders_GetHistoryLinesDataTable=None RmaOrders_GetHistoryLinesRow=None RmaOrders_GetHistoryLinesRowChangeEvent=None RmaOrders_GetHistoryLinesRowChangeEventHandler=None Shipment_GetHistoryShipmentLinesDataTable=None Shipment_GetHistoryShipmentLinesRow=None Shipment_GetHistoryShipmentLinesRowChangeEvent=None Shipment_GetHistoryShipmentLinesRowChangeEventHandler=None class DocumentQueue(MarshalByRefObject): """ DocumentQueue(printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) """ def ZZZ(self): """hardcoded/mock instance of the class""" return DocumentQueue() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddPrintJob(self,args): """ AddPrintJob(self: DocumentQueue,args: AddPrintJob) -> Task[Guid] """ pass def AddPrintJobScriptOverride(self,args,blobId,blobName): """ AddPrintJobScriptOverride(self: DocumentQueue,args: AddPrintJob,blobId: int,blobName: str) -> Guid """ pass def CopyPrintRule(self,printRuleId): """ CopyPrintRule(self: DocumentQueue,printRuleId: int) -> PrintRule """ pass def DeletePrintJobs(self,jobIds): """ DeletePrintJobs(self: DocumentQueue,jobIds: Array[Guid]) """ pass def DeletePrintRule(self,printRuleId): """ DeletePrintRule(self: DocumentQueue,printRuleId: int) """ pass def DeletePrintRules(self,ruleIds): """ DeletePrintRules(self: DocumentQueue,ruleIds: List[int]) """ pass def GetBlobContent(self,blobId): """ GetBlobContent(self: DocumentQueue,blobId: int) -> BlobContent """ pass def GetFileTypes(self): """ GetFileTypes(self: DocumentQueue) -> List[DocumentTypeEnum] """ pass def GetMatchingPrintRules(self,attributes): """ GetMatchingPrintRules(self: DocumentQueue,attributes: SerializableDictionary[str,str]) -> List[int] """ pass def GetOperators(self): """ GetOperators(self: DocumentQueue) -> List[Operator] """ pass def GetPrinterRules(self,args): """ GetPrinterRules(self: DocumentQueue,args: GetPrinterRulesArgs) -> List[PrintRule] """ pass def GetPrinters(self): """ GetPrinters(self: DocumentQueue) -> List[Printer] """ pass def GetPrintJobAttributes(self,printJobId): """ GetPrintJobAttributes(self: DocumentQueue,printJobId: Guid) -> SerializableDictionary[str,str] """ pass def GetPrintJobAuditLog(self,printJobId,paging): """ 
GetPrintJobAuditLog(self: DocumentQueue,printJobId: Guid,paging: PagingParams) -> PagedList[PrintJobAuditLogEntry] """ pass def GetPrintJobs(self,args,paging): """ GetPrintJobs(self: DocumentQueue,args: GetPrintJobsArgs,paging: PagingParams) -> PagedList[QueuedPrintJob] """ pass def GetPrintJobTypes(self): """ GetPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] """ pass def GetPrintJobTypesOfConfiguredPrintRules(self): """ GetPrintJobTypesOfConfiguredPrintRules(self: DocumentQueue) -> List[PrintJobType] """ pass def GetPrintRuleConditions(self,printRuleId): """ GetPrintRuleConditions(self: DocumentQueue,printRuleId: int) -> List[PrintRuleLine] """ pass def GetUsedAttributeNames(self,args): """ GetUsedAttributeNames(self: DocumentQueue,args: GetPrintJobAttributesArgs) -> List[PrintJobAttribute] """ pass def GetUsedAttributeValues(self,attributeName): """ GetUsedAttributeValues(self: DocumentQueue,attributeName: str) -> List[str] """ pass def GetUsedAttributeValuesAsObject(self,attributeName): """ GetUsedAttributeValuesAsObject(self: DocumentQueue,attributeName: str) -> List[AttributeValue] """ pass def GetUsedPrintJobTypes(self): """ GetUsedPrintJobTypes(self: DocumentQueue) -> List[PrintJobType] """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: DocumentQueue) -> object """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def RedispatchPrintJob(self,jobId): """ RedispatchPrintJob(self: DocumentQueue,jobId: Guid) """ pass def RedispatchPrintJobWithPrinter(self,args): """ RedispatchPrintJobWithPrinter(self: DocumentQueue,args: RedispatchPrintJobArgs) """ pass def SavePrintRule(self,rule): """ SavePrintRule(self: DocumentQueue,rule: PrintRule) -> PrintRule """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,printingService,storageProvider,printJobsQueuer): """ __new__(cls: type,printingService: IPrintingService,storageProvider: StorageProvider,printJobsQueuer: PrintJobsQueuer) """ pass class ExceptionHelper(object): """ ExceptionHelper() """ def ZZZ(self): """hardcoded/mock instance of the class""" return ExceptionHelper() instance=ZZZ() """hardcoded/returns an instance of the class""" @staticmethod def IsRetryPossible(ex,currentIdentity): """ IsRetryPossible(ex: Exception,currentIdentity: RemotingIdentity) -> bool """ pass @staticmethod def WrapException(ex): """ WrapException(ex: Exception) -> RemotingException """ pass class ExtendedUnityServiceLocator(UnityServiceLocator): """ ExtendedUnityServiceLocator(container: IUnityContainer) """ def ZZZ(self): """hardcoded/mock instance of the class""" return ExtendedUnityServiceLocator() instance=ZZZ() """hardcoded/returns an instance of the class""" def DoGetAllInstances(self,*args): """ DoGetAllInstances(self: UnityServiceLocator,serviceType: Type) -> IEnumerable[object] """ pass def DoGetInstance(self,*args): """ DoGetInstance(self: UnityServiceLocator,serviceType: Type,key: str) -> object """ pass def FormatActivateAllExceptionMessage(self,*args): """ FormatActivateAllExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type) -> str """ pass def FormatActivationExceptionMessage(self,*args): """ FormatActivationExceptionMessage(self: ServiceLocatorImplBase,actualException: Exception,serviceType: Type,key: str) -> str """ pass def IsRegistered(self,type=None): """ IsRegistered[T](self: ExtendedUnityServiceLocator) -> bool IsRegistered(self: ExtendedUnityServiceLocator,type: Type) -> bool """ pass def __enter__(self,*args): """ __enter__(self: IDisposable) -> object """ pass def __exit__(self,*args): """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,container): """ __new__(cls: type,container: IUnityContainer) """ pass class General(MarshalByRefObject): """ General(stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) """ def ZZZ(self): """hardcoded/mock instance of the class""" return General() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddOrUpdateErpLock(self,lock): """ AddOrUpdateErpLock(self: General,lock: ErpLock) -> int """ pass def AddOrUpdateErpLockDirect(self,lock): """ AddOrUpdateErpLockDirect(self: General,lock: ErpLock) -> int """ pass def AddTaskAutoDisposeTask(self): """ AddTaskAutoDisposeTask(self: General) """ pass def AddTaskCacheBackgroundTasks(self): """ AddTaskCacheBackgroundTasks(self: General) """ pass def AddTaskErpLockingTask(self): """ AddTaskErpLockingTask(self: General) """ pass def AddTaskLogCleanupTask(self): """ AddTaskLogCleanupTask(self: General) """ pass def AddTaskMessageQueueCleanupTask(self): """ AddTaskMessageQueueCleanupTask(self: General) """ pass def AddTaskNotificationCleanupTask(self): """ AddTaskNotificationCleanupTask(self: General) """ pass def AddTaskStockStreamTask(self): """ AddTaskStockStreamTask(self: General) """ pass def AddUserToZone(self,zone,user): """ AddUserToZone(self: General,zone: Zone,user: User) -> bool """ pass def AttachClient(self,endPoint): """ AttachClient(self: General,endPoint: str) """ pass def AuthenticateUser(self,args,barcodeSettings): """ AuthenticateUser(self: General,args: AuthenticationArgs) -> (RemotingIdentity,BarcodeTypes) """ pass def AuthenticateUserForDefaultZone(self,remId): """ AuthenticateUserForDefaultZone(self: General) -> (bool,RemotingIdentity) """ pass def AuthenticateUserForFirstZone(self,remId): """ AuthenticateUserForFirstZone(self: General) -> (bool,RemotingIdentity) """ pass def AuthenticateUserForZone(self,selectedZone,remId): """ AuthenticateUserForZone(self: General,selectedZone: Zone) -> (bool,RemotingIdentity) """ pass def BeepContinuous(self,endPoint): """ BeepContinuous(self: General,endPoint: str) """ pass def ChangeItemBarcode(self,args): """ ChangeItemBarcode(self: General,args: ChangeBarcodeArgs) -> bool """ pass def CheckHookVersions(self): """ CheckHookVersions(self: General) -> bool """ pass def CheckLicenseFile(self,xml,errors,license): """ CheckLicenseFile(self: General,xml: str) -> (bool,List[str],License) """ pass def CheckServerHealth(self): """ CheckServerHealth(self: General) -> ServerHealthEnum """ pass def CheckZoneRightAddReferenceOnTransfer(self,warehouseTransferKey): """ CheckZoneRightAddReferenceOnTransfer(self: General,warehouseTransferKey: CacheKey) -> bool """ pass def CleanupCacheHistory(self): """ CleanupCacheHistory(self: General) """ pass def CleanupUserCacheData(self): """ CleanupUserCacheData(self: General) """ pass def ClearResourceCache(self): """ ClearResourceCache(self: General) """ pass def CompileScript(self,script): """ CompileScript(self: General,script: str) -> List[PythonError] """ pass def ConvertToUsersByZone(self,oZonesUsersProxy): """ ConvertToUsersByZone(self: General,oZonesUsersProxy: ViewUsersInZone) -> Users """ pass def CreateBarcodeStructureDefinition(self,arg): """ CreateBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] """ pass def CreateColliPreset(self,arg): """ CreateColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> 
DataFlowObject[ColliPreset] """ pass def CreateDatabase(self,message): """ CreateDatabase(self: General) -> (bool,str) """ pass def CreateDevice(self,arg): """ CreateDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] """ pass def CreateLocationClassification(self,arg): """ CreateLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] """ pass def CreateModule(self,arg): """ CreateModule(self: General,arg: ModuleArgs) -> bool """ pass def CreateOrUpdateBackgroundAgent(self,arg): """ CreateOrUpdateBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] """ pass def CreatePrintLabel(self,arg): """ CreatePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] """ pass def CreateScript(self,arg): """ CreateScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] """ pass def CreateScriptTask(self,arg): """ CreateScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] """ pass def CreateShipperServiceLink(self,arg): """ CreateShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] """ pass def CreateSnippetModule(self,arg): """ CreateSnippetModule(self: General,arg: ModuleArgs) -> bool """ pass def CreateStorageAssignmentClassification(self,arg): """ CreateStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] """ pass def CreateTag(self,arg): """ CreateTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] """ pass def CreateUser(self,arg): """ CreateUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] """ pass def CreateWarehouseLayoutSetting(self,arg): """ CreateWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] """ pass def CreateZone(self,arg): """ CreateZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] """ pass def DeleteBackgroundAgent(self,arg): """ DeleteBackgroundAgent(self: General,arg: DataFlowObject[BackgroundAgent]) -> DataFlowObject[BackgroundAgent] """ pass def DeleteBarcodeStructureDefinition(self,arg): """ DeleteBarcodeStructureDefinition(self: General,arg: DataFlowObject[BarcodeStructureDefinition]) -> DataFlowObject[BarcodeStructureDefinition] """ pass def DeleteColliPreset(self,arg): """ DeleteColliPreset(self: General,arg: DataFlowObject[ColliPreset]) -> DataFlowObject[ColliPreset] """ pass def DeleteDevice(self,arg): """ DeleteDevice(self: General,arg: DataFlowObject[Device]) -> DataFlowObject[Device] """ pass def DeleteErpLock(self,lock): """ DeleteErpLock(self: General,lock: ErpLock) """ pass def DeleteLocationClassification(self,arg): """ DeleteLocationClassification(self: General,arg: DataFlowObject[LocationClassification]) -> DataFlowObject[LocationClassification] """ pass def DeleteModule(self,arg): """ DeleteModule(self: General,arg: ModuleArgs) -> bool """ pass def DeletePrintLabel(self,arg): """ DeletePrintLabel(self: General,arg: DataFlowObject[PrintLabel]) -> DataFlowObject[PrintLabel] """ pass def DeleteScript(self,arg): """ DeleteScript(self: General,arg: DataFlowObject[ZoneScript]) -> DataFlowObject[ZoneScript] """ pass def DeleteScriptTask(self,arg): """ DeleteScriptTask(self: General,arg: DataFlowObject[ScriptTask]) -> DataFlowObject[ScriptTask] """ pass def 
DeleteShipperServiceLink(self,arg): """ DeleteShipperServiceLink(self: General,arg: DataFlowObject[ShipperServiceLink]) -> DataFlowObject[ShipperServiceLink] """ pass def DeleteStorageAssignmentClassification(self,arg): """ DeleteStorageAssignmentClassification(self: General,arg: DataFlowObject[StorageAssignmentClassification]) -> DataFlowObject[StorageAssignmentClassification] """ pass def DeleteTag(self,arg): """ DeleteTag(self: General,arg: DataFlowObject[Tag]) -> DataFlowObject[Tag] """ pass def DeleteUser(self,arg): """ DeleteUser(self: General,arg: DataFlowObject[User]) -> DataFlowObject[User] """ pass def DeleteWarehouseLayoutSetting(self,arg): """ DeleteWarehouseLayoutSetting(self: General,arg: DataFlowObject[WarehouseLayoutSetting]) -> DataFlowObject[WarehouseLayoutSetting] """ pass def DeleteZone(self,arg): """ DeleteZone(self: General,arg: DataFlowObject[Zone]) -> DataFlowObject[Zone] """ pass def DiscardPrintLines(self,key): """ DiscardPrintLines(self: General,key: CacheKey) """ pass def DisposeCachedObject(self,hashCode): """ DisposeCachedObject(self: General,hashCode: int) -> DataFlowObject[object] """ pass def DisposeCachedObjects(self): """ DisposeCachedObjects(self: General) """ pass def DisposeCachedObjectWhenUnchanged(self,key): """ DisposeCachedObjectWhenUnchanged(self: General,key: CacheKey) """ pass def ExecuteCommand(self,command): """ ExecuteCommand(self: General,command: str) -> str """ pass def ExecuteScript(self,script): """ ExecuteScript(self: General,script: str) -> object """ pass def ExecuteScriptTaskOnce(self,id): """ ExecuteScriptTaskOnce(self: General,id: int) -> object """ pass def ExecuteScriptWithCacheObjectScope(self,script,cacheKey): """ ExecuteScriptWithCacheObjectScope(self: General,script: str,cacheKey: int) -> object """ pass def ExecuteScriptWithScope(self,script,scope): """ ExecuteScriptWithScope(self: General,script: str,scope: Dictionary[str,object]) -> object """ pass def FinishUploadModule(self,arg): """ FinishUploadModule(self: General,arg: ModuleArgs) -> bool """ pass def GenerateSerialNumbers(self,dfObject,numbersGenerated): """ GenerateSerialNumbers(self: General,dfObject: DataFlowObject[ItemIdGenerateArgs]) -> (DataFlowObject[ItemIdGenerateArgs],List[str]) """ pass def GetActiveColliPresets(self,colliPresets): """ GetActiveColliPresets(self: General) -> (int,ColliPresets) """ pass def GetAppDomainList(self): """ GetAppDomainList(self: General) -> List[AppDomainInformation] """ pass def GetBackgroundAgentById(self,id,agent): """ GetBackgroundAgentById(self: General,id: str) -> (bool,BackgroundAgent) """ pass def GetBackgroundAgentsAll(self,agents): """ GetBackgroundAgentsAll(self: General) -> (int,BackgroundAgents) """ pass def GetBackgroundAgentsByType(self,type,agents): """ GetBackgroundAgentsByType(self: General,type: BackgroundAgentType) -> (int,BackgroundAgents) """ pass def GetBackgroundAgentStatusByType(self,type): """ GetBackgroundAgentStatusByType(self: General,type: BackgroundAgentType) -> BackgroundAgentStatus """ pass def GetBarcodeSettingsAll(self,types): """ GetBarcodeSettingsAll(self: General) -> (int,BarcodeTypes) """ pass def GetBarcodeStructure(self,value,expectedScan,barcodeStructure): """ GetBarcodeStructure(self: General,value: str,expectedScan: ExpectScanOfEnum) -> (BarcodeStructureResultEnum,BarcodeStructure) """ pass def GetBarcodeStructureActive(self,definitions): """ GetBarcodeStructureActive(self: General) -> (int,BarcodeStructureDefinitions) """ pass def 
GetBarcodeStructureDefinitionById(self,countId,definition): """ GetBarcodeStructureDefinitionById(self: General,countId: int) -> (bool,BarcodeStructureDefinition) """ pass def GetBarcodeStructureDefinitions(self,filterBy,pagingParams,definitions): """ GetBarcodeStructureDefinitions(self: General,filterBy: BarcodeStructureDefinitionFilter,pagingParams: PagingParams) -> (int,BarcodeStructureDefinitions) """ pass def GetBarcodeStructureInOrder(self,value,expectedScans,barcodeStructure): """ GetBarcodeStructureInOrder(self: General,value: str,expectedScans: List[ExpectScanOfEnum]) -> (BarcodeStructureResultEnum,BarcodeStructure) """ pass def GetCacheObject(self,hashCode): """ GetCacheObject(self: General,hashCode: int) -> ICachable """ pass def GetCacheObjectAsXml(self,hashCode): """ GetCacheObjectAsXml(self: General,hashCode: int) -> str """ pass def GetChacheStatus(self): """ GetChacheStatus(self: General) -> str """ pass def GetColliPresetById(self,id,colliPreset): """ GetColliPresetById(self: General,id: int) -> (bool,ColliPreset) """ pass def GetColliPresetsAll(self,colliPresets): """ GetColliPresetsAll(self: General) -> (int,ColliPresets) """ pass def GetColliPresetSpecificationCodes(self,searchText,colliSpecificationCodes): """ GetColliPresetSpecificationCodes(self: General,searchText: str) -> (int,List[str]) """ pass def GetCopyOfCache(self): """ GetCopyOfCache(self: General) -> List[ICachable] """ pass def GetCountriesActive(self,countries): """ GetCountriesActive(self: General) -> (int,Countries) """ pass def GetCurrentIdentity(self): """ GetCurrentIdentity(self: General) -> RemotingIdentity """ pass def GetDefaultColliPreset(self,colliPreset): """ GetDefaultColliPreset(self: General) -> (bool,ColliPreset) """ pass def GetDefaultInboundLocations(self,warehouseCode,locations): """ GetDefaultInboundLocations(self: General,warehouseCode: str) -> (bool,Locations) """ pass def GetDeviceById(self,id,device): """ GetDeviceById(self: General,id: int) -> (bool,Device) """ pass def GetDeviceByMacAddress(self,macAddress,device): """ GetDeviceByMacAddress(self: General,macAddress: str) -> (bool,Device) """ pass def GetDeviceByName(self,name,device): """ GetDeviceByName(self: General,name: str) -> (bool,Device) """ pass def GetDeviceInformation(self,endPoint,deviceInfo): """ GetDeviceInformation(self: General,endPoint: str) -> (bool,DeviceInformation) """ pass def GetDevicesAll(self,devices): """ GetDevicesAll(self: General) -> (int,Devices) """ pass def GetErpLocks(self,locks): """ GetErpLocks(self: General) -> (int,List[ErpLock]) """ pass def GetErpName(self): """ GetErpName(self: General) -> str """ pass def GetErpSettings(self): """ GetErpSettings(self: General) -> SystemSettings """ pass def GetErpSettingsTable(self): """ GetErpSettingsTable(self: General) -> SystemSettingsTable """ pass def GetExecutionContexts(self): """ GetExecutionContexts(self: General) -> List[SafeRpcExecutionContext] """ pass def GetGeneratedScriptComment(self,script): """ GetGeneratedScriptComment(self: General,script: ZoneScript) -> str """ pass def GetImplementedMethods(self): """ GetImplementedMethods(self: General) -> ImplementedFunctionalities """ pass def GetItem(self,itemCode,item): """ GetItem(self: General,itemCode: str) -> (bool,Item) """ pass def GetItemExists(self,itemCode): """ GetItemExists(self: General,itemCode: str) -> bool """ pass def GetItemExistsOnDefaultInboundLocation(self,itemCode,warehouseCode,item): """ GetItemExistsOnDefaultInboundLocation(self: General,itemCode: str,warehouseCode: str) -> 
(bool,LocationItem) """ pass def GetItemExistsOnLocation(self,itemCode,warehouseCode,warehouseLocationCode,item): """ GetItemExistsOnLocation(self: General,itemCode: str,warehouseCode: str,warehouseLocationCode: str) -> (bool,LocationItem) """ pass def GetItemIdentificationExists(self,itemCode,itemId): """ GetItemIdentificationExists(self: General,itemCode: str,itemId: str) -> bool """ pass def GetItemIdentificationExistsMulti(self,itemCode,itemIds): """ GetItemIdentificationExistsMulti(self: General,itemCode: str,itemIds: List[str]) -> bool """ pass def GetItemIdentifications(self,args,selected,itemIdentifications): """ GetItemIdentifications(self: General,args: GetItemIdentificationArgs,selected: ItemIdentifications) -> (int,ItemIdentifications) """ pass def GetItemIdentificationsAvailable(self,args,itemIds): """ GetItemIdentificationsAvailable(self: General,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) """ pass def GetItemIdentificationsAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemIds): """ GetItemIdentificationsAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemIdentificationArgs) -> (int,ItemIdentifications) """ pass def GetItemImageFromErp(self,itemCode): """ GetItemImageFromErp(self: General,itemCode: str) -> Array[Byte] """ pass def GetItemImageLarge(self,itemCode): """ GetItemImageLarge(self: General,itemCode: str) -> Array[Byte] """ pass def GetItemImageSmall(self,itemCode): """ GetItemImageSmall(self: General,itemCode: str) -> Array[Byte] """ pass def GetItemInfoFromBarcode(self,barcode,itemInfo): """ GetItemInfoFromBarcode(self: General,barcode: str) -> (bool,ItemInfo) """ pass def GetItemLocationDefault(self,args,location): """ GetItemLocationDefault(self: General,args: GetItemLocationsArgs) -> (bool,ItemLocation) """ pass def GetItemLocations(self,args,locations): """ GetItemLocations(self: General,args: GetItemLocationsArgs) -> (int,ItemLocations) """ pass def GetItems(self,args,paging,items): """ GetItems(self: General,args: GetItemsArgs,paging: PagingParams) -> (int,Items) """ pass def GetItemsAll(self,args,items): """ GetItemsAll(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) """ pass def GetItemsOnDefaultInboundLocation(self,warehouseCode,filter,items): """ GetItemsOnDefaultInboundLocation(self: General,warehouseCode: str,filter: str) -> (int,LocationItems) """ pass def GetItemsOnLocation(self,args,items): """ GetItemsOnLocation(self: General,args: GetItemsOnLocationArgs) -> (int,LocationItems) """ pass def GetItemsOnTransportLocation(self,filter,items): """ GetItemsOnTransportLocation(self: General,filter: str) -> (int,LocationItems) """ pass def GetItemStockAvailableIncludingBatches(self,cacheKeyOfBatch,args,itemStock): """ GetItemStockAvailableIncludingBatches(self: General,cacheKeyOfBatch: CacheKey,args: GetItemStockListArgs) -> (int,List[ItemStock]) """ pass def GetItemStockList(self,args,itemStockLocationList): """ GetItemStockList(self: General,args: GetItemStockListArgs) -> (int,ItemStockLocationList) """ pass def GetItemStockTotals(self,args,totals): """ GetItemStockTotals(self: General,args: GetItemStockTotalsArgs) -> (bool,ItemStockTotals) """ pass def GetLibContent(self,arg,contents): """ GetLibContent(self: General,arg: GetLibArgs) -> (int,LibContents) """ pass @staticmethod def GetLibRoot(): """ GetLibRoot() -> str """ pass def GetLocationClassificationById(self,id,locationClassification): """ GetLocationClassificationById(self: General,id: int) -> (bool,LocationClassification) """ 
pass def GetLocationClassifications(self,filterBy,locationClassifications): """ GetLocationClassifications(self: General,filterBy: LocationClassificationsFilter) -> (int,LocationClassifications) """ pass def GetLocationsByCountGroup(self,countGroup,locations): """ GetLocationsByCountGroup(self: General,countGroup: CountGroup) -> (int,Locations) """ pass def GetLocationsByLocationClassification(self,locationClassification,locations): """ GetLocationsByLocationClassification(self: General,locationClassification: LocationClassification) -> (int,Locations) """ pass def GetLocationsByStorageAssignmentClassification(self,storageAssignmentClassification,locations): """ GetLocationsByStorageAssignmentClassification(self: General,storageAssignmentClassification: StorageAssignmentClassification) -> (int,Locations) """ pass def GetLogLines(self,args): """ GetLogLines(self: General,args: GetLogLinesArgs) -> PagedList[LogLine] """ pass def GetMacAddress(self): """ GetMacAddress(self: General) -> str """ pass def GetModule(self,arg,module): """ GetModule(self: General,arg: ModuleArgs) -> (bool,PythonModule) """ pass def GetPendingPrintLineCount(self,key): """ GetPendingPrintLineCount(self: General,key: CacheKey) -> int """ pass def GetPrintDatasetInstance(self,datasetFullTypeName,dataset): """ GetPrintDatasetInstance(self: General,datasetFullTypeName: str) -> (bool,PrintDatasetBase) """ pass def GetPrintDatasets(self,datasets): """ GetPrintDatasets(self: General) -> (int,List[PrintDatasetBase]) """ pass def GetPrintersTable(self): """ GetPrintersTable(self: General) -> Hashtable """ pass def GetPrintLabelByName(self,name,label): """ GetPrintLabelByName(self: General,name: str) -> (bool,PrintLabel) """ pass def GetPrintLabelImage(self,labelId): """ GetPrintLabelImage(self: General,labelId: str) -> Array[Byte] """ pass def GetPrintLabelMappings(self,labelId,mappings): """ GetPrintLabelMappings(self: General,labelId: int) -> (bool,Mappings[str,str,str]) """ pass def GetPrintLabels(self,labels): """ GetPrintLabels(self: General) -> (int,PrintLabels) """ pass def GetPrintLabelsOfDataset(self,datasetTypeFullName,labels): """ GetPrintLabelsOfDataset(self: General,datasetTypeFullName: str) -> (int,PrintLabels) """ pass def GetPrintLabelsOfPrintLines(self,printsLinesTypes,labels): """ GetPrintLabelsOfPrintLines(self: General,printsLinesTypes: IEnumerable[Type]) -> (int,PrintLabels) """ pass def GetProfilingLogEntries(self,userKey,previousMethod,endTime,elapsedMiliSeconds,entries): """ GetProfilingLogEntries(self: General,userKey: int,previousMethod: int,endTime: Nullable[DateTime],elapsedMiliSeconds: int) -> (int,ProfilingLogEntries) """ pass def GetProfilingUserNodes(self,userNodes): """ GetProfilingUserNodes(self: General) -> (int,ProfilingUserNodes) """ pass def GetProgressOfActivity(self,args,activity): """ GetProgressOfActivity(self: General,args: GetActivityProgressArgs) -> (bool,Activity) """ pass def GetProgressUpdate(self,args,progress): """ GetProgressUpdate(self: General,args: GetActivityProgressArgs) -> (bool,Progress) """ pass def GetResourcesOfTranslation(self,resourceSet,culture,translation): """ GetResourcesOfTranslation(self: General,resourceSet: str,culture: str) -> (bool,Translation) """ pass def GetScreenshot(self,accessId): """ GetScreenshot(self: General,accessId: str) -> Array[Byte] """ pass def GetScriptIntellisenseOptions(self,hint): """ GetScriptIntellisenseOptions(self: General,hint: str) -> Array[str] """ pass def GetScripts(self,arg,scripts): """ GetScripts(self: General,arg: 
GetScriptArgs) -> (int,ZoneScripts) """ pass def GetScriptsAll(self,scripts): """ GetScriptsAll(self: General) -> (int,ZoneScripts) """ pass def GetScriptSnippets(self,snippets): """ GetScriptSnippets(self: General) -> (int,List[ScriptSnippet]) """ pass def GetScriptTaskById(self,id,task): """ GetScriptTaskById(self: General,id: int) -> (bool,ScriptTask) """ pass def GetScriptTaskByName(self,name,task): """ GetScriptTaskByName(self: General,name: str) -> (bool,ScriptTask) """ pass def GetScriptTaskProjectedSchedule(self,id,schedule,firstOccurrence): """ GetScriptTaskProjectedSchedule(self: General,id: int) -> (bool,Array[DateTime],DateTime) """ pass def GetScriptTasksActive(self,tasks): """ GetScriptTasksActive(self: General) -> (int,ScriptTasks) """ pass def GetScriptTasksAll(self,tasks): """ GetScriptTasksAll(self: General) -> (int,ScriptTasks) """ pass def GetScriptTasksInActive(self,tasks): """ GetScriptTasksInActive(self: General) -> (int,ScriptTasks) """ pass def GetServerDate(self): """ GetServerDate(self: General) -> DateTime """ pass def GetSessions(self,sessions): """ GetSessions(self: General) -> (int,Sessions) """ pass def GetSettings(self): """ GetSettings(self: General) -> SystemSettings """ pass def GetSettingsTable(self): """ GetSettingsTable(self: General) -> SystemSettingsTable """ pass def GetShipperServiceLinkByErpDeliveryMethodCode(self,erpDeliveryMethodCode,shipperServiceLink): """ GetShipperServiceLinkByErpDeliveryMethodCode(self: General,erpDeliveryMethodCode: str) -> (bool,ShipperServiceLink) """ pass def GetShipperServiceLinksAll(self,shipperServiceLinks): """ GetShipperServiceLinksAll(self: General) -> (int,ShipperServiceLinks) """ pass @staticmethod def GetSnippetRoot(): """ GetSnippetRoot() -> str """ pass def GetSortedItemLocations(self,args,filterOptions,locations): """ GetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) """ pass @staticmethod def GetStdLibRoot(path): """ GetStdLibRoot() -> (bool,str) """ pass def GetStorageAssignmentClassificationById(self,id,storageAssignmentClassification): """ GetStorageAssignmentClassificationById(self: General,id: int) -> (bool,StorageAssignmentClassification) """ pass def GetStorageAssignmentClassifications(self,filterBy,storageAssignmentClassifications): """ GetStorageAssignmentClassifications(self: General,filterBy: StorageAssignmentClassificationsFilter) -> (int,StorageAssignmentClassifications) """ pass def GetTagById(self,id,tag): """ GetTagById(self: General,id: int) -> (bool,Tag) """ pass def GetTagsAll(self,tags): """ GetTagsAll(self: General) -> (int,Tags) """ pass def GetTagsByDescription(self,filter,tags): """ GetTagsByDescription(self: General,filter: str) -> (int,Tags) """ pass def GetTagsByType(self,target,tags): """ GetTagsByType(self: General,target: TagTarget) -> (int,Tags) """ pass def GetTranslationsAvailable(self,translations): """ GetTranslationsAvailable(self: General) -> (int,Translations) """ pass def GetTranslationsAvailablePerSet(self,resourseSet,translations): """ GetTranslationsAvailablePerSet(self: General,resourseSet: str) -> (int,Translations) """ pass def GetUserByUserId(self,userId,user): """ GetUserByUserId(self: General,userId: int) -> (bool,User) """ pass def GetUserByUserName(self,username,user): """ GetUserByUserName(self: General,username: str) -> (bool,User) """ pass def GetUserCacheData(self,tag): """ GetUserCacheData(self: General,tag: str) -> str """ pass def GetUsersActive(self,users): """ GetUsersActive(self: 
General) -> (int,Users) """ pass def GetUsersAll(self,users): """ GetUsersAll(self: General) -> (int,Users) """ pass def GetUsersInactive(self,users): """ GetUsersInactive(self: General) -> (int,Users) """ pass def GetUsersInZone(self,zoneId,users): """ GetUsersInZone(self: General,zoneId: int) -> (int,Users) """ pass def GetVersion(self): """ GetVersion(self: General) -> str """ pass def GetWarehouseByCode(self,warehouseCode,warehouse): """ GetWarehouseByCode(self: General,warehouseCode: str) -> (bool,Warehouse) """ pass def GetWarehouseExists(self,warehouseCode): """ GetWarehouseExists(self: General,warehouseCode: str) -> bool """ pass def GetWarehouseLayoutBySetting(self,warehouseLocation,warehouseLayoutSetting,warehouseLayout): """ GetWarehouseLayoutBySetting(self: General,warehouseLocation: str,warehouseLayoutSetting: WarehouseLayoutSetting) -> (bool,WarehouseLayout) """ pass def GetWarehouseLayoutsBySetting(self,warehouseLayoutSetting,warehouseLayouts): """ GetWarehouseLayoutsBySetting(self: General,warehouseLayoutSetting: WarehouseLayoutSetting) -> (int,WarehouseLayouts) """ pass def GetWarehouseLayoutSettingById(self,id,warehouseLayoutSetting): """ GetWarehouseLayoutSettingById(self: General,id: int) -> (bool,WarehouseLayoutSetting) """ pass def GetWarehouseLayoutSettings(self,filterBy,warehouseLayoutSettings): """ GetWarehouseLayoutSettings(self: General,filterBy: WarehouseLayoutSettingFilter) -> (int,WarehouseLayoutSettings) """ pass def GetWarehouseLocationExists(self,warehouseCode,warehouseLocationCode): """ GetWarehouseLocationExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> bool """ pass def GetWarehouseLocationFromStockThenErp(self,warehouseCode,warehouseLocationCode): """ GetWarehouseLocationFromStockThenErp(self: General,warehouseCode: str,warehouseLocationCode: str) -> Location """ pass def GetWarehouseLocationIfExists(self,warehouseCode,warehouseLocationCode,location): """ GetWarehouseLocationIfExists(self: General,warehouseCode: str,warehouseLocationCode: str) -> (bool,Location) """ pass def GetWarehouseLocations(self,args,locations): """ GetWarehouseLocations(self: General,args: GetWarehouseLocationsArgs) -> (int,Locations) """ pass def GetWarehousesActive(self,warehouses): """ GetWarehousesActive(self: General) -> (int,Warehouses) """ pass def GetWarehousesActiveByLocation(self,warehouseLocationCode,warehouses): """ GetWarehousesActiveByLocation(self: General,warehouseLocationCode: str) -> (int,Warehouses) """ pass def GetWarehousesActiveWithDefaultInboundLocation(self,warehouses): """ GetWarehousesActiveWithDefaultInboundLocation(self: General) -> (int,Warehouses) """ pass def GetWarehousesAll(self,warehouses): """ GetWarehousesAll(self: General) -> (int,Warehouses) """ pass def GetWarehousesInactive(self,warehouses): """ GetWarehousesInactive(self: General) -> (int,Warehouses) """ pass def GetZoneById(self,id,zone): """ GetZoneById(self: General,id: int) -> (bool,Zone) """ pass def GetZoneByName(self,name,zone): """ GetZoneByName(self: General,name: str) -> (bool,Zone) """ pass def GetZoneRightsOfZone(self,zoneId,zoneRights): """ GetZoneRightsOfZone(self: General,zoneId: int) -> (bool,ZoneRights) """ pass def GetZonesActive(self,active,zones): """ GetZonesActive(self: General,active: bool) -> (int,Zones) """ pass def GetZonesActiveOfCurrentUser(self,zones): """ GetZonesActiveOfCurrentUser(self: General) -> (int,Zones) """ pass def GetZonesActiveOfUser(self,user,zones): """ GetZonesActiveOfUser(self: General,user: User) -> (int,Zones) """ pass 
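 # --------------------------------------------------------------------
 # Illustrative usage (not part of the generated stubs): listing active
 # zones and the users in each, using the IronPython tuple-return
 # convention for the out-parameters shown in the signatures above.
 # Assumes `general` is a connected General instance and that Zone
 # exposes an `Id` member (an assumption; check the actual assembly).
 zoneCount, zones = general.GetZonesActive(True)
 for zone in zones:
     userCount, users = general.GetUsersInZone(zone.Id)
     print(zone, userCount)
 # --------------------------------------------------------------------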
def GetZonesAll(self,zones): """ GetZonesAll(self: General) -> (int,Zones) """ pass def GetZoneScriptHook(self,arg,script): """ GetZoneScriptHook(self: General,arg: GetScriptArgs) -> (bool,ZoneScript) """ pass def GetZoneScripts(self,arg,scripts): """ GetZoneScripts(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) """ pass def GetZoneScriptsOrphan(self,arg,scripts): """ GetZoneScriptsOrphan(self: General,arg: GetScriptArgs) -> (int,ZoneScripts) """ pass def GetZonesOfUser(self,user,addActiveOnly,zones): """ GetZonesOfUser(self: General,user: User,addActiveOnly: bool) -> (int,Zones) """ pass def GetZoneUsers(self,zoneId,zoneUsers): """ GetZoneUsers(self: General,zoneId: int) -> (int,ZoneUsers) """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: General) -> object """ pass def IsProfilerRunning(self): """ IsProfilerRunning(self: General) -> bool """ pass def KillAppDomain(self,*__args): """ KillAppDomain(self: General,arg: DataFlowObject[AppDomainInformation]) -> DataFlowObject[AppDomainInformation] KillAppDomain(self: General,filter: str) """ pass def LoadCache(self): """ LoadCache(self: General) """ pass def LoadSettings(self,*__args): """ LoadSettings(self: General,unsafe: bool)LoadSettings(self: General,settingsObject: SystemSettings) """ pass def LogoutClient(self): """ LogoutClient(self: General) """ pass def LogoutUser(self): """ LogoutUser(self: General) """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. 
""" pass def MoveModuleOrDirectory(self,isFile,name,fromDir,toDir): """ MoveModuleOrDirectory(self: General,isFile: bool,name: str,fromDir: str,toDir: str) -> bool """ pass def OnPythonEngineBooted(self): """ OnPythonEngineBooted(self: General) """ pass def OutputCacheStatusToLog(self): """ OutputCacheStatusToLog(self: General) """ pass def PrintPrintLine(self,line,label): """ PrintPrintLine(self: General,line: PrintLineBase,label: PrintLabel) -> bool """ pass def PrintPrintLineByObjectAndPrinter(self,line,label,printArgs): """ PrintPrintLineByObjectAndPrinter(self: General,line: PrintLineBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool """ pass def PrintPrintLines(self,key,label): """ PrintPrintLines(self: General,key: CacheKey,label: PrintLabel) -> bool """ pass def PrintPrintLinesByObject(self,lines,label): """ PrintPrintLinesByObject(self: General,lines: PrintLinesBase,label: PrintLabel) -> bool """ pass def PrintPrintLinesByObjectAndPrinter(self,lines,label,printArgs): """ PrintPrintLinesByObjectAndPrinter(self: General,lines: PrintLinesBase,label: PrintLabel,printArgs: PrintBaseArgs) -> bool """ pass def PrintTestLabel(self,labelId,testRun): """ PrintTestLabel(self: General,labelId: int,testRun: bool) """ pass def PurgeProfilingLog(self): """ PurgeProfilingLog(self: General) """ pass def RegisterBackgroundAgentLastSeen(self,agent): """ RegisterBackgroundAgentLastSeen(self: General,agent: BackgroundAgent) """ pass def RemoveUserFromZone(self,zone,user): """ RemoveUserFromZone(self: General,zone: Zone,user: User) -> bool """ pass def ResetBarcodeSettingsToDefault(self): """ ResetBarcodeSettingsToDefault(self: General) -> bool """ pass def ResetPrintLines(self,key,printLines): """ ResetPrintLines(self: General,key: CacheKey) -> (bool,PrintLinesBase) """ pass def RestartScriptEngine(self): """ RestartScriptEngine(self: General) """ pass def SaveCache(self): """ SaveCache(self: General) """ pass def SaveDefaultInboundLocation(self,warehouse): """ SaveDefaultInboundLocation(self: General,warehouse: DataFlowObject[Warehouse]) -> DataFlowObject[Warehouse] """ pass def SaveErpSetting(self,memberName,value): """ SaveErpSetting(self: General,memberName: str,value: object) """ pass def SaveModule(self,module): """ SaveModule(self: General,module: PythonModule) -> bool """ pass def SavePrintLabelMappings(self,labelId,mappings): """ SavePrintLabelMappings(self: General,labelId: int,mappings: Mappings[str,str,str]) -> bool """ pass def SaveSetting(self,memberName,value): """ SaveSetting(self: General,memberName: str,value: object) """ pass def SaveTranslations(self,translations): """ SaveTranslations(self: General,*translations: Array[SaveTranslationArgs]) """ pass def ScheduleScriptTasks(self): """ ScheduleScriptTasks(self: General) """ pass def SendBroadcastMessage(self,message): """ SendBroadcastMessage(self: General,message: str) """ pass def SendBroadcastQuestion(self,question,possibleAnswers): """ SendBroadcastQuestion(self: General,question: str,possibleAnswers: int) -> Answers """ pass def SendKey(self,endPoint,key): """ SendKey(self: General,endPoint: str,key: str) """ pass def SendMessage(self,endPoint,message): """ SendMessage(self: General,endPoint: str,message: str) """ pass def SendMouseClick(self,endPoint,x,y): """ SendMouseClick(self: General,endPoint: str,x: int,y: int) """ pass def SetPrintLinesQuantitiesAtMax(self,key,printLines): """ SetPrintLinesQuantitiesAtMax(self: General,key: CacheKey) -> (bool,PrintLinesBase) """ pass def SetSessionTimeout(self): """ 
SetSessionTimeout(self: General) """ pass def SetUserCacheData(self,tag,data): """ SetUserCacheData(self: General,tag: str,data: str) """ pass def SetZoneRightsOfZone(self,zoneId,zoneRights): """ SetZoneRightsOfZone(self: General,zoneId: int,zoneRights: ZoneRightViews) -> bool """ pass def Sleep(self,seconds): """ Sleep(self: General,seconds: int) -> str """ pass def StartDiscoveryServer(self,tcpPortNumber=None,unsafe=None): """ StartDiscoveryServer(self: General)StartDiscoveryServer(self: General,tcpPortNumber: int,unsafe: bool) """ pass def StartProfiler(self): """ StartProfiler(self: General) """ pass def StopDiscoveryServer(self,unsafe=None): """ StopDiscoveryServer(self: General)StopDiscoveryServer(self: General,unsafe: bool) """ pass def StopMarshalledObjectFactories(self): """ StopMarshalledObjectFactories(self: General) """ pass def StopProfiler(self): """ StopProfiler(self: General) """ pass def TouchGetSortedItemLocations(self,args,filterOptions,locations): """ TouchGetSortedItemLocations(self: General,args: GetItemLocationsArgs,filterOptions: FilterOptions) -> (int,ItemLocations) """ pass def UpdateBarcodeSettings(self,dfObject): """ UpdateBarcodeSettings(self: General,dfObject: DataFlowObject[BarcodeTypes]) -> DataFlowObject[BarcodeTypes] """ pass def UpdateCultureOfUserSession(self): """ UpdateCultureOfUserSession(self: General) """ pass def UpdateDatabase(self,message): """ UpdateDatabase(self: General) -> (bool,str) """ pass def UpdatePrintLine(self,key,line): """ UpdatePrintLine(self: General,key: CacheKey,line: PrintLineBase) -> bool """ pass def UploadModule(self,arg): """ UploadModule(self: General,arg: AddModuleArgs) -> bool """ pass def UploadNewLicense(self,xml,license): """ UploadNewLicense(self: General,xml: str) -> (bool,License) """ pass def ValidateColliReferences(self,dfObject): """ ValidateColliReferences(self: General,dfObject: DataFlowObject[ValidateColliReferencesArgs]) -> DataFlowObject[ValidateColliReferencesArgs] """ pass def ValidateColliReferenceScan(self,barcode,result): """ ValidateColliReferenceScan(self: General,barcode: str) -> (bool,ColliBarcodeResult) """ pass def ValidateItemIdentification(self,itemCode,itemId,isBatchNumber,errorMessage): """ ValidateItemIdentification(self: General,itemCode: str,itemId: str,isBatchNumber: bool) -> (bool,str) """ pass def ValidateItemIdentificationForDelivery(self,dfObject): """ ValidateItemIdentificationForDelivery(self: General,dfObject: DataFlowObject[ValidateItemIdentificationArgs]) -> DataFlowObject[ValidateItemIdentificationArgs] """ pass def ValidateOrder(self,orderNumber,orderType): """ ValidateOrder(self: General,orderNumber: str,orderType: OrderTypeEnum) -> OrderValidationResult """ pass def ValidateTransportPackageScan(self,barcode,result): """ ValidateTransportPackageScan(self: General,barcode: str) -> (bool,TransportPackageScanResult) """ pass def __getitem__(self,*args): """ x.__getitem__(y) <==> x[y] """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,stockManager,passwordHasher,documentQueue): """ __new__(cls: type,stockManager: IStockManager,passwordHasher: IPasswordHasher,documentQueue: IDocumentQueue) """ pass CachedSettings=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: CachedSettings(self: General) -> SystemSettings """ CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: CurrentLicense(self: General) -> License Set: CurrentLicense(self: General)=value """ DocumentQueue=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class IApplicationSettings: # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return IApplicationSettings() instance=ZZZ() """hardcoded/returns an instance of the class""" def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass BosRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: BosRestBaseUri(self: IApplicationSettings) -> str """ BosRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: BosRestLicenseCreationSecret(self: IApplicationSettings) -> str """ GCloudProjectId=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: GCloudProjectId(self: IApplicationSettings) -> str """ GCloudPubSubPrefix=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: GCloudPubSubPrefix(self: IApplicationSettings) -> str """ MailgunApiKey=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: MailgunApiKey(self: IApplicationSettings) -> str """ MailgunBaseUrl=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: MailgunBaseUrl(self: IApplicationSettings) -> str """ MailgunDefaultSender=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: MailgunDefaultSender(self: IApplicationSettings) -> str """ MailgunDomainBoxwise=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: MailgunDomainBoxwise(self: IApplicationSettings) -> str """ Options=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: Options(self: IApplicationSettings) -> str """ PdfPrintNetCompany=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: PdfPrintNetCompany(self: IApplicationSettings) -> str """ PdfPrintNetLicenseKey=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: PdfPrintNetLicenseKey(self: IApplicationSettings) -> str """ RemotingDictionarySettingName=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RemotingDictionarySettingName(self: IApplicationSettings) -> str """ RemotingDictionarySettingPort=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RemotingDictionarySettingPort(self: IApplicationSettings) -> str """ RemotingPortNr=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RemotingPortNr(self: IApplicationSettings) -> int """ RemotingTcpChannelName=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RemotingTcpChannelName(self: IApplicationSettings) -> str """ RpRestBaseUri=property(lambda self: object(),lambda self,v: None,lambda self: None) 
"""Get: RpRestBaseUri(self: IApplicationSettings) -> str """ RpRestLicenseCreationSecret=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: RpRestLicenseCreationSecret(self: IApplicationSettings) -> str """ class ICentralAuthoritySystem: # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return ICentralAuthoritySystem() instance=ZZZ() """hardcoded/returns an instance of the class""" def RestartGooglePubSubServices(self): """ RestartGooglePubSubServices(self: ICentralAuthoritySystem) """ pass def StartBosInboundListener(self): """ StartBosInboundListener(self: ICentralAuthoritySystem) -> bool """ pass def StartRemotePublishingInboundListener(self): """ StartRemotePublishingInboundListener(self: ICentralAuthoritySystem) -> bool """ pass def WaitStartGooglePubSubServicesUntilValidServerHealth(self): """ WaitStartGooglePubSubServicesUntilValidServerHealth(self: ICentralAuthoritySystem) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass class IExtendedServiceLocator: # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return IExtendedServiceLocator() instance=ZZZ() """hardcoded/returns an instance of the class""" def IsRegistered(self,type=None): """ IsRegistered[T](self: IExtendedServiceLocator) -> bool IsRegistered(self: IExtendedServiceLocator,type: Type) -> bool """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass class Inbound(MarshalByRefObject): """ Inbound(stockManager: IStockManager,messaging: IMessaging,general: IGeneral) """ def ZZZ(self): """hardcoded/mock instance of the class""" return Inbound() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddOrUpdateLicensePlateToReceipt(self,cacheKey,licensePlate): """ AddOrUpdateLicensePlateToReceipt(self: Inbound,cacheKey: CacheKey,licensePlate: LicensePlate) -> LicensePlate """ pass def CancelPendingInboundReceiveLines(self,warehouseCode,customerNumber,orderType): """ CancelPendingInboundReceiveLines(self: Inbound,warehouseCode: str,customerNumber: str,orderType: InboundOrderTypeEnum) -> bool """ pass def CancelPendingPurchaseOrderReceipts(self,purchaseOrders): """ CancelPendingPurchaseOrderReceipts(self: Inbound,purchaseOrders: PurchaseOrders) """ pass def CancelPendingRmaOrderReceipts(self,rmaOrders): """ CancelPendingRmaOrderReceipts(self: Inbound,rmaOrders: RmaOrders) """ pass def CreatePreReceipt(self,dfObject): """ CreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] """ pass def CreatePreReceiptLines(self,dfObject): """ CreatePreReceiptLines(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] """ pass def DeletePreReceipLines(self,dfObject): """ DeletePreReceipLines(self: Inbound,dfObject: DataFlowObject[List[int]]) -> DataFlowObject[List[int]] """ pass def DeletePreReceipt(self,dfObject): """ DeletePreReceipt(self: Inbound,dfObject: DataFlowObject[int]) -> DataFlowObject[int] """ pass def DisposeReceiptWhenUnchanged(self,dfObject): """ DisposeReceiptWhenUnchanged(self: Inbound,dfObject: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] """ pass def GetAdhocRmaCustomersByFilter(self,args,customers): """ GetAdhocRmaCustomersByFilter(self: Inbound,args: 
GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) """ pass def GetHistoryPurchaseOrderPrintLines(self,filter,lines): """ GetHistoryPurchaseOrderPrintLines(self: Inbound,filter: GetHistoryPurchaseOrderPrintLinesArgs) -> (int,PurchaseOrderPrintLines) """ pass def GetHistoryPurchaseOrdersByFilter(self,filter,pagingParams,purchaseOrders): """ GetHistoryPurchaseOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) """ pass def GetHistoryPurchaseReceiptsByFilter(self,filter,pagingParams,purchaseOrders): """ GetHistoryPurchaseReceiptsByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryPurchaseOrders) """ pass def GetHistoryRmaOrderLines(self,args,orderLines): """ GetHistoryRmaOrderLines(self: Inbound,args: GetHistoryRmaOrderLinesArgs) -> (int,HistoryRmaOrderLines) """ pass def GetHistoryRmaOrdersByFilter(self,filter,pagingParams,rmaOrders): """ GetHistoryRmaOrdersByFilter(self: Inbound,filter: HistoryPurchaseOrdersFilter,pagingParams: PagingParams) -> (int,HistoryRmaOrders) """ pass def GetHistoryRmaReceiptById(self,groupGuid): """ GetHistoryRmaReceiptById(self: Inbound,groupGuid: Guid) -> HistoryRmaOrder """ pass def GetInboundReceiveLinesByKey(self,cacheKey,receiveLines): """ GetInboundReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (bool,InboundReceiveLines) """ pass def GetItemsOfVendor(self,args,items): """ GetItemsOfVendor(self: Inbound,args: GetItemsOfVendorArgs) -> (int,Items) """ pass def GetItemVendors(self,args,vendors): """ GetItemVendors(self: Inbound,args: GetItemVendorsArgs) -> (int,ItemVendors) """ pass def GetPreReceiptLines(self,args,lines): """ GetPreReceiptLines(self: Inbound,args: PreReceiptLinesArgs) -> (int,PagedList[PreReceiptLine]) """ pass def GetPreReceiptReceiveLines(self,dfObject): """ GetPreReceiptReceiveLines(self: Inbound,dfObject: DataFlowObject[ReceiveLinesForPreReceiptArgs]) -> DataFlowObject[ReceiveLinesForPreReceiptArgs] """ pass def GetPreReceipts(self,args,preReceipts): """ GetPreReceipts(self: Inbound,args: PreReceiptArgs) -> (int,PreReceipts) """ pass def GetPreReceiptSummaries(self,purchaseOrdernumber): """ GetPreReceiptSummaries(self: Inbound,purchaseOrdernumber: str) -> List[PreReceiptSummary] """ pass def GetPurchaseOrder(self,args,purchaseOrder): """ GetPurchaseOrder(self: Inbound,args: PurchaseOrderArgs) -> (bool,PurchaseOrder) """ pass def GetPurchaseOrderItemIdentifications(self,purchaseOrderId,orderLineId,itemIds): """ GetPurchaseOrderItemIdentifications(self: Inbound,purchaseOrderId: int,orderLineId: int) -> (int,ItemIdentifications) """ pass def GetPurchaseOrderLines(self,args,purchaseOrderLines): """ GetPurchaseOrderLines(self: Inbound,args: GetPurchaseOrderLinesArgs) -> (int,PurchaseOrderLines) """ pass def GetPurchaseOrderPrintLines(self,key,lines): """ GetPurchaseOrderPrintLines(self: Inbound,key: CacheKey) -> (int,PurchaseOrderPrintLines) """ pass def GetPurchaseOrdersAll(self,purchaseOrders): """ GetPurchaseOrdersAll(self: Inbound) -> (int,PurchaseOrders) """ pass def GetPurchaseOrdersByFilter(self,args,purchaseOrders): """ GetPurchaseOrdersByFilter(self: Inbound,args: PurchaseOrderArgs) -> (int,PurchaseOrders) """ pass def GetPurchaseReceiveLines(self,purchaseOrders,warehouseCode,purchaseReceiveLines): """ GetPurchaseReceiveLines(self: Inbound,purchaseOrders: DataFlowObject[PurchaseOrders],warehouseCode: str) -> (DataFlowObject[PurchaseOrders],InboundReceiveLines) """ pass def 
GetPurchaseReceiveLinesByKey(self,cacheKey,purchaseReceiveLines):
  """ GetPurchaseReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) """
  pass
 def GetRmaCustomersExpected(self,customers):
  """ GetRmaCustomersExpected(self: Inbound) -> (int,Customers) """
  pass
 def GetRmaCustomersExpectedByFilter(self,args,customers):
  """ GetRmaCustomersExpectedByFilter(self: Inbound,args: GetRmaOrderCustomersArgs) -> (int,Customers) """
  pass
 def GetRmaOrder(self,args,rmaOrder):
  """ GetRmaOrder(self: Inbound,args: RmaOrderArgs) -> (bool,RmaOrder) """
  pass
 def GetRmaOrderItemIdentifications(self,rmaOrderId,orderLineId,itemIds):
  """ GetRmaOrderItemIdentifications(self: Inbound,rmaOrderId: int,orderLineId: int) -> (int,ItemIdentifications) """
  pass
 def GetRmaOrderLines(self,args,rmaOrderLines):
  """ GetRmaOrderLines(self: Inbound,args: GetRmaOrderLinesArgs) -> (int,RmaOrderLines) """
  pass
 def GetRmaOrderPrintLines(self,key,lines):
  """ GetRmaOrderPrintLines(self: Inbound,key: CacheKey) -> (int,RmaOrderPrintLines) """
  pass
 def GetRmaOrdersAll(self,rmaOrders):
  """ GetRmaOrdersAll(self: Inbound) -> (int,RmaOrders) """
  pass
 def GetRmaOrdersByFilter(self,filterBy,rmaOrders):
  """ GetRmaOrdersByFilter(self: Inbound,filterBy: RmaOrderArgs) -> (int,RmaOrders) """
  pass
 def GetRmaReasons(self,reasons):
  """ GetRmaReasons(self: Inbound) -> (int,RmaReasons) """
  pass
 def GetRmaReceiveLines(self,rmaOrders,warehouseCode,rmaReceiveLines):
  """ GetRmaReceiveLines(self: Inbound,rmaOrders: DataFlowObject[RmaOrders],warehouseCode: str) -> (DataFlowObject[RmaOrders],InboundReceiveLines) """
  pass
 def GetRmaReceiveLinesByKey(self,cacheKey,rmaReceiveLines):
  """ GetRmaReceiveLinesByKey(self: Inbound,cacheKey: CacheKey) -> (int,InboundReceiveLines) """
  pass
 def GetRmaReceiveLinesUsingOutboundOrders(self,dfObject,rmaReceiveLines):
  """ GetRmaReceiveLinesUsingOutboundOrders(self: Inbound,dfObject: DataFlowObject[PrepareAdhocRmaReceiveLinesArgs]) -> (DataFlowObject[PrepareAdhocRmaReceiveLinesArgs],InboundReceiveLines) """
  pass
 def GetVendors(self,args,vendors):
  """ GetVendors(self: Inbound,args: GetVendorsArgs) -> (int,Vendors) """
  pass
 def GetVendorsExpected(self,vendors):
  """ GetVendorsExpected(self: Inbound) -> (int,PurchaseOrderVendors) """
  pass
 def GetVendorsExpectedByFilter(self,vendors,args):
  """ GetVendorsExpectedByFilter(self: Inbound,args: GetPurchaseOrderVendorArgs) -> (int,PurchaseOrderVendors) """
  pass
 def GetVendorsWithPendingPreReceipts(self,args,vendors):
  """ GetVendorsWithPendingPreReceipts(self: Inbound,args: InboundOrderArgsBase) -> (int,PurchaseOrderVendors) """
  pass
 def InitializeLifetimeService(self):
  """ InitializeLifetimeService(self: Inbound) -> object """
  pass
 def MemberwiseClone(self,*args):
  """
  MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
  Creates a shallow copy of the current System.MarshalByRefObject object.
  cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object.
  Returns: A shallow copy of the current System.MarshalByRefObject object.
  MemberwiseClone(self: object) -> object
  Creates a shallow copy of the current System.Object.
  Returns: A shallow copy of the current System.Object.
  """
  pass
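 # --------------------------------------------------------------------
 # Illustrative usage (not part of the generated stubs): a minimal
 # receive flow with the Inbound methods below, assuming `inbound` is a
 # connected Inbound instance and the IronPython tuple-return convention
 # for out-parameters. The DataFlowObject constructor call and any
 # properties you would set on the args objects are assumptions for
 # illustration only.
 recvArgs = PrepareInboundReceiveLinesArgs()
 cacheKey = inbound.PrepareInboundReceiveLines(recvArgs)        # stage receive lines server-side
 found, lines = inbound.GetInboundReceiveLinesByKey(cacheKey)   # -> (bool,InboundReceiveLines)
 # ...scan/adjust quantities, then book the receipt:
 procArgs = ProcessInboundReceiveLinesArgs()
 dfo = inbound.ProcessPendingReceiveLines(DataFlowObject[ProcessInboundReceiveLinesArgs](procArgs))
 # --------------------------------------------------------------------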
""" pass def PreCreatePreReceipt(self,dfObject): """ PreCreatePreReceipt(self: Inbound,dfObject: DataFlowObject[CreatePreReceiptArgs]) -> DataFlowObject[CreatePreReceiptArgs] """ pass def PrepareInboundReceiveLines(self,args,cacheKey): """ PrepareInboundReceiveLines(self: Inbound,args: PrepareInboundReceiveLinesArgs) -> CacheKey """ pass def PrintPurchaseReceipt(self,groupGuid,printer,printingOptions): """ PrintPurchaseReceipt(self: Inbound,groupGuid: Guid,printer: str,printingOptions: PrintingOptions) -> bool """ pass def PrintReceiveLabels(self,line,quantity,label): """ PrintReceiveLabels(self: Inbound,line: InboundReceiveLine,quantity: Decimal,label: PrintLabel) """ pass def PrintRmaReceipt(self,groupGuid): """ PrintRmaReceipt(self: Inbound,groupGuid: Guid) -> bool """ pass def ProcessAdhocRmaOrderLines(self,customerNumber,printRmaInvoice,warehouseCode,orderLines,reference): """ ProcessAdhocRmaOrderLines(self: Inbound,customerNumber: str,printRmaInvoice: bool,warehouseCode: str,orderLines: RmaOrderLines,reference: str) -> ErpProcessPurchaseOrderLinesResult """ pass def ProcessPendingReceiveLines(self,dfObject): """ ProcessPendingReceiveLines(self: Inbound,dfObject: DataFlowObject[ProcessInboundReceiveLinesArgs]) -> DataFlowObject[ProcessInboundReceiveLinesArgs] """ pass def ProcessPreReceipt(self,preReceiptId,warehouseCode,orderLines,yourReference,transactionId): """ ProcessPreReceipt(self: Inbound,preReceiptId: int,warehouseCode: str,orderLines: InboundOrderLines,yourReference: str,transactionId: Guid) -> ErpProcessPurchaseOrderLinesResult """ pass def ReceiveItemIdMulti(self,dfObject): """ ReceiveItemIdMulti(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdMultiArgs]) -> DataFlowObject[ReceiveItemIdMultiArgs] """ pass def ReceiveItemIdRange(self,dfObject): """ ReceiveItemIdRange(self: Inbound,dfObject: DataFlowObject[ReceiveItemIdRangeArgs]) -> DataFlowObject[ReceiveItemIdRangeArgs] """ pass def RemoveInboundReceiveLine(self,cacheKey,receiveLineId): """ RemoveInboundReceiveLine(self: Inbound,cacheKey: CacheKey,receiveLineId: str) -> bool """ pass def RemoveLicensePlateFromReceipt(self,cacheKey,licensePlateId): """ RemoveLicensePlateFromReceipt(self: Inbound,cacheKey: CacheKey,licensePlateId: int) """ pass def UpdatePreReceiptStatus(self,dfObject): """ UpdatePreReceiptStatus(self: Inbound,dfObject: DataFlowObject[UpdatePreReceiptStatusArgs]) -> DataFlowObject[UpdatePreReceiptStatusArgs] """ pass def UpdateQuantityReceiveLine(self,dfObject,receiveLine): """ UpdateQuantityReceiveLine(self: Inbound,dfObject: DataFlowObject[ReceiveArgs]) -> (DataFlowObject[ReceiveArgs],InboundReceiveLine) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,stockManager,messaging,general): """ __new__(cls: type,stockManager: IStockManager,messaging: IMessaging,general: IGeneral) """ pass StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) class Inventory(MarshalByRefObject): """ Inventory(stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) """ def ZZZ(self): """hardcoded/mock instance of the class""" return Inventory() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddCountItemIdentitification(self,key,itemId,overwriteIfExists): """ AddCountItemIdentitification(self: Inventory,key: CacheKey,itemId: ItemIdentification,overwriteIfExists: bool) -> bool """ pass def AddCountItemIdentitificationMulti(self,key,itemIds,overwriteIfExists): """ AddCountItemIdentitificationMulti(self: Inventory,key: CacheKey,itemIds: ItemIdentifications,overwriteIfExists: bool) -> bool """ pass def AddCountQuantity(self,key,quantity,overwriteIfExists): """ AddCountQuantity(self: Inventory,key: CacheKey,quantity: Decimal,overwriteIfExists: bool) -> bool """ pass def AddWarehouseTransferItemIdentitifications(self,key,args): """ AddWarehouseTransferItemIdentitifications(self: Inventory,key: CacheKey,args: AddWarehouseTransferItemIdentitificationArgs) -> bool """ pass def AddWarehouseTransferItems(self,key,itemCodes,overwriteIfExists): """ AddWarehouseTransferItems(self: Inventory,key: CacheKey,itemCodes: List[str],overwriteIfExists: bool) -> bool """ pass def AddWarehouseTransferQuantities(self,key,items,overwriteIfExists): """ AddWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems,overwriteIfExists: bool) -> bool """ pass def AddWarehouseTransferQuantity(self,key,args): """ AddWarehouseTransferQuantity(self: Inventory,key: CacheKey,args: AddWarehouseTransferQuantityArgs) -> bool """ pass def BatchChangeCountType(self,filterBy,type): """ BatchChangeCountType(self: Inventory,filterBy: CountFilter,type: CountTypeEnum) -> int """ pass def CancelProcessCounts(self): """ CancelProcessCounts(self: Inventory) """ pass def ChangeDefaultLocationAfterTransfer(self,arg): """ ChangeDefaultLocationAfterTransfer(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] """ pass def ChangeLicensePlateStatus(self,args): """ ChangeLicensePlateStatus(self: Inventory,args: ChangeLicensePlateStatusArgs) """ pass def CheckLicensePlateIntegrity(self,args): """ CheckLicensePlateIntegrity(self: Inventory,args: CheckLicensePlateIntegrityArgs) -> CheckLicensePlateIntegrityResult """ pass def CreateCount(self,arg): """ CreateCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] """ pass def CreateCountFromCache(self,arg): """ CreateCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] """ pass def CreateCountGroup(self,arg): """ CreateCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] """ pass def CreateCountsForPickDifferences(self,batch): """ CreateCountsForPickDifferences(self: Inventory,batch: Batch) """ pass def CreateLicensePlate(self,lp): """ CreateLicensePlate(self: Inventory,lp: LicensePlate) -> LicensePlate """ pass def CreateLicensePlateAuditLogEntry(self,lpAuditEntry): """ CreateLicensePlateAuditLogEntry(self: Inventory,lpAuditEntry: LicensePlateAuditLog) -> LicensePlateAuditLog """ pass def CreateLicensePlateFromReceipt(self,args): """ 
CreateLicensePlateFromReceipt(self: Inventory,args: CreateLicensePlateFromReceiptArgs) -> LicensePlate """ pass def CreateOneCount(self,itemBarcode,warehouseCode,locationCode,countGroupId,itemId): """ CreateOneCount(self: Inventory,itemBarcode: str,warehouseCode: str,locationCode: str,countGroupId: int,itemId: str) -> bool """ pass def CreateOrUpdateLicensePlateItem(self,licensePlateId,item): """ CreateOrUpdateLicensePlateItem(self: Inventory,licensePlateId: int,item: LicensePlateItem) -> LicensePlateItem """ pass def CreateOrUpdateLicensePlateItems(self,licensePlateId,items): """ CreateOrUpdateLicensePlateItems(self: Inventory,licensePlateId: int,items: List[LicensePlateItem]) """ pass def CreateOrUpdateReplenishmentOrderLine(self,line,skipAllocationCheck): """ CreateOrUpdateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine],skipAllocationCheck: bool) -> DataFlowObject[ReplenishmentOrderLine] """ pass def CreateReplenishmentOrder(self,order): """ CreateReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] """ pass def CreateReplenishmentOrderLine(self,line): """ CreateReplenishmentOrderLine(self: Inventory,line: DataFlowObject[ReplenishmentOrderLine]) -> DataFlowObject[ReplenishmentOrderLine] """ pass def CreateReplenishmentOrderLines(self,lines): """ CreateReplenishmentOrderLines(self: Inventory,lines: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] """ pass def CreateReplenishmentOrders(self,dfObject): """ CreateReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] """ pass def CreateZeroCount(self,arg): """ CreateZeroCount(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] """ pass def CreateZeroCountByCountGroup(self,countGroupId): """ CreateZeroCountByCountGroup(self: Inventory,countGroupId: int) """ pass def DeleteCountFromCache(self,arg): """ DeleteCountFromCache(self: Inventory,arg: DataFlowObject[CacheKey]) -> DataFlowObject[CacheKey] """ pass def DeleteCountFromCacheAndTable(self,cacheKey): """ DeleteCountFromCacheAndTable(self: Inventory,cacheKey: CacheKey) """ pass def DeleteCountFromTable(self,arg): """ DeleteCountFromTable(self: Inventory,arg: DataFlowObject[Count]) -> DataFlowObject[Count] """ pass def DeleteCountGroup(self,arg): """ DeleteCountGroup(self: Inventory,arg: DataFlowObject[CountGroup]) -> DataFlowObject[CountGroup] """ pass def DeleteLicensePlateById(self,licensePlateId): """ DeleteLicensePlateById(self: Inventory,licensePlateId: int) """ pass def DeleteLicensePlateItemById(self,itemId): """ DeleteLicensePlateItemById(self: Inventory,itemId: int) """ pass def DeleteReplenishmentOrder(self,order): """ DeleteReplenishmentOrder(self: Inventory,order: DataFlowObject[ReplenishmentOrder]) -> DataFlowObject[ReplenishmentOrder] """ pass def DeleteReplenishmentOrderLines(self,dfObject): """ DeleteReplenishmentOrderLines(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrderLines]) -> DataFlowObject[ReplenishmentOrderLines] """ pass def DeleteReplenishmentOrders(self,dfObject): """ DeleteReplenishmentOrders(self: Inventory,dfObject: DataFlowObject[ReplenishmentOrders]) -> DataFlowObject[ReplenishmentOrders] """ pass def GenerateReplenishmentOrder(self,warehouseToCode): """ GenerateReplenishmentOrder(self: Inventory,warehouseToCode: str) -> bool """ pass def GenerateReplenishmentOrders(self,args): """ GenerateReplenishmentOrders(self: Inventory,args: 
GenerateReplenishmentOrdersArgs) -> bool """ pass def GetAllItemIdentifications(self,filterBy): """ GetAllItemIdentifications(self: Inventory,filterBy: GetAllItemIdentificationsArgs) -> ItemIdentifications """ pass def GetCount(self,*__args): """ GetCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int,itemId: str) -> (bool,Count) GetCount(self: Inventory,key: CacheKey) -> (bool,Count) """ pass def GetCountByCountId(self,countId,count): """ GetCountByCountId(self: Inventory,countId: int) -> (bool,Count) """ pass def GetCountGroupIdByType(self,type): """ GetCountGroupIdByType(self: Inventory,type: CountGroupTypeEnum) -> int """ pass def GetCountGroups(self,filter,countGroups): """ GetCountGroups(self: Inventory,filter: str) -> (int,CountGroups) """ pass def GetCountGroupsAll(self,countGroups): """ GetCountGroupsAll(self: Inventory) -> (int,CountGroups) """ pass def GetCountGroupsById(self,id): """ GetCountGroupsById(self: Inventory,id: int) -> CountGroup """ pass def GetCountGroupsByType(self,type): """ GetCountGroupsByType(self: Inventory,type: CountGroupTypeEnum) -> CountGroup """ pass def GetCounts(self,filterBy,pagingParams,counts): """ GetCounts(self: Inventory,filterBy: CountFilter,pagingParams: PagingParams) -> (int,Counts) """ pass def GetItemsOnLocationLeftToAddToLp(self,args): """ GetItemsOnLocationLeftToAddToLp(self: Inventory,args: GetItemsOnLocationLeftToAddToLpArgs) -> List[LpLocationItem] """ pass def GetItemStockAllocations(self,filterBy,allocations): """ GetItemStockAllocations(self: Inventory,filterBy: GetAllocationsArgs) -> (int,ItemStockAllocationList) """ pass def GetLicensePlateAuditLogEntries(self,args,pagingParams,logEntries): """ GetLicensePlateAuditLogEntries(self: Inventory,args: GetLicensePlateItemAuditLogEntriesArgs,pagingParams: PagingParams) -> (int,LicensePlateAuditLogs) """ pass def GetLicensePlateByCode(self,args,licensePlate): """ GetLicensePlateByCode(self: Inventory,args: GetLicensePlateByCodeArgs) -> (bool,LicensePlate) """ pass def GetLicensePlateById(self,licensePlateId,licensePlate): """ GetLicensePlateById(self: Inventory,licensePlateId: int) -> (bool,LicensePlate) """ pass def GetLicensePlateItems(self,args,pagingParams,items): """ GetLicensePlateItems(self: Inventory,args: GetLicensePlateItemsArgs,pagingParams: PagingParams) -> (int,LicensePlateItems) """ pass def GetLicensePlates(self,args,pagingParams,licensePlates): """ GetLicensePlates(self: Inventory,args: GetLicensePlatesArgs,pagingParams: PagingParams) -> (int,LicensePlates) """ pass def GetProcessCountsProgress(self,percentageComplete,message): """ GetProcessCountsProgress(self: Inventory) -> (int,str) """ pass def GetReplenishmentOrder(self,args,replenishmentOrder): """ GetReplenishmentOrder(self: Inventory,args: ReplenishmentOrderArgs) -> (bool,ReplenishmentOrder) """ pass def GetReplenishmentOrderLines(self,args,replenishmentOrderLines): """ GetReplenishmentOrderLines(self: Inventory,args: ReplenishmentOrderLinesArgs) -> (int,ReplenishmentOrderLines) """ pass def GetReplenishmentOrders(self,filterBy,replenishmentOrders): """ GetReplenishmentOrders(self: Inventory,filterBy: ReplenishmentOrderArgs) -> (int,ReplenishmentOrders) """ pass def GetStockManagerList(self,filterBy,pagingParams,stockList): """ GetStockManagerList(self: Inventory,filterBy: GetStockManagerListArgs,pagingParams: PagingParams) -> (int,ItemStockWithAllocationsList) """ pass def GetStockOnMatchingFilter(self,args): """ GetStockOnMatchingFilter(self: Inventory,args: 
GetStockManagerListArgs) -> FindableList[ItemStockWithLocations] """
  pass
 def GetWarehousesWithPendingCounts(self,warehouses):
  """ GetWarehousesWithPendingCounts(self: Inventory) -> (int,Warehouses) """
  pass
 def GetWarehouseTransfer(self,key):
  """ GetWarehouseTransfer(self: Inventory,key: CacheKey) -> WarehouseTransfer """
  pass
 def GetWarehouseTransferItems(self,key):
  """ GetWarehouseTransferItems(self: Inventory,key: CacheKey) -> WarehouseTransferItems """
  pass
 def InitializeLifetimeService(self):
  """ InitializeLifetimeService(self: Inventory) -> object """
  pass
 def IsValidItemInCountGroup(self,itemBarcode,countGroup,quantity):
  """ IsValidItemInCountGroup(self: Inventory,itemBarcode: str,countGroup: CountGroup) -> (bool,Decimal) """
  pass
 def IsValidLocationInCountGroup(self,warehouseCode,locationBarcode,countGroup,location):
  """ IsValidLocationInCountGroup(self: Inventory,warehouseCode: str,locationBarcode: str,countGroup: CountGroup) -> (bool,Location) """
  pass
 def ItemBelongsToLicensePlate(self,args):
  """ ItemBelongsToLicensePlate(self: Inventory,args: ItemBelongsToLicensePlateArgs) -> bool """
  pass
 def MemberwiseClone(self,*args):
  """
  MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
  Creates a shallow copy of the current System.MarshalByRefObject object.
  cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object.
  Returns: A shallow copy of the current System.MarshalByRefObject object.
  MemberwiseClone(self: object) -> object
  Creates a shallow copy of the current System.Object.
  Returns: A shallow copy of the current System.Object.
  """
  pass
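 # --------------------------------------------------------------------
 # Illustrative usage (not part of the generated stubs): sketch of a
 # replenishment flow with the methods below, assuming `inventory` is a
 # connected Inventory instance and the IronPython tuple-return
 # convention for out-parameters. `from System import Decimal` is assumed
 # to be in scope; the DataFlowObject constructor, the `Id` member on
 # ReplenishmentOrder and the 'WH-02'/'ITEM-001' codes are assumptions.
 from System import Decimal
 ok, order = inventory.PreCreateReplenishmentOrderForWarehouse('WH-02')
 if ok:
     dfo = inventory.CreateReplenishmentOrder(DataFlowObject[ReplenishmentOrder](order))
     ok, line = inventory.PreCreateReplenishmentOrderLineForItem(order.Id, 'ITEM-001', Decimal(10))
 # --------------------------------------------------------------------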
""" pass def PreCreateReplenishmentOrderForWarehouse(self,warehouseTo,order): """ PreCreateReplenishmentOrderForWarehouse(self: Inventory,warehouseTo: str) -> (bool,ReplenishmentOrder) """ pass def PreCreateReplenishmentOrderLineForItem(self,replenishmentOrderId,itemcode,quantity,line): """ PreCreateReplenishmentOrderLineForItem(self: Inventory,replenishmentOrderId: int,itemcode: str,quantity: Decimal) -> (bool,ReplenishmentOrderLine) """ pass def PrepareCount(self,itemCode,warehouseCode,warehouseLocationCode,countGroupId): """ PrepareCount(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupId: int) -> CacheKey """ pass def PrepareCountWithType(self,itemCode,warehouseCode,warehouseLocationCode,countGroupType): """ PrepareCountWithType(self: Inventory,itemCode: str,warehouseCode: str,warehouseLocationCode: str,countGroupType: CountGroupTypeEnum) -> CacheKey """ pass def PrepareWarehouseTransfer(self,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo,type): """ PrepareWarehouseTransfer(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str,type: WarehouseTransferType) -> CacheKey """ pass def PrepareWarehouseTransferFrom(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom): """ PrepareWarehouseTransferFrom(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str) -> CacheKey """ pass def PrepareWarehouseTransferFromInterBranch(self,warehouseCodeFrom,warehouseLocationCodeFrom,transferType): """ PrepareWarehouseTransferFromInterBranch(self: Inventory,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,transferType: Nullable[WarehouseTransferType]) -> CacheKey """ pass def PrepareWarehouseTransferItem(self,itemCode,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): """ PrepareWarehouseTransferItem(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey """ pass def PrepareWarehouseTransferReceived(self,itemCode,warehouseCodeFrom,warehouseCodeTo,warehouseLocationCodeTo): """ PrepareWarehouseTransferReceived(self: Inventory,itemCode: str,warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey """ pass def PrepareWarehouseTransferTo(self,itemCode,warehouseCodeTo,warehouseLocationCodeTo): """ PrepareWarehouseTransferTo(self: Inventory,itemCode: str,warehouseCodeTo: str,warehouseLocationCodeTo: str) -> CacheKey """ pass def PrepareWarehouseTransferToMulti(self,itemCodes,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): """ PrepareWarehouseTransferToMulti(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey """ pass def PrepareWarehouseTransferToMultiReceived(self,itemCodes,warehouseCodeFrom,warehouseCodeTo,warehouseLocationTo): """ PrepareWarehouseTransferToMultiReceived(self: Inventory,itemCodes: List[str],warehouseCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey """ pass def PrepareWarehouseTransferToMultiTransport(self,itemCodes,warehouseCodeTo,warehouseLocationTo): """ PrepareWarehouseTransferToMultiTransport(self: Inventory,itemCodes: List[str],warehouseCodeTo: str,warehouseLocationTo: str) -> CacheKey """ pass def PrintLicensePlateLabels(self,args): """ PrintLicensePlateLabels(self: Inventory,args: PrintLicensePlateLabelArgs) 
""" pass def ProcessCounts(self,warehouseCode,countGroup,description,date,ledgerCode,started): """ ProcessCounts(self: Inventory,warehouseCode: str,countGroup: int,description: str,date: DateTime,ledgerCode: str) -> bool """ pass def ProcessReplenishmentOrder(self,printInvoices,order,orderlines): """ ProcessReplenishmentOrder(self: Inventory,printInvoices: bool,order: ReplenishmentOrder,orderlines: List[ReplenishmentOrderLine]) -> ErpProcessSalesOrderLinesResult """ pass def ProcessWarehouseTransfer(self,dfObject): """ ProcessWarehouseTransfer(self: Inventory,dfObject: DataFlowObject[ProcessWarehouseTransferArgs]) -> DataFlowObject[ProcessWarehouseTransferArgs] """ pass def RemoveCountItemIdentification(self,key,itemId): """ RemoveCountItemIdentification(self: Inventory,key: CacheKey,itemId: str) -> bool """ pass def RemoveWarehouseTransfer(self,key): """ RemoveWarehouseTransfer(self: Inventory,key: CacheKey) -> bool """ pass def RemoveWarehouseTransferItemCompletely(self,key,itemCode): """ RemoveWarehouseTransferItemCompletely(self: Inventory,key: CacheKey,itemCode: str) -> bool """ pass def RemoveWarehouseTransferItemIdentification(self,key,itemCode,itemId): """ RemoveWarehouseTransferItemIdentification(self: Inventory,key: CacheKey,itemCode: str,itemId: str) -> bool """ pass def SubtractWarehouseTransferItemQuantity(self,key,itemCode,quantity): """ SubtractWarehouseTransferItemQuantity(self: Inventory,key: CacheKey,itemCode: str,quantity: Decimal) -> bool """ pass def SubtractWarehouseTransferQuantities(self,key,items): """ SubtractWarehouseTransferQuantities(self: Inventory,key: CacheKey,items: WarehouseTransferItems) -> bool """ pass def SyncStock(self): """ SyncStock(self: Inventory) """ pass def TransferItems(self,arg): """ TransferItems(self: Inventory,arg: DataFlowObject[WarehouseTransfer]) -> DataFlowObject[WarehouseTransfer] """ pass def UpdateLicensePlate(self,lp): """ UpdateLicensePlate(self: Inventory,lp: LicensePlate) """ pass def UpdateWarehouseTransfer(self,key,warehouseCodeFrom,warehouseLocationCodeFrom,warehouseCodeTo,warehouseLocationTo): """ UpdateWarehouseTransfer(self: Inventory,key: CacheKey,warehouseCodeFrom: str,warehouseLocationCodeFrom: str,warehouseCodeTo: str,warehouseLocationTo: str) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """
  pass
 @staticmethod
 def __new__(self,stockManager,countCacheKeyConstructor):
  """ __new__(cls: type,stockManager: IStockManager,countCacheKeyConstructor: ICacheKeyConstructor[Count]) """
  pass
 StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None)


class Mailer(object):
 """ Mailer() """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return Mailer()
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""

class Messaging(MarshalByRefObject):
 """ Messaging() """
 def ZZZ(self):
  """hardcoded/mock instance of the class"""
  return Messaging()
 instance=ZZZ()
 """hardcoded/returns an instance of the class"""
 def ChangeMessagesStatus(self,messageIds,newStatus):
  """ ChangeMessagesStatus(self: Messaging,messageIds: List[Guid],newStatus: MessageStatus) """
  pass
 def ChangeMessageStatus(self,messageId,newStatus):
  """ ChangeMessageStatus(self: Messaging,messageId: Guid,newStatus: MessageStatus) """
  pass
 def CreateMessage(self,message):
  """ CreateMessage(self: Messaging,message: IMessage) """
  pass
 def DeleteMessageByGuid(self,messageId):
  """ DeleteMessageByGuid(self: Messaging,messageId: Guid) """
  pass
 def DequeueNextMessage(self):
  """ DequeueNextMessage(self: Messaging) -> DequeueResult """
  pass
 def ExecuteMessageHandler(self,args):
  """ ExecuteMessageHandler(self: Messaging,args: ExecuteMessageHandlerArgs) -> ExecuteMessageHandlerResult """
  pass
 def ExecuteMessagePublisher(self,args):
  """ ExecuteMessagePublisher(self: Messaging,args: ExecuteMessagePublisherArgs) -> ExecuteMessagePublisherResult """
  pass
 def GetDistinctTypeList(self,args):
  """ GetDistinctTypeList(self: Messaging,args: GetDistinctTypeListArgs) -> List[str] """
  pass
 def GetMessage(self,messageId):
  """ GetMessage(self: Messaging,messageId: Guid) -> IMessage """
  pass
 def GetMessageBodyAsString(self,messageId,decodeAs):
  """ GetMessageBodyAsString(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs) -> str """
  pass
 def GetMessageHandlers(self,args,messageHandlers):
  """ GetMessageHandlers(self: Messaging,args: GetMessageHandlersArgs) -> (int,IList[MessageHandlerDescriptorSerializable]) """
  pass
 def GetMessagePublishers(self,args,messagePublishers):
  """ GetMessagePublishers(self: Messaging,args: GetMessagePublishersArgs) -> (int,IList[MessagePublisherDescriptorSerializable]) """
  pass
 def GetMessages(self,args,paging,messages):
  """ GetMessages(self: Messaging,args: GetMessagesArgs,paging: PagingParams) -> (int,Messages) """
  pass
 def InitializeLifetimeService(self):
  """ InitializeLifetimeService(self: Messaging) -> object """
  pass
 def MemberwiseClone(self,*args):
  """
  MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
  Creates a shallow copy of the current System.MarshalByRefObject object.
  cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object.
  Returns: A shallow copy of the current System.MarshalByRefObject object.
  MemberwiseClone(self: object) -> object
  Creates a shallow copy of the current System.Object.
  Returns: A shallow copy of the current System.Object.
  """
  pass
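 # --------------------------------------------------------------------
 # Illustrative usage (not part of the generated stubs): a dequeue-and-ack
 # sketch for the message queue, assuming `messaging` is a connected
 # Messaging instance. DequeueResult's `Message` member and the
 # MessageStatus/MessageBodyDecodeAs values used here are assumptions;
 # check the actual assembly for the real names.
 result = messaging.DequeueNextMessage()
 if result is not None and result.Message is not None:
     body = messaging.GetMessageBodyAsString(result.Message.Id, MessageBodyDecodeAs.Text)  # enum value assumed
     # ...handle the message body, then mark it done:
     messaging.ChangeMessageStatus(result.Message.Id, MessageStatus.Completed)             # status name assumed
 # --------------------------------------------------------------------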
""" pass def ReissueMessage(self,messageId): """ ReissueMessage(self: Messaging,messageId: Guid) """ pass def ReissueMessages(self,messageIds): """ ReissueMessages(self: Messaging,messageIds: List[Guid]) """ pass def SaveMessageBody(self,messageId,decodeAs,messageBody): """ SaveMessageBody(self: Messaging,messageId: Guid,decodeAs: MessageBodyDecodeAs,messageBody: str) """ pass def StartMessageQueueListener(self,cancellationToken): """ StartMessageQueueListener(self: Messaging,cancellationToken: CancellationToken) -> Task """ pass def UpdateMessage(self,message): """ UpdateMessage(self: Messaging,message: IMessage) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass class NotificationCenter(MarshalByRefObject): """ NotificationCenter(general: General) """ def ZZZ(self): """hardcoded/mock instance of the class""" return NotificationCenter() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddNotification(self,notificationToInsert): """ AddNotification(self: NotificationCenter,notificationToInsert: InsertNotificationArgs) """ pass def AddNotificationGroup(self,notificationGroup): """ AddNotificationGroup(self: NotificationCenter,notificationGroup: AddNotificationGroupArgs) """ pass def AddTaskNotificationSummaryTasks(self): """ AddTaskNotificationSummaryTasks(self: NotificationCenter) """ pass def DeleteNotification(self,notificationId): """ DeleteNotification(self: NotificationCenter,notificationId: int) """ pass def DeleteNotificationGroup(self,notificationGroup): """ DeleteNotificationGroup(self: NotificationCenter,notificationGroup: DeleteNotificationGroupArgs) """ pass def DeleteNotificationsByReference(self,notificationFilter): """ DeleteNotificationsByReference(self: NotificationCenter,notificationFilter: DeleteNotificationByReferenceArgs) """ pass def GetAllNotificationGroups(self): """ GetAllNotificationGroups(self: NotificationCenter) -> List[NotificationGroup] """ pass def GetNotifications(self,filterOn): """ GetNotifications(self: NotificationCenter,filterOn: GetNotificationsArgs) -> List[Notification] """ pass def HasNotifications(self,filterOn): """ HasNotifications(self: NotificationCenter,filterOn: HasNotificationsArgs) -> bool """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: NotificationCenter) -> object """ pass def MarkAsRead(self,notificationId,userId): """ MarkAsRead(self: NotificationCenter,notificationId: int,userId: int) """ pass def MarkGroupAsRead(self,groupKey,userId): """ MarkGroupAsRead(self: NotificationCenter,groupKey: str,userId: int) """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. 
Returns: A shallow copy of the current System.Object. """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,general): """ __new__(cls: type,general: General) """ pass class NotificationSummary(MarshalByRefObject): """ NotificationSummary(implementationContainer: NotificationTypeContainer) NotificationSummary() """ def ZZZ(self): """hardcoded/mock instance of the class""" return NotificationSummary() instance=ZZZ() """hardcoded/returns an instance of the class""" def DeleteConfiguration(self,notificationSummaryId): """ DeleteConfiguration(self: NotificationSummary,notificationSummaryId: int) """ pass def DeleteConfigurations(self,notificationSummaryIds): """ DeleteConfigurations(self: NotificationSummary,notificationSummaryIds: List[int]) """ pass def ExecuteSummaries(self): """ ExecuteSummaries(self: NotificationSummary) """ pass def GetAllConfigurations(self): """ GetAllConfigurations(self: NotificationSummary) -> List[NotificationSummaryConfiguration] """ pass def GetAllExecutionSchedules(self): """ GetAllExecutionSchedules(self: NotificationSummary) -> List[str] """ pass def GetAllExecutionTypes(self): """ GetAllExecutionTypes(self: NotificationSummary) -> List[str] """ pass def GetConfigurationForm(self,executionType): """ GetConfigurationForm(self: NotificationSummary,executionType: str) -> UiForm """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: NotificationSummary) -> object """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def SaveConfiguration(self,model): """ SaveConfiguration(self: NotificationSummary,model: NotificationSummaryConfiguration) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,implementationContainer=None): """ __new__(cls: type,implementationContainer: NotificationTypeContainer) __new__(cls: type) """ pass class NumberGeneration(MarshalByRefObject): """ NumberGeneration() """ def ZZZ(self): """hardcoded/mock instance of the class""" return NumberGeneration() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddUsedNumber(self,args): """ AddUsedNumber(self: NumberGeneration,args: AddUsedNumberArgs) """ pass def CreateNumberRange(self,dfObject): """ CreateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] """ pass def DeleteNumberRange(self,dfObject): """ DeleteNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] """ pass def GenerateNumbers(self,dfObject): """ GenerateNumbers(self: NumberGeneration,dfObject: DataFlowObject[GenerateBarcodeLabelArgs]) -> DataFlowObject[GenerateBarcodeLabelArgs] """ pass def GetCurrentNumber(self,rangeId): """ GetCurrentNumber(self: NumberGeneration,rangeId: int) -> int """ pass def GetNumberRangeById(self,rangeId): """ GetNumberRangeById(self: NumberGeneration,rangeId: int) -> NumberRange """ pass def GetNumberRangesByFilter(self,args): """ GetNumberRangesByFilter(self: NumberGeneration,args: GetNumberRangeArgs) -> List[NumberRange] """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: NumberGeneration) -> object """ pass def IsNumberUsed(self,args): """ IsNumberUsed(self: NumberGeneration,args: UsedNumberArgs) -> bool """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def ResetNumberRange(self,dfObject): """ ResetNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] """ pass def UpdateNumberRange(self,dfObject): """ UpdateNumberRange(self: NumberGeneration,dfObject: DataFlowObject[NumberRange]) -> DataFlowObject[NumberRange] """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass class OfflineScanning(MarshalByRefObject): """ OfflineScanning(appSettings: IApplicationSettings,general: General,messaging: Messaging) """ def ZZZ(self): """hardcoded/mock instance of the class""" return OfflineScanning() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddScanner(self,args): """ AddScanner(self: OfflineScanning,args: AddScannerArgs) """ pass def BosInboundListenerPullDirect(self): """ BosInboundListenerPullDirect(self: OfflineScanning) -> int """ pass def DeleteScanner(self,args): """ DeleteScanner(self: OfflineScanning,args: DeleteScannerArgs) """ pass def DownloadFileAsync(self,filePath): """ DownloadFileAsync(self: OfflineScanning,filePath: str) -> Task[Stream] """ pass def EnsureLicenseExists(self): """ EnsureLicenseExists(self: OfflineScanning) """ pass def GetAppVersionFileSpec(self,args): """ GetAppVersionFileSpec(self: OfflineScanning,args: GetAppVersionFileSpecArgs) -> str """ pass def GetAppVersions(self): """ GetAppVersions(self: OfflineScanning) -> AppVersions """ pass def GetCurrentAppVersion(self): """ GetCurrentAppVersion(self: OfflineScanning) -> LicenseAppVersion """ pass def GetScanners(self): """ GetScanners(self: OfflineScanning) -> Scanners """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: OfflineScanning) -> object """ pass def IsBosInboundListenerRunning(self): """ IsBosInboundListenerRunning(self: OfflineScanning) -> bool """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def SetCurrentAppVersion(self,args): """ SetCurrentAppVersion(self: OfflineScanning,args: SetCurrentAppVersionArgs) """ pass def StartBosInboundListener(self): """ StartBosInboundListener(self: OfflineScanning) -> bool """ pass def UploadFile(self,name,file,overwrite): """ UploadFile(self: OfflineScanning,name: str,file: Stream,overwrite: bool) """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,appSettings,general,messaging): """ __new__(cls: type,appSettings: IApplicationSettings,general: General,messaging: Messaging) """ pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: CurrentLicense(self: OfflineScanning) -> License """ class OnGetDestinationLocationForLine(MulticastDelegate): """ OnGetDestinationLocationForLine(object: object,method: IntPtr) """ def ZZZ(self): """hardcoded/mock instance of the class""" return OnGetDestinationLocationForLine() instance=ZZZ() """hardcoded/returns an instance of the class""" def BeginInvoke(self,line,defaultWarehouseLocationCodeOutbound,callback,object): """ BeginInvoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str,callback: AsyncCallback,object: object) -> IAsyncResult """ pass def CombineImpl(self,*args): """ CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate Combines this System.Delegate with the specified System.Delegate to form a new delegate. follow: The delegate to combine with this delegate. Returns: A delegate that is the new root of the System.MulticastDelegate invocation list. """ pass def DynamicInvokeImpl(self,*args): """ DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object Dynamically invokes (late-bound) the method represented by the current delegate. args: An array of objects that are the arguments to pass to the method represented by the current delegate.-or- null,if the method represented by the current delegate does not require arguments. Returns: The object returned by the method represented by the delegate. """ pass def EndInvoke(self,result): """ EndInvoke(self: OnGetDestinationLocationForLine,result: IAsyncResult) -> str """ pass def GetMethodImpl(self,*args): """ GetMethodImpl(self: MulticastDelegate) -> MethodInfo Returns a static method represented by the current System.MulticastDelegate. Returns: A static method represented by the current System.MulticastDelegate. """ pass def Invoke(self,line,defaultWarehouseLocationCodeOutbound): """ Invoke(self: OnGetDestinationLocationForLine,line: OutboundOrderLine,defaultWarehouseLocationCodeOutbound: str) -> str """ pass def RemoveImpl(self,*args): """ RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate Removes an element from the invocation list of this System.MulticastDelegate that is equal to the specified delegate. value: The delegate to search for in the invocation list. Returns: If value is found in the invocation list for this instance,then a new System.Delegate without value in its invocation list; otherwise,this instance with its original invocation list. """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,object,method): """ __new__(cls: type,object: object,method: IntPtr) """ pass def __reduce_ex__(self,*args): pass class Outbound(MarshalByRefObject): """ Outbound(stockManager: IStockManager,messaging: Messaging) """ def ZZZ(self): """hardcoded/mock instance of the class""" return Outbound() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddDirectOrder(self,args): """ AddDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] """ pass def AddDirectOrderLine(self,args): """ AddDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def AddDirectOrderLineItemIdentification(self,args): """ AddDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def AddDirectOrderLineItemIdentifications(self,args): """ AddDirectOrderLineItemIdentifications(self: Outbound,args: DirectOrderLineItemIdentificationsCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def AddPackageUsingPreset(self,args,newPackageNumber,packages): """ AddPackageUsingPreset(self: Outbound,args: AddTransportPackageArgs) -> (bool,Guid,TransportPackages) """ pass def CheckBatchScan(self,args): """ CheckBatchScan(self: Outbound,args: BatchScanArgs) -> BatchScanResult """ pass def CloseBatchesForPacking(self,args): """ CloseBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) """ pass def CloseBatchForPickingById(self,id): """ CloseBatchForPickingById(self: Outbound,id: str) -> bool """ pass def CloseTransportPackages(self,packagesKey): """ CloseTransportPackages(self: Outbound,packagesKey: CacheKey) """ pass def CreateBatchByCustomerNumbers(self,customers,createdByClientType,createdBatches,message): """ CreateBatchByCustomerNumbers(self: Outbound,customers: Customers,createdByClientType: BatchCreatedByClientTypeEnum) -> (int,Batches,str) """ pass def CreateBatches(self,orderNumbers,createdByClientType,batchSettings,createdBatches,message): """ CreateBatches(self: Outbound,orderNumbers: List[str],createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (int,Batches,str) """ pass def CreateBatchesAndRoutes(self,batchableSoLines,nonBatchableSoLines,allocationSettings,batchSink,createdByClientType,batchSettings,message): """ CreateBatchesAndRoutes(self: Outbound,batchableSoLines: OutboundOrderLines,nonBatchableSoLines: OutboundOrderLines,allocationSettings: AllocationSettings,batchSink: BatchAllocationSink,createdByClientType: BatchCreatedByClientTypeEnum,batchSettings: BatchUpdateArgs) -> (Batches,str) """ pass def CreateBatchesByLineIds(self,orderNumbers,orderLineIds,createdByClientType,settings,createdBatches,message): """ CreateBatchesByLineIds(self: Outbound,orderNumbers: List[str],orderLineIds: List[int],createdByClientType: BatchCreatedByClientTypeEnum,settings: BatchUpdateArgs) -> (int,Batches,str) """ pass def DeleteBatchById(self,batchId): """ DeleteBatchById(self: Outbound,batchId: str) """ pass def DeleteBatches(self,batchesToDelete): """ DeleteBatches(self: Outbound,batchesToDelete: Batches) -> bool """ pass def DeleteBatchIfNothingChanged(self,batchCacheKey): """ DeleteBatchIfNothingChanged(self: Outbound,batchCacheKey: CacheKey) """ pass def DisposeTransportPackagesWhenUnchanged(self,dfObject): """ DisposeTransportPackagesWhenUnchanged(self: Outbound,dfObject: DataFlowObject[CacheKey]) -> 
DataFlowObject[CacheKey] """ pass def FinalizeProcessBatchPicking(self,batch,manager,warehouseLocationCodeTo): """ FinalizeProcessBatchPicking(self: Outbound,batch: Batch,manager: BatchPickManager,warehouseLocationCodeTo: str) -> str """ pass def GetAllocationProfiles(self,profiles): """ GetAllocationProfiles(self: Outbound) -> (int,AllocationProfiles) """ pass def GetAllocationSettingsByProfile(self,id): """ GetAllocationSettingsByProfile(self: Outbound,id: int) -> AllocationSettings """ pass def GetBatchByCacheKey(self,cacheKey,batch): """ GetBatchByCacheKey(self: Outbound,cacheKey: CacheKey) -> (bool,Batch) """ pass def GetBatchById(self,id,cacheKey,batch): """ GetBatchById(self: Outbound,id: str) -> (bool,CacheKey,Batch) """ pass def GetBatchByScan(self,barcode,batch): """ GetBatchByScan(self: Outbound,barcode: str) -> (bool,BatchBase) """ pass def GetBatchesAll(self,batches): """ GetBatchesAll(self: Outbound) -> (int,Batches) """ pass def GetBatchesByFilter(self,args,batches): """ GetBatchesByFilter(self: Outbound,args: BatchFilterArgs) -> (int,Batches) """ pass def GetBatchesIncomplete(self,batches): """ GetBatchesIncomplete(self: Outbound) -> (int,Batches) """ pass def GetBatchesIncompleteByFilter(self,args,batches): """ GetBatchesIncompleteByFilter(self: Outbound,args: GetBatchArgs) -> (int,Batches) """ pass def GetBatchesIncompleteSmall(self,batches): """ GetBatchesIncompleteSmall(self: Outbound) -> (int,FindableList[BatchBase]) """ pass def GetBatchesWithPendingPackages(self,args,result): """ GetBatchesWithPendingPackages(self: Outbound,args: BatchFilterArgs) -> (int,BatchFilterResult) """ pass def GetBoxColors(self,colors): """ GetBoxColors(self: Outbound) -> Array[Color] """ pass def GetCacheKeyOfTransportPackages(self,dfObject,packagesKey): """ GetCacheKeyOfTransportPackages(self: Outbound,dfObject: DataFlowObject[GetItemsToPackArgs]) -> (DataFlowObject[GetItemsToPackArgs],CacheKey) """ pass def GetCustomers(self,args,customers): """ GetCustomers(self: Outbound,args: GetCustomersArgs) -> (int,Customers) """ pass def GetCustomersPending(self,customers): """ GetCustomersPending(self: Outbound) -> (int,Customers) """ pass def GetCustomersPendingByFilter(self,customers,args): """ GetCustomersPendingByFilter(self: Outbound,args: GetCustomersPendingArgs) -> (int,Customers) """ pass def GetCustomersWithPendingPackages(self,args,customers): """ GetCustomersWithPendingPackages(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) """ pass @staticmethod def GetDefaultAllocationSettings(): """ GetDefaultAllocationSettings() -> AllocationSettings """ pass @staticmethod def GetDefaultBatchSink(): """ GetDefaultBatchSink() -> BatchAllocationSink """ pass def GetDirectOrder(self,args): """ GetDirectOrder(self: Outbound,args: DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] """ pass def GetDirectOrderHistoryByFilter(self,filter,pagingParams): """ GetDirectOrderHistoryByFilter(self: Outbound,filter: HistoryDirectOrdersFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrder]] """ pass def GetDirectOrderLineDetailsByLinePk(self,linePk): """ GetDirectOrderLineDetailsByLinePk(self: Outbound,linePk: int) -> DataFlowObject[List[ItemIdentification]] """ pass def GetDirectOrderLineHistoryByFilter(self,filter,pagingParams): """ GetDirectOrderLineHistoryByFilter(self: Outbound,filter: HistoryDirectOrderLinesFilter,pagingParams: PagingParams) -> DataFlowObject[List[HistoryDirectOrderLine]] """ pass def GetDirectOrdersPending(self): """ 
GetDirectOrdersPending(self: Outbound) -> DataFlowObject[List[DirectOrder]] """ pass def GetDocumentsOfShipment(self,shipmentPk,documents): """ GetDocumentsOfShipment(self: Outbound,shipmentPk: int) -> (int,List[Attachment]) """ pass def GetHistoryOutboundOrderCustomers(self,args,customers): """ GetHistoryOutboundOrderCustomers(self: Outbound,args: GetHistoryOutboundOrderCustomersArgs) -> (int,Customers) """ pass def GetHistoryOutboundOrderItems(self,args,items): """ GetHistoryOutboundOrderItems(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,Items) """ pass def GetHistoryOutboundOrderLines(self,args,orderLines): """ GetHistoryOutboundOrderLines(self: Outbound,args: GetHistoryOutboundOrderItemArgs) -> (int,HistoryOutboundOrderLines) """ pass def GetHistoryOutboundOrders(self,args,outboundOrders): """ GetHistoryOutboundOrders(self: Outbound,args: GetHistoryOutboundOrdersArgs) -> (int,HistoryOutboundOrders) """ pass def GetHistoryPackageNumbers(self,filter,shipmentId,historyShipmentLines): """ GetHistoryPackageNumbers(self: Outbound,filter: OutboundOrdersFilter,shipmentId: int) -> (int,HistoryShipmentLines) """ pass def GetHistoryShipment(self,shipment,packages,shipperId): """ GetHistoryShipment(self: Outbound,shipment: HistoryShipment) -> (bool,TransportPackages,str) """ pass def GetHistoryShipmentItemIdentifications(self,outboundOrdersId,shipmentPackageId,itemIdentifications): """ GetHistoryShipmentItemIdentifications(self: Outbound,outboundOrdersId: int,shipmentPackageId: int) -> (int,ItemIdentifications) """ pass def GetHistoryShipmentLines(self,filter,paging,shipmentPk,historyShipmentLines): """ GetHistoryShipmentLines(self: Outbound,filter: OutboundOrdersFilter,paging: PagingParams,shipmentPk: int) -> (int,HistoryShipmentLines) """ pass def GetHistoryShipmentsAll(self,pagingParams,shipments): """ GetHistoryShipmentsAll(self: Outbound,pagingParams: PagingParams) -> (int,HistoryShipments) """ pass def GetHistoryShipmentsByFilter(self,filter,pagingParams,shipments): """ GetHistoryShipmentsByFilter(self: Outbound,filter: HistoryShipmentFilter,pagingParams: PagingParams) -> (int,HistoryShipments) """ pass def GetHistoryShipmentsById(self,shipmentId): """ GetHistoryShipmentsById(self: Outbound,shipmentId: int) -> HistoryShipment """ pass def GetHistoryTransportPackages(self,shipmentId,packages): """ GetHistoryTransportPackages(self: Outbound,shipmentId: int,packages: TransportPackages) -> TransportPackages """ pass def GetItemIdsFromItemToPack(self,cacheKey,itemCode,itemIds): """ GetItemIdsFromItemToPack(self: Outbound,cacheKey: CacheKey,itemCode: str) -> (bool,ItemIdentifications) """ pass def GetItemsToPack(self,args,itemsToPack,itemsPacked): """ GetItemsToPack(self: Outbound,args: GetItemsToPackArgs) -> (TransportItems,TransportPackages) """ pass def GetItemsToPickOnPickLocation(self,cacheKey,warehouseCode,warehouseLocationCode,items): """ GetItemsToPickOnPickLocation(self: Outbound,cacheKey: CacheKey,warehouseCode: str,warehouseLocationCode: str) -> (int,BatchPickLocations) """ pass def GetMobileShipperById(self,shipperId,shipper): """ GetMobileShipperById(self: Outbound,shipperId: str) -> (bool,MobileShipper) """ pass def GetOutboundOrderLinesBatchableByCustomers(self,customers,batchableOrderLines,nonBatchableOrderLines): """ GetOutboundOrderLinesBatchableByCustomers(self: Outbound,customers: Customers) -> (OutboundOrderLines,OutboundOrderLines) """ pass def GetOutboundOrderLinesBatchableByOrders(self,orderNumbers,batchableOrderlines,nonBatchableOrderlines): """ 
GetOutboundOrderLinesBatchableByOrders(self: Outbound,orderNumbers: List[str]) -> (OutboundOrderLines,OutboundOrderLines) """ pass def GetOutboundOrderLinesFromBatches(self): """ GetOutboundOrderLinesFromBatches(self: Outbound) -> IEnumerable[OutboundOrderLine] """ pass def GetOutboundOrders(self,args,orders): """ GetOutboundOrders(self: Outbound,args: GetOutboundOrdersArgs) -> IEnumerable[OutboundOrder] """ pass def GetOutboundOrdersBatchable(self,args,batchableOrders,nonBatchableOrders): """ GetOutboundOrdersBatchable(self: Outbound,args: GetOutboundOrdersBatchableArgs) -> (OutboundOrders,OutboundOrders) """ pass def GetPackages(self,key,packages): """ GetPackages(self: Outbound,key: CacheKey) -> (bool,TransportPackages) """ pass def GetPickLocationOfItem(self,cacheKey,warehouseCode,itemCode,itemLocations): """ GetPickLocationOfItem(self: Outbound,cacheKey: CacheKey,warehouseCode: str,itemCode: str) -> (int,ItemLocations) """ pass @staticmethod def GetPrintAllocationSettings(): """ GetPrintAllocationSettings() -> AllocationSettings """ pass def GetSalesOrder(self,args,salesOrder): """ GetSalesOrder(self: Outbound,args: SalesOrderArgs) -> (bool,SalesOrder) """ pass def GetSalesOrderCostLines(self,args,salesOrderCostLines): """ GetSalesOrderCostLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) """ pass def GetSalesOrderLines(self,args,salesOrderLines): """ GetSalesOrderLines(self: Outbound,args: SalesOrderLinesArgs) -> (int,SalesOrderLines) """ pass def GetSalesOrdersAll(self,salesOrders): """ GetSalesOrdersAll(self: Outbound) -> (int,SalesOrders) """ pass def GetSalesOrdersByFilter(self,filterBy,salesOrders): """ GetSalesOrdersByFilter(self: Outbound,filterBy: SalesOrderArgs) -> (int,SalesOrders) """ pass def GetShipmentServices(self,shipperId,packagesKey,services): """ GetShipmentServices(self: Outbound,shipperId: str,packagesKey: CacheKey) -> (int,FindableList[MobileService]) """ pass def GetShipperById(self,shipperId,shipper): """ GetShipperById(self: Outbound,shipperId: str) -> (bool,ShipperBase) """ pass def GetShippers(self,shippers): """ GetShippers(self: Outbound) -> (int,FindableList[IShipper]) """ pass def GetShipperServiceLevelsByShipperId(self,shipperId,services): """ GetShipperServiceLevelsByShipperId(self: Outbound,shipperId: str) -> (int,FindableList[MobileService]) """ pass def GetShipperSettingsTableById(self,shipperId): """ GetShipperSettingsTableById(self: Outbound,shipperId: str) -> SystemSettingsTable """ pass def HandleColliForStockRegistration(self,transportPackages): """ HandleColliForStockRegistration(self: Outbound,transportPackages: TransportPackages) -> ColliRegistrationResult """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: Outbound) -> object """ pass def InitOrderMatchesCustomerValidator(self): """ InitOrderMatchesCustomerValidator(self: Outbound) -> OrderMatchesCustomerValidator """ pass def LogAndCleanupShipment(self,shipment,packages,arg): """ LogAndCleanupShipment(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] """ pass def MarkPickLocationAsPicked(self,cacheKey,idOfBatchPickLocation): """ MarkPickLocationAsPicked(self: Outbound,cacheKey: CacheKey,idOfBatchPickLocation: str) -> BatchPickLocation """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. 
cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def MoveTransportItemsBetweenTransportPackages(self,dfObject): """ MoveTransportItemsBetweenTransportPackages(self: Outbound,dfObject: DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs]) -> DataFlowObject[MoveTransportItemsBetweenTransportPackagesArgs] """ pass def OpenBatchesForPacking(self,args,customers): """ OpenBatchesForPacking(self: Outbound,args: GetCustomersWithPendingPackagesArgs) -> (int,PackCustomers) """ pass def OpenBatchForPickingById(self,id,cacheKey,batch): """ OpenBatchForPickingById(self: Outbound,id: str) -> (bool,CacheKey,Batch) """ pass def OpenTransferPackagesForShipping(self,key,packages): """ OpenTransferPackagesForShipping(self: Outbound,key: CacheKey) -> (bool,TransportPackages) """ pass def PickInBatch(self,dfObject): """ PickInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] """ pass def PickItemIdInBatch(self,dfObject): """ PickItemIdInBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] """ pass def PickItemIdRangeInBatch(self,dfObject): """ PickItemIdRangeInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdRangeArgs]) -> DataFlowObject[PickItemIdRangeArgs] """ pass def PickManualSelectedMultipleItemIdsInBatch(self,dfObject): """ PickManualSelectedMultipleItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] """ pass def PickMultipleScannedItemIdsInBatch(self,dfObject): """ PickMultipleScannedItemIdsInBatch(self: Outbound,dfObject: DataFlowObject[PickItemIdsArgs]) -> DataFlowObject[PickItemIdsArgs] """ pass def PrintDocumentsOfShipment(self,args): """ PrintDocumentsOfShipment(self: Outbound,args: PrintShipmentDocumentArgs) -> bool """ pass def PrintDuplicateLabels(self,args): """ PrintDuplicateLabels(self: Outbound,args: PrintDuplicateLabelArgs) -> bool """ pass def PrintPackageSlip(self,args): """ PrintPackageSlip(self: Outbound,args: PrintPackageSlipArgs) -> bool """ pass def PrintTransportPackageLabel(self,cacheKey,boxGuid,label): """ PrintTransportPackageLabel(self: Outbound,cacheKey: CacheKey,boxGuid: Guid,label: PrintLabel) -> bool """ pass def ProcessBatchPacking(self,dfObject): """ ProcessBatchPacking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPackingArgs]) -> DataFlowObject[ProcessBatchPackingArgs] """ pass def ProcessBatchPicking(self,dfObject): """ ProcessBatchPicking(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs]) -> DataFlowObject[ProcessBatchPickingArgs] """ pass def ProcessBatchPickingToErp(self,dfObject,batch,manager,getDestinationLocationForLineDelegate): """ ProcessBatchPickingToErp(self: Outbound,dfObject: DataFlowObject[ProcessBatchPickingArgs],batch: Batch,manager: BatchPickManager,getDestinationLocationForLineDelegate: OnGetDestinationLocationForLine) -> bool """ pass def ProcessDirectOrder(self,args): """ ProcessDirectOrder(self: Outbound,args: 
DirectOrderCrudArgs) -> DataFlowObject[DirectOrder] """ pass def ProcessSalesOrder(self,args,order): """ ProcessSalesOrder(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult """ pass def ProcessSalesOrderQueued(self,args,order): """ ProcessSalesOrderQueued(self: Outbound,args: ProcessSalesOrderLinesArgs,order: KeyValuePair[OutboundOrder,OutboundOrderLines]) -> ErpProcessSalesOrderLinesResult """ pass def ProcessShipment(self,arg): """ ProcessShipment(self: Outbound,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] """ pass def ProcessShipmentInfo(self,shipment,packages,arg): """ ProcessShipmentInfo(self: Outbound,shipment: ShipmentBase,packages: TransportPackages,arg: DataFlowObject[ProcessShipmentArgs]) -> DataFlowObject[ProcessShipmentArgs] """ pass def ProcessShipmentWithDefaultServiceLevel(self,cacheKey): """ ProcessShipmentWithDefaultServiceLevel(self: Outbound,cacheKey: CacheKey) """ pass def PutBackFromBatch(self,dfObject): """ PutBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] """ pass def PutItemIdBackFromBatch(self,dfObject): """ PutItemIdBackFromBatch(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] """ pass @staticmethod def RemoveBatch(batch): """ RemoveBatch(batch: Batch) """ pass def RemoveDirectOrder(self,args): """ RemoveDirectOrder(self: Outbound,args: DirectOrderCrudArgs) """ pass def RemoveDirectOrderLine(self,args): """ RemoveDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[bool] """ pass def RemoveDirectOrderLineItemIdentification(self,args): """ RemoveDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def RemovePackage(self,boxGuid,args,itemsToPack,itemsPacked): """ RemovePackage(self: Outbound,boxGuid: Guid,args: GetItemsToPackArgs) -> (bool,TransportItems,TransportPackages) """ pass def RemoveTransportPackages(self,packagesKey): """ RemoveTransportPackages(self: Outbound,packagesKey: CacheKey) """ pass def SaveBatch(self,batch): """ SaveBatch(self: Outbound,batch: Batch) -> Batch """ pass def SaveShipperSetting(self,shipperId,memberName,value): """ SaveShipperSetting(self: Outbound,shipperId: str,memberName: str,value: object) """ pass def ScanItemForPacking(self,args,result): """ ScanItemForPacking(self: Outbound,args: ItemPackScanArgs) -> (bool,ScanItemPackArgsResult) """ pass def SkipOrderForProcessingPack(self,batchId,orderNumber): """ SkipOrderForProcessingPack(self: Outbound,batchId: str,orderNumber: str) -> bool """ pass def UpdateBatchWithSettings(self,batchId,args): """ UpdateBatchWithSettings(self: Outbound,batchId: Guid,args: BatchUpdateArgs) """ pass def UpdateColloReference(self,dfObject): """ UpdateColloReference(self: Outbound,dfObject: DataFlowObject[PickArgs]) -> DataFlowObject[PickArgs] """ pass def UpdateDirectOrderLine(self,args): """ UpdateDirectOrderLine(self: Outbound,args: DirectOrderLineCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def UpdateDirectOrderLineItemIdentification(self,args): """ UpdateDirectOrderLineItemIdentification(self: Outbound,args: DirectOrderLineItemIdentificationCrudArgs) -> DataFlowObject[DirectOrderLine] """ pass def UpdatePackageData(self,args,newPackageData,packages): """ UpdatePackageData(self: Outbound,args: UpdateTransportPackageArgs,newPackageData: TransportPackage) -> (bool,TransportPackages) 
""" pass def UpdateReference(self,reference,cacheKey): """ UpdateReference(self: Outbound,reference: ColloReference,cacheKey: CacheKey) -> bool """ pass def UpdateTransportPackagesHeader(self,packagesKey,args,packages): """ UpdateTransportPackagesHeader(self: Outbound,packagesKey: CacheKey,args: UpdateTransportPackagesHeaderArgs) -> (bool,TransportPackages) """ pass def ValidateBatchedItem(self,cacheKey,selectedBatchPickLocation,itemCode): """ ValidateBatchedItem(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,itemCode: str) -> DataFlowObject[CacheKey] """ pass def ValidateBatchLocation(self,cacheKey,selectedBatchPickLocation,locationCode): """ ValidateBatchLocation(self: Outbound,cacheKey: CacheKey,selectedBatchPickLocation: BatchPickLocation,locationCode: str) -> DataFlowObject[CacheKey] """ pass def VoidShipment(self,shipment): """ VoidShipment(self: Outbound,shipment: DataFlowObject[HistoryShipment]) -> DataFlowObject[HistoryShipment] """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,stockManager,messaging): """ __new__(cls: type,stockManager: IStockManager,messaging: Messaging) """ pass Messaging=property(lambda self: object(),lambda self,v: None,lambda self: None) StockManager=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: StockManager(self: Outbound) -> IStockManager """ class Printing(MarshalByRefObject): """ Printing(general: General) """ def ZZZ(self): """hardcoded/mock instance of the class""" return Printing() instance=ZZZ() """hardcoded/returns an instance of the class""" def GetPickListsAll(self,pickLists): """ GetPickListsAll(self: Printing) -> (int,ReportItems) """ pass def GetPickListsForSettings(self,pickListNames): """ GetPickListsForSettings(self: Printing) -> (int,List[str]) """ pass def GetPickListsTable(self): """ GetPickListsTable(self: Printing) -> Hashtable """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: Printing) -> object """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def PrintPickBatchLabel(self,dfObject): """ PrintPickBatchLabel(self: Printing,dfObject: DataFlowObject[PrintPickbatchLabelArgs]) -> DataFlowObject[PrintPickbatchLabelArgs] """ pass def PrintPickList(self,args): """ PrintPickList(self: Printing,args: PrintPickingListArgs) -> bool """ pass def PrintSSCCLabels(self,dfObject): """ PrintSSCCLabels(self: Printing,dfObject: DataFlowObject[PrintSSCCLabelsArgs]) -> DataFlowObject[PrintSSCCLabelsArgs] """ pass def __init__(self,*args): """ x.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,general): """ __new__(cls: type,general: General) """ pass class PyLogger(object): # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return PyLogger() instance=ZZZ() """hardcoded/returns an instance of the class""" @staticmethod def Debug(msg): """ Debug(msg: str) """ pass @staticmethod def Error(*__args): """ Error(msg: str)Error(ex: Exception)Error(ex: BaseException) """ pass @staticmethod def Fatal(*__args): """ Fatal(msg: str)Fatal(ex: Exception)Fatal(ex: BaseException) """ pass @staticmethod def Info(msg): """ Info(msg: str) """ pass @staticmethod def Trace(msg): """ Trace(msg: str) """ pass @staticmethod def Warn(*__args): """ Warn(msg: str)Warn(ex: Exception)Warn(ex: BaseException) """ pass __all__=[ 'Debug', 'Error', 'Fatal', 'Info', 'Trace', 'Warn', ] class RemotePublishing(MarshalByRefObject): """ RemotePublishing(appSettings: IApplicationSettings,general: General) """ def ZZZ(self): """hardcoded/mock instance of the class""" return RemotePublishing() instance=ZZZ() """hardcoded/returns an instance of the class""" def AddRemotePublisher(self,req): """ AddRemotePublisher(self: RemotePublishing,req: AddRemotePublisherArgs) -> Publisher """ pass def DeleteRemotePublisher(self,req): """ DeleteRemotePublisher(self: RemotePublishing,req: DeleteRemotePublisherArgs) """ pass def DownloadFileAsync(self,filePath): """ DownloadFileAsync(self: RemotePublishing,filePath: str) -> Task[Stream] """ pass def EditRemotePublisher(self,req): """ EditRemotePublisher(self: RemotePublishing,req: EditRemotePublisherArgs) -> Publisher """ pass def EnsureLicenseExists(self): """ EnsureLicenseExists(self: RemotePublishing) """ pass def GetRemotePublishers(self): """ GetRemotePublishers(self: RemotePublishing) -> Publishers """ pass def InitializeLifetimeService(self): """ InitializeLifetimeService(self: RemotePublishing) -> object """ pass def MemberwiseClone(self,*args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. """ pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self,appSettings,general): """ __new__(cls: type,appSettings: IApplicationSettings,general: General) """ pass CurrentLicense=property(lambda self: object(),lambda self,v: None,lambda self: None) """Get: CurrentLicense(self: RemotePublishing) -> License """ # variables with complex values
1.789063
2
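The PyLogger stub above documents a purely static logging facade (Trace/Debug/Info/Warn/Error/Fatal). A minimal usage sketch, assuming the hosting application makes the real .NET PyLogger class behind this stub available to the embedded script:

# Hedged sketch: assumes the host injects the real PyLogger class
# that the stub above describes.
PyLogger.Info('script started')
try:
    result = 1 / 0
except Exception as ex:
    # Per the stub signatures, Error() accepts a message or an exception.
    PyLogger.Error(ex)
    PyLogger.Warn('falling back to a default result')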
q4.py
cs-fullstack-2019-fall/codeassessment2-Deltonjr2
0
12793087
<filename>q4.py<gh_stars>0
# ### Problem 4
# Write a program that allows users to compare words by their length. Implement a function called chk_strings that accepts 2 strings entered by the user and compares them by length
# The function should return true if the first string parameter has more characters (i.e. longer) than the second string passed into the function, otherwise, the function should return false.
# DO NOT print the result in the function, print the result using the return value provided by the function.
# Example Input/Output:
# ```
# Enter the first string: BIRD
# Enter the second string: COW
# BIRD is longer than COW
# ```

def chk_strings(first_string, second_string):
    # Return True when the first string has more characters than the second.
    return len(first_string) > len(second_string)

userinput1 = input("Enter the first string: ")
userinput2 = input("Enter the second string: ")

# Print the result outside the function, using its return value.
if chk_strings(userinput1, userinput2):
    print(f"{userinput1} is longer than {userinput2}")
else:
    print(f"{userinput1} is not longer than {userinput2}")
4.03125
4
502/modes.py
kaixiang1992/python-flask
0
12793088
from sqlalchemy import create_engine, Column, Integer, String, DATETIME
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime

# TODO: db_uri
# dialect+driver://username:password@host:port/database?charset=utf8
DB_URI = 'mysql+pymysql://root:[email protected]:3300/alembic_demo?charset=utf8'

engine = create_engine(DB_URI)
Base = declarative_base(bind=engine)

# TODO: define the User model
class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(50), nullable=False)
    # TODO: newly added fields
    age = Column(Integer, nullable=False)
    country = Column(String(50), nullable=False)
    create_time = Column(DATETIME, default=datetime.now)
2.953125
3
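A minimal usage sketch for the User model above, assuming the MySQL server behind DB_URI is reachable and the table exists (created via the alembic migration or directly); the sample field values are made up for illustration:

# Hedged sketch: assumes the MySQL server behind DB_URI is reachable.
from sqlalchemy.orm import sessionmaker

Base.metadata.create_all()  # or apply the alembic migration instead
Session = sessionmaker(bind=engine)
session = Session()
session.add(User(name='alice', age=30, country='TW'))  # sample values
session.commit()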
001_Snake Game/snake.py
Guilhermeasper/Tutoriais
0
12793089
<gh_stars>0
''' The pygame library is imported, together with its locals module, plus the randrange function, which we will use to generate random numbers for the snake and apple positions '''
import pygame
import pygame.locals
from random import randrange

print("Modules imported successfully")

''' Using a try/except block to check whether pygame was initialized correctly '''
try:
    pygame.init()
    print("The pygame module was initialized successfully")
except:
    print("The pygame module was not initialized successfully")

''' Declaration of the global variables used throughout the code: screen width and height, snake and apple size, scoreboard size, and colors in RGB format '''
largura = 640
altura = 480
tamanho = 20
placar = 40
branco = (255, 255, 255)
preto = (0, 0, 0)
vermelho = (255, 0, 0)
verde = (0, 200, 0)
verde_escuro = (0, 150, 0)
azul = (0, 0, 255)
prata = (192, 192, 192)
laranja = (255, 69, 0)
cinza = (79, 79, 79)
cinzaClaro = (220, 220, 220)

''' Game configuration: a clock to set the fps, a surface on which everything in the game is drawn, and the game window title '''
relogio = pygame.time.Clock()
fundo = pygame.display.set_mode((largura, altura))
pygame.display.set_caption("Snake Game")

''' The Texto class creates text objects to be displayed on the game screens; it takes the message, the color and the size as parameters '''
class Texto:
    def __init__(self, msg, cor, tam):
        self.font = pygame.font.SysFont(None, tam)
        self.texto = self.font.render(msg, True, cor)

    ''' The mostrar method draws on screen the text created by the class constructor '''
    def mostrar(self, x, y):
        fundo.blit(self.texto, [x, y])

''' The Cobra class defines the snake object's elements, such as head, length and direction, as well as the list holding the position of each snake segment; it takes the x and y coordinates as parameters, which will be where the snake starts on screen '''
class Cobra:
    def __init__(self, x, y):
        self.x = x
        self.y = y
        self.cabeca = [x, y]
        self.comp = 1
        self.cobra = [self.cabeca]
        self.direcao = ""

    ''' The move method takes the x and y parameters, which are the head's new coordinates, and appends the new head to the position list '''
    def move(self, x, y):
        self.cabeca = [x, y]
        self.cobra.append([x, y])

    ''' The cresce method increases the snake's length '''
    def cresce(self):
        self.comp += 1

    ''' The mostrar method draws each snake segment on screen '''
    def mostrar(self):
        indice = 0
        for XY in self.cobra:
            if indice == len(self.cobra) - 1:
                pygame.draw.rect(fundo, verde_escuro, [XY[0], XY[1], tamanho, tamanho])
            else:
                pygame.draw.rect(fundo, verde, [XY[0], XY[1], tamanho, tamanho])
            indice += 1

    ''' The rastro method removes the tail when the position list is longer than the snake's length '''
    def rastro(self):
        if len(self.cobra) > self.comp:
            del self.cobra[0]

    ''' The morreu method checks whether the snake ate itself; if so it returns True, otherwise it returns False '''
    def morreu(self):
        if any(Bloco == self.cabeca for Bloco in self.cobra[:-1]):
            return True
        return False

    ''' The reinicia method resets all the snake's values to their initial values, so the player can keep playing after losing '''
    def reinicia(self, x, y):
        self.x = x
        self.y = y
        self.cabeca = [x, y]
        self.comp = 1
        self.cobra = [self.cabeca]

''' The Maca class defines the apple object; it takes no parameters and has the x and y attributes, which are the apple's position on screen '''
class Maca:
    def __init__(self):
        self.x = randrange(0, largura - tamanho, 20)
        self.y = randrange(0, altura - tamanho - placar, 20)

    ''' The mostrar method draws the apple on screen '''
    def mostrar(self):
        pygame.draw.rect(fundo, vermelho, [self.x, self.y, tamanho, tamanho])

    ''' The reposicionar method picks new random x and y for the apple after the snake eats it '''
    def reposicionar(self):
        self.x = randrange(0, largura - tamanho, 20)
        self.y = randrange(0, altura - tamanho - placar, 20)

''' The Jogo class defines the rest of the game: control flags for playing and losing, the snake's position and speed, the score, and the apple and snake objects it creates; it takes no parameters '''
# noinspection DuplicatedCode
class Jogo:
    def __init__(self):
        self.jogando = False
        self.perdeu = False
        self.noMenu = True
        self.modo = None
        self.fundo = preto
        self.pos_x = randrange(0, largura - tamanho, 20)
        self.pos_y = randrange(0, altura - tamanho - placar, 20)
        self.velocidade_x = 0
        self.velocidade_y = 0
        self.pontos = 0
        self.cobra = Cobra(self.pos_x, self.pos_y)
        self.maca = Maca()

    ''' The iniciar method holds the game's main loop, which does absolutely everything that happens in the game '''
    def iniciar(self):
        pontos_fundo = 0
        while self.jogando:
            ''' Event iterator: every event that happens at runtime can be read from pygame.event.get(); here we check whether the game was closed and whether any arrow key was pressed to move the snake '''
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.jogando = False
                    break
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_LEFT and self.cobra.direcao != "direita":
                        self.cobra.direcao = "esquerda"
                    if event.key == pygame.K_RIGHT and self.cobra.direcao != "esquerda":
                        self.cobra.direcao = "direita"
                    if event.key == pygame.K_UP and self.cobra.direcao != "baixo":
                        self.cobra.direcao = "cima"
                    if event.key == pygame.K_DOWN and self.cobra.direcao != "cima":
                        self.cobra.direcao = "baixo"
                    if event.key == pygame.K_SPACE:
                        self.pontos += 1
                        pontos_fundo += 1
                        self.cobra.cresce()
            ''' Checks that the player has not lost the game yet '''
            if self.jogando:
                ''' Uncomment this and find out what it does '''
                # if pontos_fundo == 10:
                #     pontos_fundo = 0
                #     if self.fundo == branco:
                #         self.fundo = preto
                #     else:
                #         self.fundo = branco
                ''' Clears the screen at the start of each loop iteration '''
                fundo.fill(self.fundo)
                ''' Checks which direction the snake is heading and sets the new position in that direction '''
                if self.cobra.direcao == "cima":
                    self.pos_y -= tamanho
                elif self.cobra.direcao == "baixo":
                    self.pos_y += tamanho
                elif self.cobra.direcao == "esquerda":
                    self.pos_x -= tamanho
                elif self.cobra.direcao == "direita":
                    self.pos_x += tamanho
                else:
                    pass
                ''' Checks whether the snake and the apple are at the same position; if so, the apple is repositioned, the snake grows and the score increases '''
                if self.pos_x == self.maca.x and self.pos_y == self.maca.y:
                    self.maca.reposicionar()
                    self.cobra.cresce()
                    self.pontos += 1
                    pontos_fundo += 1
                ''' First the mode is checked: if free mode was chosen in the menu, the game has no borders and you can wrap around the map; if classic mode was chosen, we check whether the snake crossed any border, and if it did, the game is flagged as over because the player lost and the "perdido" method is called '''
                if self.modo == "livre":
                    if self.pos_x + tamanho > largura:
                        self.pos_x = 0
                    if self.pos_x < 0:
                        self.pos_x = largura - tamanho
                    if self.pos_y + tamanho > altura - placar:
                        self.pos_y = 0
                    if self.pos_y < 0:
                        self.pos_y = altura - tamanho - placar
                else:
                    pygame.draw.rect(fundo, branco, [0, 0, 2, altura])
                    pygame.draw.rect(fundo, branco, [0, 0, largura, 2])
                    pygame.draw.rect(fundo, branco, [largura - 2, 0, 2, altura])
                    pygame.draw.rect(fundo, branco, [0, altura - placar - 2, largura, 2])
                    if self.pos_x + tamanho > largura:
                        self.jogando = False
                        self.perdeu = True
                        self.perdido()
                    if self.pos_x < 0:
                        self.jogando = False
                        self.perdeu = True
                        self.perdido()
                    if self.pos_y + tamanho > altura - placar:
                        self.jogando = False
                        self.perdeu = True
                        self.perdido()
                    if self.pos_y < 0:
                        self.jogando = False
                        self.perdeu = True
                        self.perdido()
                ''' Moves the snake to the new position passed as method arguments '''
                self.cobra.move(self.pos_x, self.pos_y)
                ''' Clears the trail left by the extra blocks '''
                self.cobra.rastro()
                ''' Checks whether the snake ate itself; if it did, the game is flagged as lost and the "perdido" method is called '''
                if self.cobra.morreu():
                    self.jogando = False
                    self.perdeu = True
                    self.perdido()
                ''' Draws the snake on screen '''
                self.cobra.mostrar()
                ''' Draws the scoreboard and the text with the current score '''
                pygame.draw.rect(fundo, branco, [0, altura - placar, largura, placar])
                textoPlacarSombra = Texto("Score:" + str(self.pontos), cinza, 25)
                textoPlacarSombra.mostrar(9, altura - 31)
                textoPlacar = Texto("Score:" + str(self.pontos), branco, 25)
                textoPlacar.mostrar(10, altura - 30)
                ''' Draws the apple on screen '''
                self.maca.mostrar()
                ''' Updates the whole screen with every element drawn above '''
                pygame.display.update()
                ''' Sets the game's fps '''
                relogio.tick(15)

    ''' The perdido method holds the defeat screen loop; it does everything that happens upon losing, letting the player play again or quit the game '''
    def perdido(self):
        while self.perdeu:
            ''' Event iterator: every event that happens at runtime can be read from pygame.event.get(); we check whether the player wants to quit the game or play again; when playing again, the whole game is reset and control returns to the iniciar method '''
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.jogando = False
                    self.perdeu = False
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        self.jogando = False
                        self.perdeu = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    mouse_pos = pygame.mouse.get_pos()
                    mouse_x = mouse_pos[0]
                    mouse_y = mouse_pos[1]
                    if 143 < mouse_x < 143 + 369 and 168 < mouse_y < 168 + 51:
                        self.jogando = False
                        self.perdeu = False
                        self.noMenu = True
                        self.pos_x = randrange(0, largura - tamanho, 20)
                        self.pos_y = randrange(0, altura - tamanho - placar, 20)
                        self.cobra.direcao = ""
                        self.maca.reposicionar()
                        self.cobra.reinicia(self.pos_x, self.pos_y)
                        self.velocidade_x = 0
                        self.velocidade_y = 0
                        self.pontos = 0
                    if 193 < mouse_x < 193 + 279 and 268 < mouse_y < 268 + 58:
                        self.jogando = True
                        self.perdeu = False
                        self.pos_x = randrange(0, largura - tamanho, 20)
                        self.pos_y = randrange(0, altura - tamanho - placar, 20)
                        self.cobra.direcao = ""
                        self.maca.reposicionar()
                        self.cobra.reinicia(self.pos_x, self.pos_y)
                        self.velocidade_x = 0
                        self.velocidade_y = 0
                        self.pontos = 0
            ''' Clears the screen '''
            fundo.fill(branco)
            ''' Draws "<NAME>" on screen '''
            textoPerdeuSombra = Texto("<NAME>", cinza, 80)
            textoPerdeuSombra.mostrar(159, 29)
            textoPerdeu = Texto("<NAME>", vermelho, 80)
            textoPerdeu.mostrar(160, 30)
            ''' Draws the player's final score '''
            textoPontuacaoSombra = Texto("Final Score: " + str(self.pontos), cinza, 50)
            textoPontuacaoSombra.mostrar(179, 99)
            textoPontuacao = Texto("Final Score: " + str(self.pontos), prata, 50)
            textoPontuacao.mostrar(180, 100)
            ''' Draws the button to go back to the selection menu '''
            pygame.draw.rect(fundo, prata, [143, 168, 369, 51])
            pygame.draw.rect(fundo, preto, [145, 170, 365, 47])
            textoContinuar = Texto("Back to Menu", branco, 70)
            textoContinuar.mostrar(150, 173)
            ''' Draws the new game button '''
            pygame.draw.rect(fundo, prata, [193, 268, 279, 58])
            pygame.draw.rect(fundo, preto, [195, 270, 275, 54])
            textoContinuar = Texto("New Game", branco, 70)
            textoContinuar.mostrar(210, 273)
            ''' Updates the screen with every element '''
            pygame.display.update()

    def menu(self):
        while self.noMenu:
            ''' Event iterator: every event that happens at runtime can be read from pygame.event.get(); we check whether the player wants to quit the game or start playing; when starting, the game mode is set and the iniciar method is called '''
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.noMenu = False
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        self.noMenu = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    mouse_pos = pygame.mouse.get_pos()
                    mouse_x = mouse_pos[0]
                    mouse_y = mouse_pos[1]
                    if 143 < mouse_x < 143 + 359 and 168 < mouse_y < 168 + 51:
                        self.jogando = True
                        self.perdeu = False
                        self.noMenu = False
                        self.modo = "classico"
                        self.iniciar()
                    if 183 < mouse_x < 183 + 279 and 268 < mouse_y < 268 + 51:
                        self.jogando = True
                        self.noMenu = False
                        self.perdeu = False
                        self.modo = "livre"
                        self.iniciar()
            ''' Clears the screen '''
            fundo.fill(branco)
            ''' Draws the "Snake Game" title on screen '''
            # textoPerdeuSombra = Texto("Snake Game", cinza, 100)
            # textoPerdeuSombra.mostrar(108, 28)
            textoPerdeu = Texto("Snake Game", preto, 100)
            textoPerdeu.mostrar(110, 30)
            ''' Draws the classic mode button '''
            pygame.draw.rect(fundo, prata, [143, 168, 359, 51])
            pygame.draw.rect(fundo, preto, [145, 170, 355, 47])
            textoContinuar = Texto("Classic Mode", branco, 70)
            textoContinuar.mostrar(150, 173)
            ''' Draws the free mode button '''
            pygame.draw.rect(fundo, prata, [183, 268, 279, 51])
            pygame.draw.rect(fundo, preto, [185, 270, 275, 47])
            textoContinuar = Texto("Free Mode", branco, 70)
            textoContinuar.mostrar(190, 273)
            ''' Updates the screen with every element '''
            pygame.display.update()

''' Game instance '''
if __name__ == '__main__':
    instancia = Jogo()
    instancia.menu()  # Starting the game through the instance
''' Closes the game's main window '''
pygame.quit()
3.0625
3
main.py
mangoprojectfactory/Bank-System
0
12793090
<filename>main.py
class bank:
    def __init__(self):
        self.accountDict = {}

    def inputCheck(self, email, password):
        if email not in self.accountDict:
            return 'EmailNotFound'
        if self.accountDict[email]['password'] != password:
            return 'PasswordError'
        return None

    def createAccount(self, email, password, name):
        if email in self.accountDict:
            return 'EmailUsed'
        if not name:
            return 'NameNull'
        self.accountDict[email] = {'name': name, 'password': password, 'deposit': 0}
        return 'CreateSuccessful'

    def deleteAccount(self, email, password):
        check = self.inputCheck(email, password)
        if check:
            return check
        confirm = input(f'是否確定刪除{self.accountDict[email]["name"]}( True/False ): ')
        if confirm == 'True':
            del self.accountDict[email]
            return 'ActionSuccessful'
        if confirm == 'False':
            return 'ActionCancel'
        return 'UnknownAction'

    def saveMoney(self, email, password, amount):
        check = self.inputCheck(email, password)
        if check:
            return check
        if amount.isdigit():
            amount = int(amount)
            self.accountDict[email]['deposit'] += amount
            return 'SaveSuccessful'
        return 'AmountEnterError'

    def takeMoney(self, email, password, amount):
        check = self.inputCheck(email, password)
        if check:
            return check
        if amount.isdigit():
            amount = int(amount)
            if self.accountDict[email]['deposit'] < amount:
                return 'DepositNotEnough'
            self.accountDict[email]['deposit'] -= amount
            return 'TakeSuccessful'
        return 'AmountEnterError'

    def showDeposit(self, email, password):
        check = self.inputCheck(email, password)
        if check:
            return check
        return self.accountDict[email]['deposit']


Bank = bank()


def output(string: str):
    stringDict = {
        # Null
        'EmailNull': '[Error] 請輸入E-mail',
        'PasswordNull': '[Error] 請輸入密碼',
        'NameNull': '[Error] 請輸入帳戶名稱',
        'AmountNull': '[Error] 請輸入金額',
        # Account Error
        'EmailNotFound': '[Error] 資料庫內無此E-mail',
        'EmailUsed': '[Error] E-mail已使用',
        'PasswordError': '[Error] 密碼輸入錯誤',
        # Action
        'ActionSuccessful': '操作成功',
        'ActionCancel': '[Error] 操作取消',
        'UnknownAction': '[Error] 未知的操作',
        'SaveSuccessful': '成功存入銀行帳戶',
        'TakeSuccessful': '取款成功',
        # Amount
        'AmountEnterError': '[Error] 金額輸入錯誤',
        # Deposit
        'DepositNotEnough': '[Error] 存款不足',
        # Account
        'CreateSuccessful': '帳戶創建成功'
    }
    return stringDict[string]


def line():
    print('=' * 100)


line()
print('本系統由芒果凍布丁製作',
      '有問題請至Discord找本作者: YT Mango#4092',
      'Github: https://github.com/EvanHsieh0415',
      '本著作使用創用CC授權 CC BY-NC-ND 3.0 TW',
      sep='\n')
line()
print('歡迎來到芒果銀行!!!',
      '若您是新用戶,請先創建帳戶 (輸入"create account")',
      '刪除帳戶 請輸入"delete account"',
      '存款 請輸入"save money"',
      '取款 請輸入"take money"',
      '離開此系統 請輸入"leave"',
      sep='\n')
line()
trigger = True
while trigger:
    action = input('輸入您要進行的操作: ')
    if not action:
        print('[Error] 請輸入操作名稱')
        line()
        continue
    if action not in ('create account', 'delete account', 'save money', 'take money', 'leave'):
        print('[Error] 請輸入正確的操作')
        line()
        continue
    if action == 'leave':
        print('程式關閉')
        trigger = False
    if action in ('create account', 'delete account', 'save money', 'take money'):
        email = input('請輸入E-mail: ')
        if not email:
            print(output('EmailNull'))
            line()
            continue
        password = input('請輸入密碼: ')
        if not password:
            print(output('PasswordNull'))
            line()
            continue
        if action in ('save money', 'take money'):
            amount = input('請輸入金額: ')
            if not amount:
                print(output('AmountNull'))
                line()
                continue
        if action == 'create account':
            name = input('請輸入使用者名稱: ')
            out = Bank.createAccount(email, password, name)
        elif action == 'delete account':
            out = Bank.deleteAccount(email, password)
        elif action == 'save money':
            out = Bank.saveMoney(email, password, amount)
        elif action == 'take money':
            out = Bank.takeMoney(email, password, amount)
        print(output(out))
        if out in ('SaveSuccessful', 'TakeSuccessful', 'DepositNotEnough'):
            print(f'存款餘額: {Bank.showDeposit(email, password)}')
    line()
input('Press any key to continue . . .')
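# A minimal non-interactive sketch of the `bank` class above, kept in comments
# so the script's behaviour is unchanged (the e-mail and password below are
# placeholder values, not part of the original program):
#
#     demo = bank()
#     assert demo.createAccount('user@example.com', 'pw', 'Mango') == 'CreateSuccessful'
#     assert demo.saveMoney('user@example.com', 'pw', '100') == 'SaveSuccessful'
#     assert demo.showDeposit('user@example.com', 'pw') == 100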
3.734375
4
python_modules/dagster/dagster/core/definitions/mode.py
atsuhiro/dagster
0
12793091
<reponame>atsuhiro/dagster<filename>python_modules/dagster/dagster/core/definitions/mode.py
from dagster import check
from dagster.loggers import default_loggers

from .logger import LoggerDefinition
from .resource import ResourceDefinition

DEFAULT_MODE_NAME = 'default'


class ModeDefinition:
    '''Defines a "mode" in which a pipeline can operate.

    A mode provides a set of resource implementations as well as configuration for logging.

    Args:
        name (Optional[str]): The name of the mode, defaults to 'default'.
        resources (Optional[List[ResourceDefinition]]): The set of resources for this mode.
        loggers (Optional[List[LoggerDefinition]]): The set of loggers to use in this mode.
        system_storage_defs (Optional[List[SystemStorageDefinition]]): The set of system storage
            options available when executing in this mode. Defaults to 'in_memory' and 'filesystem'.
        description (Optional[str])
    '''

    def __init__(
        self,
        name=DEFAULT_MODE_NAME,
        resources=None,
        loggers=None,
        system_storage_defs=None,
        description=None,
    ):
        from .system_storage import SystemStorageDefinition, mem_system_storage, fs_system_storage

        self.name = check.str_param(name, 'name')
        self.resource_defs = check.opt_dict_param(
            resources, 'resources', key_type=str, value_type=ResourceDefinition
        )
        self.loggers = (
            check.opt_dict_param(loggers, 'loggers', key_type=str, value_type=LoggerDefinition)
            or default_loggers()
        )
        self.system_storage_defs = check.list_param(
            system_storage_defs if system_storage_defs else [mem_system_storage, fs_system_storage],
            'system_storage_def',
            of_type=SystemStorageDefinition,
        )
        self.description = check.opt_str_param(description, 'description')

    def get_system_storage_def(self, name):
        check.str_param(name, 'name')
        for system_storage_def in self.system_storage_defs:
            if system_storage_def.name == name:
                return system_storage_def

        check.failed('{} storage definition not found'.format(name))
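# A minimal usage sketch of ModeDefinition, kept in comments (hedged:
# `my_resource` and `my_logger` are hypothetical ResourceDefinition/
# LoggerDefinition instances, not part of this module):
#
#     mode = ModeDefinition(
#         name='local',
#         resources={'db': my_resource},
#         loggers={'console': my_logger},
#     )
#     storage = mode.get_system_storage_def('in_memory')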
2.375
2
56-Page-Scraper/main.py
PawelZabinski/ocr-code-challenges-files
0
12793092
import requests
import json
import ehp

# Page Scraper
# Have the program connect to a site, pull out all the links or images, and save them to a list.


class PageScraper:
    def __init__(self, url):
        self.url = url
        self.parser = ehp.Html()
        self.dom = self.__dom()

    def __dom(self):
        req = requests.get(self.url)
        html = req.text

        dom = self.parser.feed(html)
        return dom

    def links(self):
        for link in self.dom.find('a'):
            yield link.attr['href']

    def images(self):
        for image in self.dom.find('img'):
            yield image.attr['src']


def main():
    url = 'https://' + input('Enter a URL: https://')
    pageScraper = PageScraper(url)

    links = [i for i in pageScraper.links()]
    images = [i for i in pageScraper.images()]

    # Load any previously saved results; start fresh if the file is missing or empty.
    try:
        with open('links.json', 'r') as f:
            encoded = f.read()
            decoded = json.loads(encoded) if len(encoded) else []
    except FileNotFoundError:
        decoded = []

    for i in decoded:
        if i['site'] == url:
            return

    decoded.append({
        'site': url,
        'links': links,
        'images': images
    })

    with open('links.json', 'w') as f:
        encoded = json.dumps(decoded, indent=2)
        f.write(encoded)


if __name__ == '__main__':
    main()
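# A brief programmatic sketch of the class above, kept in comments (hedged:
# example.com is a placeholder URL, and the page must be reachable to run this):
#
#     scraper = PageScraper('https://example.com')
#     print(list(scraper.links()))
#     print(list(scraper.images()))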
3.375
3
fnss/adapters/__init__.py
brucespang/fnss
114
12793093
"""Tools for exporting and importing FNSS data structures (topologies, event schedules and traffic matrices) to/from other simulators or emulators """ from fnss.adapters.autonetkit import * from fnss.adapters.mn import * from fnss.adapters.ns2 import * from fnss.adapters.omnetpp import * from fnss.adapters.jfed import *
1.054688
1
examples/asyncio/basic.py
bowlofeggs/sqlalchemy
0
12793094
"""Illustrates the asyncio engine / connection interface. In this example, we have an async engine created by :func:`_engine.create_async_engine`. We then use it using await within a coroutine. """ import asyncio from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.ext.asyncio import create_async_engine meta = MetaData() t1 = Table( "t1", meta, Column("id", Integer, primary_key=True), Column("name", String) ) async def async_main(): # engine is an instance of AsyncEngine engine = create_async_engine( "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, ) # conn is an instance of AsyncConnection async with engine.begin() as conn: # to support SQLAlchemy DDL methods as well as legacy functions, the # AsyncConnection.run_sync() awaitable method will pass a "sync" # version of the AsyncConnection object to any synchronous method, # where synchronous IO calls will be transparently translated for # await. await conn.run_sync(meta.drop_all) await conn.run_sync(meta.create_all) # for normal statement execution, a traditional "await execute()" # pattern is used. await conn.execute( t1.insert(), [{"name": "some name 1"}, {"name": "some name 2"}] ) async with engine.connect() as conn: # the default result object is the # sqlalchemy.engine.Result object result = await conn.execute(t1.select()) # the results are buffered so no await call is necessary # for this case. print(result.fetchall()) # for a streaming result that buffers only segments of the # result at time, the AsyncConnection.stream() method is used. # this returns a sqlalchemy.ext.asyncio.AsyncResult object. async_result = await conn.stream(t1.select()) # this object supports async iteration and awaitable # versions of methods like .all(), fetchmany(), etc. async for row in async_result: print(row) asyncio.run(async_main())
3.671875
4
scripts/generate_pkgbuild.py
zivid/arch-linux-pkgbuild-generator
1
12793095
<reponame>zivid/arch-linux-pkgbuild-generator<filename>scripts/generate_pkgbuild.py
from pathlib import Path
import hashlib
import argparse

import requests


def _sha256sum(url):
    request = requests.get(url, stream=True)
    sha256 = hashlib.sha256()
    print(f"Calculating sha256sum of {url}", end="", flush=True)
    for chunk in request.iter_content(1000000):
        print(".", end="", flush=True)
        sha256.update(chunk)
    print("done", flush=True)
    return sha256.hexdigest()


def _ubuntu_package_name_to_arch(name):
    return name if name.startswith("zivid") else f"zivid-{name}"


class Pkgbuild:
    company_url = "https://www.zivid.com"
    description = "Defining the Future of 3D Machine Vision"
    dependencies = {
        "zivid-telicam-driver": (),
        "zivid": ("zivid-telicam-driver", "opencl-driver"),
        "zivid-studio": ("zivid",),
        "zivid-tools": ("zivid",),
        "zivid-genicam": ("zivid",),
    }
    replaces = {
        "zivid-telicam-driver": ("zivid-telicam-sdk",),
        "zivid": (),
        "zivid-studio": (),
        "zivid-tools": (),
        "zivid-genicam": (),
    }

    def __init__(self, base_dir: Path, template_file: Path):
        self.base_dir = base_dir
        with template_file.open() as in_file:
            self.template = in_file.read()

    def source_url(self, release_version, package_name, package_version):
        return f"{self.company_url}/hubfs/softwarefiles/releases/{release_version}/u18/{package_name}_{package_version}_amd64.deb"

    def configure(self, release_version, ubuntu_package_name, package_version):
        source_url = self.source_url(
            release_version, ubuntu_package_name, package_version
        )
        package_name = _ubuntu_package_name_to_arch(ubuntu_package_name)
        return self.template.format(
            name=package_name,
            pkgver=package_version.replace("-", "_"),
            description=self.description,
            url=self.company_url,
            dependencies=" ".join(self.dependencies[package_name]),
            conflicts=" ".join(self.replaces[package_name]),
            provides=" ".join(self.replaces[package_name]),
            source=source_url,
            sha256sum=_sha256sum(source_url),
        )

    def write(self, release_version, ubuntu_package_name, package_version):
        package_name = _ubuntu_package_name_to_arch(ubuntu_package_name)
        out_dir = self.base_dir / package_name
        out_dir.mkdir(parents=True)
        out_file_name = out_dir / "PKGBUILD"
        print(f"Writing {out_file_name}")
        with out_file_name.open("w") as out_file:
            out_file.write(
                self.configure(release_version, ubuntu_package_name, package_version)
            )


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--out-dir")
    parser.add_argument("--template")
    parser.add_argument("--release-version")
    parser.add_argument("--package")
    parser.add_argument("--package-version")
    options = parser.parse_args()

    pkgbuild = Pkgbuild(Path(options.out_dir), Path(options.template))
    pkgbuild.write(options.release_version, options.package, options.package_version)
    print("Done")


if __name__ == "__main__":
    main()
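# A hedged invocation sketch; the paths and version strings below are
# placeholders for illustration, not values taken from the repository:
#
#     python scripts/generate_pkgbuild.py \
#         --out-dir build \
#         --template PKGBUILD.template \
#         --release-version 2.5.0 \
#         --package zivid \
#         --package-version 2.5.0+a-1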
2.390625
2
definition.py
SensorDX/rainqc
1
12793096
# Root dir
import os

ROOT_DIR = os.path.dirname(os.path.abspath(__file__))

# Weather variable vocabulary. This should match the variable names used in the tahmoapi.
RAIN = "precipitation"
TEMP = "temperature"
REL = "humidity"
WINDR = "winddirection"
SRAD = "radiation"
2.21875
2
molo/core/migrations/0018_remove_wagtail_forms.py
Ishma59/molo
25
12793097
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-07-23 19:11
from __future__ import unicode_literals

from django.db import migrations
import molo.core.blocks
import molo.core.models
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks


class Migration(migrations.Migration):

    dependencies = [
        ('wagtailforms', '0003_capitalizeverbose'),
        ('wagtailcore', '0040_page_draft_title'),
        ('wagtailredirects', '0006_redirect_increase_max_length'),
        ('core', '0017_add_google_search_console'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='formfield',
            name='page',
        ),
        migrations.RemoveField(
            model_name='formpage',
            name='language',
        ),
        migrations.RemoveField(
            model_name='formpage',
            name='page_ptr',
        ),
        migrations.RemoveField(
            model_name='formpage',
            name='translated_pages',
        ),
        migrations.AlterField(
            model_name='articlepage',
            name='body',
            field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', molo.core.blocks.MarkDownBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('numbered_list', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Item'))), ('page', wagtail.core.blocks.PageChooserBlock()), ('media', molo.core.models.MoloMediaBlock(icon='media')), ('richtext', wagtail.core.blocks.RichTextBlock()), ('html', wagtail.core.blocks.RawHTMLBlock())], blank=True, null=True),
        ),
        migrations.DeleteModel(
            name='FormField',
        ),
        migrations.DeleteModel(
            name='FormIndexPage',
        ),
        migrations.DeleteModel(
            name='FormPage',
        ),
    ]
1.585938
2
apis_server/test/test_models_controller.py
ilyde-platform/ilyde-apis
0
12793098
# coding: utf-8
#
# Copyright (c) 2020-2021 Hopenly srl.
#
# This file is part of Ilyde.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import

import unittest

from flask import json
from six import BytesIO

from apis_server.test import BaseTestCase


class TestModelsController(BaseTestCase):
    """ModelsController integration test stubs"""

    def test_create_model(self):
        """Test case for create_model

        Create a model
        """
        model_serializer = {}
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/models',
            method='POST',
            headers=headers,
            data=json.dumps(model_serializer),
            content_type='application/json')
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_create_model_version(self):
        """Test case for create_model_version

        Create a model version
        """
        model_version_serializer = {}
        query_string = [('name', 'name_example')]
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/create',
            method='POST',
            headers=headers,
            data=json.dumps(model_version_serializer),
            content_type='application/json',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_delete_model(self):
        """Test case for delete_model

        Delete a model
        """
        query_string = [('name', 'name_example')]
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/models/delete',
            method='DELETE',
            headers=headers,
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_delete_model_version(self):
        """Test case for delete_model_version

        delete a model version
        """
        query_string = [('name', 'name_example'),
                        ('version', 'version_example')]
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/delete',
            method='DELETE',
            headers=headers,
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_get_model_version(self):
        """Test case for get_model_version

        get a model version
        """
        query_string = [('name', 'name_example'),
                        ('version', 'version_example')]
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/get',
            method='GET',
            headers=headers,
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_list_model_versions(self):
        """Test case for list_model_versions

        list versions of a model
        """
        query_string = [('name', 'name_example')]
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/list',
            method='GET',
            headers=headers,
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_retrieve_model(self):
        """Test case for retrieve_model

        Retrieve a model
        """
        query_string = [('name', 'name_example')]
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/models/get',
            method='GET',
            headers=headers,
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_transition_model_version_stage(self):
        """Test case for transition_model_version_stage

        Transition model version stage
        """
        inline_object7_serializer = {}
        query_string = [('name', 'name_example'),
                        ('version', 'version_example')]
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/transition-stage',
            method='PATCH',
            headers=headers,
            data=json.dumps(inline_object7_serializer),
            content_type='application/json',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_update_model(self):
        """Test case for update_model

        Update a model
        """
        inline_object5_serializer = {}
        query_string = [('name', 'name_example')]
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/models/update',
            method='PATCH',
            headers=headers,
            data=json.dumps(inline_object5_serializer),
            content_type='application/json',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))

    def test_update_model_version(self):
        """Test case for update_model_version

        update a model version
        """
        inline_object6_serializer = {}
        query_string = [('name', 'name_example'),
                        ('version', 'version_example')]
        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer special-key',
        }
        response = self.client.open(
            '/api/v1/model-versions/update',
            method='PATCH',
            headers=headers,
            data=json.dumps(inline_object6_serializer),
            content_type='application/json',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))


if __name__ == '__main__':
    unittest.main()
2.21875
2
oarepo_fsm/errors.py
oarepo/oarepo-fsm
0
12793099
<filename>oarepo_fsm/errors.py<gh_stars>0
#
# Copyright (C) 2020 CESNET.
#
# oarepo-fsm is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.

"""OArepo FSM library for record state transitions."""

import json

from flask import g
from flask_login import current_user
from invenio_rest.errors import RESTException


class FSMException(RESTException):
    """Base Exception for OArepo FSM module, inherit, don't raise."""

    code = 400

    @property
    def name(self):
        """The status name."""
        return type(self).__name__

    def get_body(self, environ=None):
        """Get the request body."""
        body = dict(
            status=self.code,
            message=self.get_description(environ),
            error_module="OArepo-FSM",
            error_class=self.name,
        )

        errors = self.get_errors()
        if self.errors:
            body["errors"] = errors

        if self.code and (self.code >= 500) and hasattr(g, "sentry_event_id"):
            body["error_id"] = str(g.sentry_event_id)

        return json.dumps(body)


class MissingRequiredParameterError(FSMException):
    """Exception raised when required parameter is missing."""


class DirectStateModificationError(FSMException):
    """Raised when a direct modification of record state is attempted."""

    code = 403

    def __init__(self, **kwargs):
        """Initialize exception."""
        self.description = (
            "Direct modification of state is not allowed."
        )
        super().__init__(**kwargs)


class TransitionNotAvailableError(FSMException):
    """Raised when the requested transition is not available to current user."""

    code = 404

    def __init__(self, transition=None, **kwargs):
        """Initialize exception."""
        self.description = (
            "Transition {} is not available on this record".format(transition)
        )
        super().__init__(**kwargs)


class InvalidSourceStateError(FSMException):
    """Raised when source state of the record is invalid for transition."""

    def __init__(self, source=None, target=None, **kwargs):
        """Initialize exception."""
        self.description = (
            "Transition from {} to {} is not allowed".format(source, target)
        )
        super().__init__(**kwargs)


class RecordNotStatefulError(FSMException):
    """Raised when record does not inherit FSMMixin."""

    def __init__(self, record_cls=None, **kwargs):
        """Initialize exception."""
        self.description = (
            "{} must be a subclass of oarepo_fsm.mixins.FSMMixin".format(record_cls)
        )
        super().__init__(**kwargs)


class InvalidPermissionError(FSMException):
    """Raised when permissions are not satisfied for transition."""

    code = 403

    def __init__(self, permissions=None, **kwargs):
        """Initialize exception."""
        self.description = (
            "This transition is not permitted "
            "for your user {}. Required: '{}'".format(current_user, permissions)
        )
        super().__init__(**kwargs)
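# A minimal usage sketch, kept in comments (hedged: `record.available_transitions()`
# is a hypothetical helper for illustration, not defined in this module):
#
#     def apply_transition(record, transition):
#         if transition not in record.available_transitions():
#             raise TransitionNotAvailableError(transition=transition)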
1.90625
2
flash/build.py
mikeal/windmill
2
12793100
<gh_stars>1-10
#!/usr/bin/env python

import optparse
import os
import re
import shutil

# Location of compiler
MXMLC_PATH = 'mxmlc -debug -verbose-stacktraces -incremental=true -compiler.strict -compiler.show-actionscript-warnings'

# For replacing .as with .swf
as_re = re.compile(r'\.as$|\.mxml$')


def windmill():
    cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/Windmill.as -o ./org/windmill/Windmill.swf'
    os.system(cmd)


def bootstrap():
    cmd = MXMLC_PATH + ' -source-path=. ./org/windmill/WMBootstrap.as -o ./org/windmill/WMBootstrap.swf'
    os.system(cmd)


def clean():
    for root, dirs, file_list in os.walk('./'):
        for file in file_list:
            if file.endswith('.swf') or file.endswith('.swc'):
                path = root + '/' + file
                cmd = 'rm ' + path
                #print cmd
                os.system(cmd)


def parse_opts():
    parser = optparse.OptionParser()
    parser.add_option('-t', '--target', dest='target',
                      help='build TARGET (windmill/bootstrap/all/clean, default is all)',
                      metavar='TARGET',
                      choices=('windmill', 'bootstrap', 'all', 'clean'),
                      default='all')
    opts, args = parser.parse_args()
    return opts, args


def main(o, a):
    target = o.target
    # Build only the AS tests into loadable swfs
    if target == 'windmill':
        windmill()
    # Build only the test app we use to run the tests against
    elif target == 'bootstrap':
        bootstrap()
    # Build everything, natch
    elif target == 'all':
        windmill()
        bootstrap()
    # Clean out any swfs in the directory
    elif target == 'clean':
        clean()
    else:
        print('Not a valid target.')


if __name__ == "__main__":
    main(*parse_opts())
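# A brief invocation sketch, kept in comments (assumes mxmlc is on the PATH,
# per MXMLC_PATH above):
#
#     python build.py -t windmill
#     python build.py --target clean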
2.46875
2
Mathematics/106bombyx/usage.py
667MARTIN/Epitech
40
12793101
<filename>Mathematics/106bombyx/usage.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte
##
## Made by <NAME>
## Login <<EMAIL>>
##
## Started on Sun Dec 7 16:31:56 2014 <NAME>
## Last update Sun Feb 22 18:32:16 2015 <NAME>
##

print("Usage: ./106bombyx k(integer >= 1 and <= 4).")
1.9375
2
bomradarloop/__init__.py
kuchel77/bomradarloop
0
12793102
import datetime as dt
import io
import logging
import os
import time

import PIL.Image
import requests

RADARS = {
    'Adelaide':        {'id': '643', 'delta': 360, 'frames': 6},
    'Albany':          {'id': '313', 'delta': 600, 'frames': 4},
    'AliceSprings':    {'id': '253', 'delta': 600, 'frames': 4},
    'Bairnsdale':      {'id': '683', 'delta': 600, 'frames': 4},
    'Bowen':           {'id': '243', 'delta': 600, 'frames': 4},
    'Brisbane':        {'id': '663', 'delta': 360, 'frames': 6},
    'Broome':          {'id': '173', 'delta': 600, 'frames': 4},
    'Cairns':          {'id': '193', 'delta': 360, 'frames': 6},
    'Canberra':        {'id': '403', 'delta': 360, 'frames': 6},
    'Carnarvon':       {'id': '053', 'delta': 600, 'frames': 4},
    'Ceduna':          {'id': '333', 'delta': 600, 'frames': 4},
    'Dampier':         {'id': '153', 'delta': 600, 'frames': 4},
    'Darwin':          {'id': '633', 'delta': 360, 'frames': 6},
    'Emerald':         {'id': '723', 'delta': 600, 'frames': 4},
    'Esperance':       {'id': '323', 'delta': 600, 'frames': 4},
    'Geraldton':       {'id': '063', 'delta': 600, 'frames': 4},
    'Giles':           {'id': '443', 'delta': 600, 'frames': 4},
    'Gladstone':       {'id': '233', 'delta': 600, 'frames': 4},
    'Gove':            {'id': '093', 'delta': 600, 'frames': 4},
    'Grafton':         {'id': '283', 'delta': 600, 'frames': 4},
    'Gympie':          {'id': '083', 'delta': 360, 'frames': 6},
    'HallsCreek':      {'id': '393', 'delta': 600, 'frames': 4},
    'Hobart':          {'id': '763', 'delta': 360, 'frames': 6},
    'Kalgoorlie':      {'id': '483', 'delta': 360, 'frames': 6},
    'Katherine':       {'id': '423', 'delta': 360, 'frames': 6},
    'Learmonth':       {'id': '293', 'delta': 600, 'frames': 4},
    'Longreach':       {'id': '563', 'delta': 600, 'frames': 4},
    'Mackay':          {'id': '223', 'delta': 600, 'frames': 4},
    'Marburg':         {'id': '503', 'delta': 600, 'frames': 4},
    'Melbourne':       {'id': '023', 'delta': 360, 'frames': 6},
    'Mildura':         {'id': '303', 'delta': 600, 'frames': 4},
    'Moree':           {'id': '533', 'delta': 600, 'frames': 4},
    'MorningtonIs':    {'id': '363', 'delta': 600, 'frames': 4},
    'MountIsa':        {'id': '753', 'delta': 360, 'frames': 6},
    'MtGambier':       {'id': '143', 'delta': 600, 'frames': 4},
    'Namoi':           {'id': '693', 'delta': 600, 'frames': 4},
    'Newcastle':       {'id': '043', 'delta': 360, 'frames': 6},
    'Newdegate':       {'id': '383', 'delta': 360, 'frames': 6},
    'NorfolkIs':       {'id': '623', 'delta': 600, 'frames': 4},
    'NWTasmania':      {'id': '523', 'delta': 360, 'frames': 6},
    'Perth':           {'id': '703', 'delta': 360, 'frames': 6},
    'PortHedland':     {'id': '163', 'delta': 600, 'frames': 4},
    'SellicksHill':    {'id': '463', 'delta': 600, 'frames': 4},
    'SouthDoodlakine': {'id': '583', 'delta': 360, 'frames': 6},
    'Sydney':          {'id': '713', 'delta': 360, 'frames': 6},
    'Townsville':      {'id': '733', 'delta': 600, 'frames': 4},
    'WaggaWagga':      {'id': '553', 'delta': 600, 'frames': 4},
    'Warrego':         {'id': '673', 'delta': 600, 'frames': 4},
    'Warruwi':         {'id': '773', 'delta': 360, 'frames': 6},
    'Watheroo':        {'id': '793', 'delta': 360, 'frames': 6},
    'Weipa':           {'id': '783', 'delta': 360, 'frames': 6},
    'WillisIs':        {'id': '413', 'delta': 600, 'frames': 4},
    'Wollongong':      {'id': '033', 'delta': 360, 'frames': 6},
    'Woomera':         {'id': '273', 'delta': 600, 'frames': 4},
    'Wyndham':         {'id': '073', 'delta': 600, 'frames': 4},
    'Yarrawonga':      {'id': '493', 'delta': 360, 'frames': 6},
}


class BOMRadarLoop:

    def __init__(self, location=None, radar_id=None, delta=None, frames=None, outfile=None, logger=None):

        self._log = logger or logging.getLogger(__name__)

        if isinstance(radar_id, int):
            radar_id = '%03d' % radar_id

        valids = ', '.join(sorted(RADARS.keys()))
        if not radar_id and location not in RADARS:
            location = 'Sydney'
            self._log.error("Bad 'location' specified, using '%s' (valid locations are: %s)", location, valids)
        if radar_id:
            if location in RADARS:
                radar_id = None
                self._log.error("Valid 'location' specified, ignoring 'radar_id'")
            elif location:
                self._log.error("Bad 'location' specified, using ID %s (valid locations are: %s)", radar_id, valids)
        if radar_id and not delta:
            delta = 360
            self._log.error("No 'delta' specified for radar ID %s, using %s", radar_id, delta)
        if radar_id and not frames:
            frames = 6
            self._log.error("No 'frames' specified for radar ID %s, using %s", radar_id, frames)

        self._location = location or 'ID %s' % radar_id
        self._delta = delta or RADARS[location]['delta']
        self._frames = frames or RADARS[location]['frames']
        self._radar_id = radar_id or RADARS[location]['id']
        self._outfile = outfile
        self._t0 = 0
        self._current = self.current

    # Public methods

    @property
    def current(self):
        '''
        Return the current BOM radar-loop image.
        '''
        now = int(time.time())
        t1 = now - (now % self._delta)
        if t1 > self._t0:
            self._t0 = t1
            self._current = self._get_loop()
        return self._current

    # Private methods

    def _get_background(self):
        '''
        Fetch the background map, then the topography, locations (e.g. city
        names), and distance-from-radar range markings, and merge into a
        single image.
        '''
        self._log.debug('Getting background for %s at %s', self._location, self._t0)
        suffix0 = 'products/radar_transparencies/IDR%s.background.png'
        url0 = self._get_url(suffix0 % self._radar_id)
        background = self._get_image(url0)
        if background is None:
            return None
        for layer in ('topography', 'locations', 'range'):
            self._log.debug('Getting %s for %s at %s', layer, self._location, self._t0)
            suffix1 = 'products/radar_transparencies/IDR%s.%s.png' % (self._radar_id, layer)
            url1 = self._get_url(suffix1)
            image = self._get_image(url1)
            if image is not None:
                background = PIL.Image.alpha_composite(background, image)
        return background

    def _get_frames(self):
        '''
        Fetch a radar image for each expected time, composite it with a common
        background image, then overlay on the legend to produce a frame.
        Collect and return the frames, ignoring any blanks. If no frames were
        produced, return None (the caller must expect this).
        '''
        self._log.debug('Getting frames for %s at %s', self._location, self._t0)
        bg = self._get_background()
        legend = self._get_legend()
        frames = []
        if bg and legend:
            for time_str in self._get_time_strs():
                fg = self._get_wximg(time_str)
                if fg is not None:
                    frames.append(legend.copy())
                    frames[-1].paste(PIL.Image.alpha_composite(bg, fg), (0, 0))
        return frames or None

    def _get_image(self, url):  # pylint: disable=no-self-use
        '''
        Fetch an image from the BOM.
        '''
        self._log.debug('Getting image %s', url)
        response = requests.get(url)
        if response.status_code == 200:
            image = PIL.Image.open(io.BytesIO(response.content))
            rgba_img = image.convert('RGBA')
            image.close()
            return rgba_img
        return None

    def _get_legend(self):
        '''
        Fetch the BOM colorbar legend image.
        '''
        self._log.debug('Getting legend at %s', self._t0)
        url = self._get_url('products/radar_transparencies/IDR.legend.0.png')
        return self._get_image(url)

    def _get_loop(self):
        '''
        Return an animated GIF comprising a set of frames, where each frame
        includes a background, one or more supplemental layers, a colorbar
        legend, and a radar image.
        '''
        self._log.info('Getting loop for %s at %s', self._location, self._t0)
        loop = io.BytesIO()
        frames = self._get_frames()
        if frames is not None:
            self._log.debug('Got %s frames for %s at %s', len(frames), self._location, self._t0)
            frames[0].save(loop, append_images=frames[1:], duration=500, format='GIF', loop=0, save_all=True)
        else:
            self._log.warning('Got NO frames for %s at %s', self._location, self._t0)
            PIL.Image.new('RGB', (512, 557)).save(loop, format='GIF')
        if self._outfile:
            outdir = os.path.dirname(self._outfile)
            if not os.path.isdir(outdir):
                try:
                    os.makedirs(outdir)
                except OSError:
                    self._log.error('Could not create directory %s', outdir)
            try:
                with open(self._outfile, 'wb') as outfile:
                    outfile.write(loop.getvalue())
            except IOError:
                self._log.error('Could not write image to %s', self._outfile)
        return loop.getvalue()

    def _get_time_strs(self):
        '''
        Return a list of strings representing YYYYMMDDHHMM times for the most
        recent set of radar images to be used to create the animated GIF.
        '''
        self._log.debug('Getting time strings starting at %s', self._t0)
        frame_numbers = range(self._frames, 0, -1)
        tz = dt.timezone.utc
        f = lambda n: dt.datetime.fromtimestamp(self._t0 - (self._delta * n), tz=tz).strftime('%Y%m%d%H%M')
        return [f(n) for n in frame_numbers]

    def _get_url(self, path):  # pylint: disable=no-self-use
        self._log.debug('Getting URL for path %s', path)
        return 'http://www.bom.gov.au/%s' % path

    def _get_wximg(self, time_str):
        '''
        Return a radar weather image from the BOM website. Note that
        get_image() returns None if the image could not be fetched, so the
        caller must deal with that possibility.
        '''
        self._log.debug('Getting radar imagery for %s at %s', self._location, time_str)
        suffix = 'radar/IDR%s.T.%s.png' % (self._radar_id, time_str)
        url = self._get_url(suffix)
        return self._get_image(url)
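# A minimal usage sketch, kept in comments (hedged: the output path below is a
# placeholder):
#
#     radar = BOMRadarLoop(location='Sydney', outfile='/tmp/sydney.gif')
#     gif_bytes = radar.current  # animated GIF as bytes; also written to outfile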
2.015625
2
guess_game/main.py
devvspaces/python_projects
0
12793103
from utils import user_game, comp_game


def play(func):
    try:
        print(func())
    except ValueError:
        # If there was a value error, the function will be called again
        print('\n Try Again')
        return play(func)
    except KeyboardInterrupt:
        # When ctrl + c is used, end the game, instead of showing errors
        print('Game Over')


if __name__ == '__main__':
    player = input('Who is guessing: 1 for You and 2 for Computer: ')
    if player == '1':
        play(user_game)
    elif player == '2':
        play(comp_game)
    else:
        print('Please enter a valid value')
3.8125
4
I Coding Dojo/Solution/test_dojo.py
ComputerSocietyIFB/Dojo
0
12793104
from dojo import *


def test_valid_input():
    assert entrada_valida(1) == False
    assert entrada_valida('ABCDEF GHI') == True
    assert entrada_valida('') == False


def test_get_indice():
    assert get_indice('A') == 0
    assert get_indice(' ') == 8
    assert get_indice('J') == 3


def test_converte():
    assert converte('PUZZLES') == '7889999_9999555337777'
    assert converte('OI PESSOAL') == '66644407337777_77776662555'
2.671875
3
software/witnesses.py
aM3z/primality-testing
1
12793105
<gh_stars>1-10
import primality


def miller_rabin(n):
    witnesses, nonwitnesses = list(), list()
    for a in range(1, n):
        res = primality.miller_rabin(n=n, a=a)
        if res == "composite":
            witnesses.append(a)
        else:
            nonwitnesses.append(a)
    print(str(len(witnesses)) + " witnesses:" + str(witnesses))
    print(str(len(nonwitnesses)) + " nonwitnesses:" + str(nonwitnesses))
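# A quick usage sketch, kept in comments (hedged: assumes the sibling
# `primality` module's miller_rabin(n, a) returns the string "composite"
# when a is a witness, as the branch above implies):
#
#     miller_rabin(221)  # prints Miller-Rabin witnesses and nonwitnesses for 221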
3.1875
3
ogr2osm/osm_data.py
HSLdevcom/ogr2osm
0
12793106
# -*- coding: utf-8 -*-

# Copyright (c) 2012-2021 <NAME>, <NAME> <<EMAIL>>,
# <NAME> <<EMAIL>>, The University of Vermont
# <<EMAIL>>, github contributors
# Released under the MIT license, as given in the file LICENSE, which must
# accompany any distribution of this code.

import logging

from osgeo import ogr
from osgeo import osr

from .osm_geometries import OsmBoundary, OsmPoint, OsmWay, OsmRelation


class OsmData:
    def __init__(self, translation, rounding_digits=7, max_points_in_way=1800, add_bounds=False):
        # options
        self.translation = translation
        self.rounding_digits = rounding_digits
        self.max_points_in_way = max_points_in_way
        self.add_bounds = add_bounds

        self.__bounds = OsmBoundary()
        self.__nodes = []
        self.__unique_node_index = {}
        self.__ways = []
        self.__relations = []
        self.__long_ways_from_polygons = set()

    def __get_layer_fields(self, layer):
        layer_fields = []
        layer_def = layer.GetLayerDefn()
        for i in range(layer_def.GetFieldCount()):
            field_def = layer_def.GetFieldDefn(i)
            layer_fields.append((i, field_def.GetNameRef(), field_def.GetType()))
        return layer_fields

    # This function builds up a dictionary with the source data attributes
    # and passes them to the filter_tags function, returning the result.
    def __get_feature_tags(self, ogrfeature, layer_fields, source_encoding):
        tags = {}
        for (index, field_name, field_type) in layer_fields:
            field_value = ''
            if field_type == ogr.OFTString:
                field_value = ogrfeature.GetFieldAsBinary(index).decode(source_encoding)
            else:
                field_value = ogrfeature.GetFieldAsString(index)
            tags[field_name] = field_value.strip()
        return self.translation.filter_tags(tags)

    def __calc_bounds(self, ogrgeometry):
        (minx, maxx, miny, maxy) = ogrgeometry.GetEnvelope()
        self.__bounds.add_envelope(minx, maxx, miny, maxy)

    def __round_number(self, n):
        return int(round(n * 10**self.rounding_digits))

    def __add_node(self, x, y, tags, is_way_member):
        rx = self.__round_number(x)
        ry = self.__round_number(y)

        unique_node_id = None
        if is_way_member:
            unique_node_id = (rx, ry)
        else:
            unique_node_id = self.translation.get_unique_node_identifier(rx, ry, tags)

        if unique_node_id in self.__unique_node_index:
            return self.__nodes[self.__unique_node_index[unique_node_id]]
        else:
            node = OsmPoint(x, y, tags)
            self.__unique_node_index[unique_node_id] = len(self.__nodes)
            self.__nodes.append(node)
            return node

    def __add_way(self, tags):
        way = OsmWay(tags)
        self.__ways.append(way)
        return way

    def __add_relation(self, tags):
        relation = OsmRelation(tags)
        self.__relations.append(relation)
        return relation

    def __parse_point(self, ogrgeometry, tags):
        return self.__add_node(ogrgeometry.GetX(), ogrgeometry.GetY(), tags, False)

    def __parse_linestring(self, ogrgeometry, tags):
        way = self.__add_way(tags)
        # LineString.GetPoint() returns a tuple, so we can't call parsePoint on it
        # and instead have to create the point ourself
        previous_node_id = None
        for i in range(ogrgeometry.GetPointCount()):
            (x, y, z_unused) = ogrgeometry.GetPoint(i)
            node = self.__add_node(x, y, {}, True)
            if previous_node_id == None or previous_node_id != node.id:
                way.points.append(node)
                node.addparent(way)
                previous_node_id = node.id
        return way

    def __parse_polygon(self, ogrgeometry, tags):
        # Special case polygons with only one ring. This does not (or at least
        # should not) change behavior when simplify relations is turned on.
        if ogrgeometry.GetGeometryCount() == 0:
            logging.warning("Polygon with no rings?")
        elif ogrgeometry.GetGeometryCount() == 1:
            result = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), tags)
            if len(result.points) > self.max_points_in_way:
                self.__long_ways_from_polygons.add(result)
            return result
        else:
            relation = self.__add_relation(tags)
            try:
                exterior = self.__parse_linestring(ogrgeometry.GetGeometryRef(0), {})
                exterior.addparent(relation)
            except:
                logging.warning("Polygon with no exterior ring?")
                return None
            relation.members.append((exterior, "outer"))
            for i in range(1, ogrgeometry.GetGeometryCount()):
                interior = self.__parse_linestring(ogrgeometry.GetGeometryRef(i), {})
                interior.addparent(relation)
                relation.members.append((interior, "inner"))
            return relation

    def __parse_collection(self, ogrgeometry, tags):
        # OGR MultiPolygon maps easily to osm multipolygon, so special case it
        # TODO: Does anything else need special casing?
        geometry_type = ogrgeometry.GetGeometryType()
        if geometry_type in [ ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D ]:
            if ogrgeometry.GetGeometryCount() > 1:
                relation = self.__add_relation(tags)
                for polygon in range(ogrgeometry.GetGeometryCount()):
                    ext_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(0)
                    exterior = self.__parse_linestring(ext_geom, {})
                    exterior.addparent(relation)
                    relation.members.append((exterior, "outer"))
                    for i in range(1, ogrgeometry.GetGeometryRef(polygon).GetGeometryCount()):
                        int_geom = ogrgeometry.GetGeometryRef(polygon).GetGeometryRef(i)
                        interior = self.__parse_linestring(int_geom, {})
                        interior.addparent(relation)
                        relation.members.append((interior, "inner"))
                return [ relation ]
            else:
                return [ self.__parse_polygon(ogrgeometry.GetGeometryRef(0), tags) ]
        elif geometry_type in [ ogr.wkbMultiLineString, ogr.wkbMultiLineString25D ]:
            geometries = []
            for linestring in range(ogrgeometry.GetGeometryCount()):
                geometries.append(self.__parse_linestring(ogrgeometry.GetGeometryRef(linestring), tags))
            return geometries
        else:
            relation = self.__add_relation(tags)
            for i in range(ogrgeometry.GetGeometryCount()):
                member = self.__parse_geometry(ogrgeometry.GetGeometryRef(i), {})
                member.addparent(relation)
                relation.members.append((member, "member"))
            return [ relation ]

    def __parse_geometry(self, ogrgeometry, tags):
        osmgeometries = []

        geometry_type = ogrgeometry.GetGeometryType()

        if geometry_type in [ ogr.wkbPoint, ogr.wkbPoint25D ]:
            osmgeometries.append(self.__parse_point(ogrgeometry, tags))
        elif geometry_type in [ ogr.wkbLineString, ogr.wkbLinearRing, ogr.wkbLineString25D ]:
            # ogr.wkbLinearRing25D does not exist
            osmgeometries.append(self.__parse_linestring(ogrgeometry, tags))
        elif geometry_type in [ ogr.wkbPolygon, ogr.wkbPolygon25D ]:
            osmgeometries.append(self.__parse_polygon(ogrgeometry, tags))
        elif geometry_type in [ ogr.wkbMultiPoint, ogr.wkbMultiLineString, ogr.wkbMultiPolygon, \
                                ogr.wkbGeometryCollection, ogr.wkbMultiPoint25D, \
                                ogr.wkbMultiLineString25D, ogr.wkbMultiPolygon25D, \
                                ogr.wkbGeometryCollection25D ]:
            osmgeometries.extend(self.__parse_collection(ogrgeometry, tags))
        else:
            logging.warning("Unhandled geometry, type %s" % str(geometry_type))

        return osmgeometries

    def add_feature(self, ogrfeature, layer_fields, source_encoding, reproject = lambda geometry: None):
        ogrfilteredfeature = self.translation.filter_feature(ogrfeature, layer_fields, reproject)
        if ogrfilteredfeature is None:
            return

        ogrgeometry = ogrfilteredfeature.GetGeometryRef()
        if ogrgeometry is None:
            return

        feature_tags = self.__get_feature_tags(ogrfilteredfeature, layer_fields, source_encoding)
        if feature_tags is None:
            return

        reproject(ogrgeometry)

        if self.add_bounds:
            self.__calc_bounds(ogrgeometry)

        osmgeometries = self.__parse_geometry(ogrgeometry, feature_tags)

        # TODO performance: run in __parse_geometry to avoid second loop
        for osmgeometry in [ geom for geom in osmgeometries if geom ]:
            self.translation.process_feature_post(osmgeometry, ogrfilteredfeature, ogrgeometry)

    def __split_way(self, way, is_way_in_relation):
        new_points = [ way.points[i:i + self.max_points_in_way] \
                       for i in range(0, len(way.points), self.max_points_in_way - 1) ]
        new_ways = [ way ] + [ OsmWay(way.get_tags()) for i in range(len(new_points) - 1) ]

        if not is_way_in_relation:
            for new_way in new_ways[1:]:
                self.__ways.append(new_way)

        for new_way, points in zip(new_ways, new_points):
            new_way.points = points
            if new_way.id != way.id:
                for point in points:
                    point.removeparent(way)
                    point.addparent(new_way)

        return new_ways

    def __merge_into_new_relation(self, way_parts):
        new_relation = self.__add_relation({})
        new_relation.members = [ (way, "outer") for way in way_parts ]
        for way in way_parts:
            way.addparent(new_relation)

    def __split_way_in_relation(self, rel, way_parts):
        way_roles = [ m[1] for m in rel.members if m[0] == way_parts[0] ]
        way_role = "" if len(way_roles) == 0 else way_roles[0]
        for way in way_parts[1:]:
            way.addparent(rel)
            rel.members.append((way, way_role))

    def split_long_ways(self):
        if self.max_points_in_way < 2:
            # pointless :-)
            return

        logging.debug("Splitting long ways")

        for way in self.__ways:
            is_way_in_relation = len([ p for p in way.get_parents() if type(p) == OsmRelation ]) > 0
            if len(way.points) > self.max_points_in_way:
                way_parts = self.__split_way(way, is_way_in_relation)
                if not is_way_in_relation:
                    if way in self.__long_ways_from_polygons:
                        self.__merge_into_new_relation(way_parts)
                else:
                    for rel in way.get_parents():
                        self.__split_way_in_relation(rel, way_parts)

    def process(self, datasource):
        for i in range(datasource.get_layer_count()):
            (layer, reproject) = datasource.get_layer(i)

            if layer:
                layer_fields = self.__get_layer_fields(layer)
                for j in range(layer.GetFeatureCount()):
                    ogrfeature = layer.GetNextFeature()
                    self.add_feature(ogrfeature, layer_fields, datasource.source_encoding, reproject)

        self.split_long_ways()

    class DataWriterContextManager:
        def __init__(self, datawriter):
            self.datawriter = datawriter

        def __enter__(self):
            self.datawriter.open()
            return self.datawriter

        def __exit__(self, exception_type, value, traceback):
            self.datawriter.close()

    def output(self, datawriter):
        self.translation.process_output(self.__nodes, self.__ways, self.__relations)

        with self.DataWriterContextManager(datawriter) as dw:
            dw.write_header(self.__bounds)
            dw.write_nodes(self.__nodes)
            dw.write_ways(self.__ways)
            dw.write_relations(self.__relations)
            dw.write_footer()
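# A minimal usage sketch, kept in comments (hedged: `translation`, `datasource`,
# and `datawriter` stand for instances of ogr2osm's translation, datasource, and
# writer classes, which live outside this module):
#
#     osmdata = OsmData(translation, rounding_digits=7)
#     osmdata.process(datasource)
#     osmdata.output(datawriter)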
2.59375
3
smoothfdr/smoothed_fdr.py
tansey/smoothfdr
6
12793107
<gh_stars>1-10 import itertools import numpy as np from scipy import sparse from scipy.stats import norm from scipy.optimize import minimize, minimize_scalar from scipy.sparse import csc_matrix, linalg as sla from functools import partial from collections import deque from pygfl.solver import TrailSolver class GaussianKnown: ''' A simple Gaussian distribution with known mean and stdev. ''' def __init__(self, mean, stdev): self.mean = mean self.stdev = stdev def pdf(self, data): return norm.pdf(data, loc=self.mean, scale=self.stdev) def sample(self): return np.random.normal(loc=self.mean, scale=self.stdev) def noisy_pdf(self, data): return norm.pdf(data, loc=self.mean, scale=np.sqrt(self.stdev**2 + 1)) def __repr__(self): return 'N({:.2f}, {:.2f}^2)'.format(self.mean, self.stdev) class SmoothedFdr(object): def __init__(self, signal_dist, null_dist, penalties_cross_x=None): self.signal_dist = signal_dist self.null_dist = null_dist if penalties_cross_x is None: self.penalties_cross_x = np.dot else: self.penalties_cross_x = penalties_cross_x self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] # ''' Load the graph fused lasso library ''' # graphfl_lib = cdll.LoadLibrary('libgraphfl.so') # self.graphfl_weight = graphfl_lib.graph_fused_lasso_weight_warm # self.graphfl_weight.restype = c_int # self.graphfl_weight.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), # c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'), # c_double, c_double, c_double, c_int, c_double, # ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')] self.solver = TrailSolver() def add_step(self, w, beta, c, delta): self.w_iters.append(w) self.beta_iters.append(beta) self.c_iters.append(c) self.delta_iters.append(delta) def finish(self): self.w_iters = np.array(self.w_iters) self.beta_iters = np.array(self.beta_iters) self.c_iters = np.array(self.c_iters) self.delta_iters = np.array(self.delta_iters) def reset(self): self.w_iters = [] self.beta_iters = [] self.c_iters = [] self.delta_iters = [] def solution_path(self, data, penalties, dof_tolerance=1e-4, min_lambda=0.20, max_lambda=1.5, lambda_bins=30, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=20, cd_converge=0.00001, cd_max_steps=1000, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None, grid_data=None, grid_map=None): '''Follows the solution path of the generalized lasso to find the best lambda value.''' lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins)) aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for finite sample size) bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution log_likelihood_trace = np.zeros(lambda_grid.shape) beta_trace = [] u_trace = [] w_trace = [] c_trace = [] results_trace = [] best_idx = None best_plateaus = None flat_data = data.flatten() edges = penalties[3] if dual_solver == 'graph' else None if grid_data is not None: grid_points = np.zeros(grid_data.shape) grid_points[:,:] = np.nan for i, _lambda in enumerate(lambda_grid): if verbose: print('#{0} Lambda = {1}'.format(i, _lambda)) # Clear out all the info from the previous 
run self.reset() # Fit to the final values results = self.run(flat_data, penalties, _lambda=_lambda, converge=converge, max_steps=max_steps, m_converge=m_converge, m_max_steps=m_max_steps, cd_converge=cd_converge, cd_max_steps=cd_max_steps, verbose=verbose, dual_solver=dual_solver, admm_alpha=admm_alpha, admm_inflate=admm_inflate, admm_adaptive=admm_adaptive, initial_values=initial_values) if verbose: print('Calculating degrees of freedom') # Create a grid structure out of the vector of betas if grid_map is not None: grid_points[grid_map != -1] = results['beta'][grid_map[grid_map != -1]] else: grid_points = results['beta'].reshape(data.shape) # Count the number of free parameters in the grid (dof) plateaus = calc_plateaus(grid_points, dof_tolerance, edges=edges) dof_trace[i] = len(plateaus) #dof_trace[i] = (np.abs(penalties.dot(results['beta'])) >= dof_tolerance).sum() + 1 # Use the naive DoF if verbose: print('Calculating AIC') # Get the negative log-likelihood log_likelihood_trace[i] = -self._data_negative_log_likelihood(flat_data, results['c']) # Calculate AIC = 2k - 2ln(L) aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i] # Calculate AICc = AIC + 2k * (k+1) / (n - k - 1) aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (flat_data.shape[0] - dof_trace[i] - 1.) # Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi)) bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(flat_data)) - np.log(2 * np.pi)) # Track the best model thus far if best_idx is None or bic_trace[i] < bic_trace[best_idx]: best_idx = i best_plateaus = plateaus # Save the final run parameters to use for warm-starting the next iteration initial_values = results # Save the trace of all the resulting parameters beta_trace.append(results['beta']) u_trace.append(results['u']) w_trace.append(results['w']) c_trace.append(results['c']) if verbose: print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i])) if verbose: print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx])) return {'aic': aic_trace, 'aicc': aicc_trace, 'bic': bic_trace, 'dof': dof_trace, 'loglikelihood': log_likelihood_trace, 'beta': np.array(beta_trace), 'u': np.array(u_trace), 'w': np.array(w_trace), 'c': np.array(c_trace), 'lambda': lambda_grid, 'best': best_idx, 'plateaus': best_plateaus} def run(self, data, penalties, _lambda=0.1, converge=0.00001, max_steps=100, m_converge=0.00001, m_max_steps=100, cd_converge=0.00001, cd_max_steps=100, verbose=0, dual_solver='graph', admm_alpha=1., admm_inflate=2., admm_adaptive=False, initial_values=None): '''Runs the Expectation-Maximization algorithm for the data with the given penalty matrix.''' delta = converge + 1 if initial_values is None: beta = np.zeros(data.shape) prior_prob = np.exp(beta) / (1 + np.exp(beta)) u = initial_values else: beta = initial_values['beta'] prior_prob = initial_values['c'] u = initial_values['u'] prev_nll = 0 cur_step = 0 while delta > converge and cur_step < max_steps: if verbose: print('Step #{0}'.format(cur_step)) if verbose: print('\tE-step...') # Get the likelihood weights vector (E-step) post_prob = self._e_step(data, prior_prob) if verbose: print('\tM-step...') # Find beta using an alternating Taylor approximation and convex optimization (M-step) beta, u = self._m_step(beta, prior_prob, post_prob, penalties, _lambda, m_converge, m_max_steps, 
cd_converge, cd_max_steps, verbose, dual_solver, admm_adaptive=admm_adaptive, admm_inflate=admm_inflate, admm_alpha=admm_alpha, u0=u) # Get the signal probabilities prior_prob = ilogit(beta) cur_nll = self._data_negative_log_likelihood(data, prior_prob) if dual_solver == 'admm': # Get the negative log-likelihood of the data given our new parameters cur_nll += _lambda * np.abs(u['r']).sum() # Track the change in log-likelihood to see if we've converged delta = np.abs(cur_nll - prev_nll) / (prev_nll + converge) if verbose: print('\tDelta: {0}'.format(delta)) # Track the step self.add_step(post_prob, beta, prior_prob, delta) # Increment the step counter cur_step += 1 # Update the negative log-likelihood tracker prev_nll = cur_nll # DEBUGGING if verbose: print('\tbeta: [{0:.4f}, {1:.4f}]'.format(beta.min(), beta.max())) print('\tprior_prob: [{0:.4f}, {1:.4f}]'.format(prior_prob.min(), prior_prob.max())) print('\tpost_prob: [{0:.4f}, {1:.4f}]'.format(post_prob.min(), post_prob.max())) if dual_solver != 'graph': print('\tdegrees of freedom: {0}'.format((np.abs(penalties.dot(beta)) >= 1e-4).sum())) # Return the results of the run return {'beta': beta, 'u': u, 'w': post_prob, 'c': prior_prob} def _data_negative_log_likelihood(self, data, prior_prob): '''Calculate the negative log-likelihood of the data given the weights.''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) return -np.log(signal_weight + null_weight).sum() def _e_step(self, data, prior_prob): '''Calculate the complete-data sufficient statistics (weights vector).''' signal_weight = prior_prob * self.signal_dist.pdf(data) null_weight = (1-prior_prob) * self.null_dist.pdf(data) post_prob = signal_weight / (signal_weight + null_weight) return post_prob def _m_step(self, beta, prior_prob, post_prob, penalties, _lambda, converge, max_steps, cd_converge, cd_max_steps, verbose, dual_solver, u0=None, admm_alpha=1., admm_inflate=2., admm_adaptive=False): ''' Alternating Second-order Taylor-series expansion about the current iterate and coordinate descent to optimize Beta. ''' prev_nll = self._m_log_likelihood(post_prob, beta) delta = converge + 1 u = u0 cur_step = 0 while delta > converge and cur_step < max_steps: if verbose > 1: print('\t\tM-Step iteration #{0}'.format(cur_step)) print('\t\tTaylor approximation...') # Cache the exponentiated beta exp_beta = np.exp(beta) # Form the parameters for our weighted least squares if dual_solver != 'admm' and dual_solver != 'graph': # weights is a diagonal matrix, represented as a vector for efficiency weights = 0.5 * exp_beta / (1 + exp_beta)**2 y = (1+exp_beta)**2 * post_prob / exp_beta + beta - (1 + exp_beta) if verbose > 1: print('\t\tForming dual...') x = np.sqrt(weights) * y A = (1. 
/ np.sqrt(weights))[:,np.newaxis] * penalties.T else: weights = (prior_prob * (1 - prior_prob)) y = beta - (prior_prob - post_prob) / weights print(weights) print(y) if dual_solver == 'cd': # Solve the dual via coordinate descent u = self._u_coord_descent(x, A, _lambda, cd_converge, cd_max_steps, verbose > 1, u0=u) elif dual_solver == 'sls': # Solve the dual via sequential least squares u = self._u_slsqp(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'lbfgs': # Solve the dual via L-BFGS-B u = self._u_lbfgsb(x, A, _lambda, verbose > 1, u0=u) elif dual_solver == 'admm': # Solve the dual via alternating direction methods of multipliers #u = self._u_admm_1dfusedlasso(y, weights, _lambda, cd_converge, cd_max_steps, verbose > 1, initial_values=u) #u = self._u_admm(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u) u = self._u_admm_lucache(y, weights, _lambda, penalties, cd_converge, cd_max_steps, verbose > 1, initial_values=u, inflate=admm_inflate, adaptive=admm_adaptive, alpha=admm_alpha) beta = u['x'] elif dual_solver == 'graph': u = self._graph_fused_lasso(y, weights, _lambda, penalties[0], penalties[1], penalties[2], penalties[3], cd_converge, cd_max_steps, max(0, verbose - 1), admm_alpha, admm_inflate, initial_values=u) beta = u['beta'] # if np.abs(beta).max() > 20: # beta = np.clip(beta, -20, 20) # u = None else: raise Exception('Unknown solver: {0}'.format(dual_solver)) if dual_solver != 'admm' and dual_solver != 'graph': # Back out beta from the dual solution beta = y - (1. / weights) * penalties.T.dot(u) # Get the current log-likelihood cur_nll = self._m_log_likelihood(post_prob, beta) # Track the convergence delta = np.abs(prev_nll - cur_nll) / (prev_nll + converge) if verbose > 1: print('\t\tM-step delta: {0}'.format(delta)) # Increment the step counter cur_step += 1 # Update the negative log-likelihood tracker prev_nll = cur_nll return beta, u def _m_log_likelihood(self, post_prob, beta): '''Calculate the log-likelihood of the betas given the weights and data.''' return (np.log(1 + np.exp(beta)) - post_prob * beta).sum() def _graph_fused_lasso(self, y, weights, _lambda, ntrails, trails, breakpoints, edges, converge, max_steps, verbose, alpha, inflate, initial_values=None): '''Solve for u using a super fast graph fused lasso library that has an optimized ADMM routine.''' if verbose: print('\t\tSolving via Graph Fused Lasso') # if initial_values is None: # beta = np.zeros(y.shape, dtype='double') # z = np.zeros(breakpoints[-1], dtype='double') # u = np.zeros(breakpoints[-1], dtype='double') # else: # beta = initial_values['beta'] # z = initial_values['z'] # u = initial_values['u'] # n = y.shape[0] # self.graphfl_weight(n, y, weights, ntrails, trails, breakpoints, _lambda, alpha, inflate, max_steps, converge, beta, z, u) # return {'beta': beta, 'z': z, 'u': u } self.solver.alpha = alpha self.solver.inflate = inflate self.solver.maxsteps = max_steps self.solver.converge = converge self.solver.set_data(y, edges, ntrails, trails, breakpoints, weights=weights) if initial_values is not None: self.solver.beta = initial_values['beta'] self.solver.z = initial_values['z'] self.solver.u = initial_values['u'] self.solver.solve(_lambda) return {'beta': self.solver.beta, 'z': self.solver.z, 'u': self.solver.u } def _u_admm_lucache(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.8, initial_values=None, inflate=2., adaptive=False): '''Solve for u using alternating direction method of multipliers with a cached LU 
decomposition.''' if verbose: print('\t\tSolving u via Alternating Direction Method of Multipliers') n = len(y) m = D.shape[0] a = inflate * _lambda # step-size parameter # Initialize primal and dual variables from warm start if initial_values is None: # Graph Laplacian L = csc_matrix(D.T.dot(D) + csc_matrix(np.eye(n))) # Cache the LU decomposition lu_factor = sla.splu(L, permc_spec='MMD_AT_PLUS_A') x = np.array([y.mean()] * n) # likelihood term z = np.zeros(n) # slack variable for likelihood r = np.zeros(m) # penalty term s = np.zeros(m) # slack variable for penalty u_dual = np.zeros(n) # scaled dual variable for constraint x = z t_dual = np.zeros(m) # scaled dual variable for constraint r = s else: lu_factor = initial_values['lu_factor'] x = initial_values['x'] z = initial_values['z'] r = initial_values['r'] s = initial_values['s'] u_dual = initial_values['u_dual'] t_dual = initial_values['t_dual'] primal_trace = [] dual_trace = [] converged = False cur_step = 0 D_full = D while not converged and cur_step < max_steps: # Update x x = (weights * y + a * (z - u_dual)) / (weights + a) x_accel = alpha * x + (1 - alpha) * z # over-relaxation # Update constraint term r arg = s - t_dual local_lambda = (_lambda - np.abs(arg) / 2.).clip(0) if adaptive else _lambda r = _soft_threshold(arg, local_lambda / a) r_accel = alpha * r + (1 - alpha) * s # Projection to constraint set arg = x_accel + u_dual + D.T.dot(r_accel + t_dual) z_new = lu_factor.solve(arg) s_new = D.dot(z_new) dual_residual_u = a * (z_new - z) dual_residual_t = a * (s_new - s) z = z_new s = s_new # Dual update primal_residual_x = x_accel - z primal_residual_r = r_accel - s u_dual = u_dual + primal_residual_x t_dual = t_dual + primal_residual_r # Check convergence primal_resnorm = np.sqrt((np.array([i for i in primal_residual_x] + [i for i in primal_residual_r])**2).mean()) dual_resnorm = np.sqrt((np.array([i for i in dual_residual_u] + [i for i in dual_residual_t])**2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold if primal_resnorm > 5 * dual_resnorm: a *= inflate u_dual /= inflate t_dual /= inflate elif dual_resnorm > 5 * primal_resnorm: a /= inflate u_dual *= inflate t_dual *= inflate # Update the step counter cur_step += 1 if verbose and cur_step % 100 == 0: print('\t\t\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) return {'x': x, 'r': r, 'z': z, 's': s, 'u_dual': u_dual, 't_dual': t_dual, 'primal_trace': primal_trace, 'dual_trace': dual_trace, 'steps': cur_step, 'lu_factor': lu_factor} def _u_admm(self, y, weights, _lambda, D, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers.''' if verbose: print('\t\tSolving u via Alternating Direction Method of Multipliers') n = len(y) m = D.shape[0] a = _lambda # step-size parameter # Set up system involving graph Laplacian L = D.T.dot(D) W_over_a = np.diag(weights / a) x_denominator = W_over_a + L #x_denominator = sparse.linalg.inv(W_over_a + L) # Initialize primal and dual variables if initial_values is None: x = np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = [] converged = False cur_step = 0 while not converged and cur_step < max_steps: # Update x x_numerator = 1.0 / a * weights * y + D.T.dot(a * z - u) x = 
np.linalg.solve(x_denominator, x_numerator) Dx = D.dot(x) # Update z Dx_relaxed = alpha * Dx + (1 - alpha) * z # over-relax Dx z_new = _soft_threshold(Dx_relaxed + u / a, _lambda / a) dual_residual = a * D.T.dot(z_new - z) z = z_new primal_residual = Dx_relaxed - z # Update u u = u + a * primal_residual # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based on norm of primal and dual residuals # This is the varying penalty extension to standard ADMM a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 # Recalculate the x_denominator since we changed the step-size # TODO: is this worth it? We're paying a matrix inverse in exchange for varying the step size #W_over_a = sparse.dia_matrix(np.diag(weights / a)) W_over_a = np.diag(weights / a) x_denominator = W_over_a + L # Update the step counter cur_step += 1 if verbose and cur_step % 100 == 0: print('\t\t\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. AIC = np.sum((y - x)**2) + 2 * dof return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_admm_1dfusedlasso(self, y, W, _lambda, converge_threshold, max_steps, verbose, alpha=1.0, initial_values=None): '''Solve for u using alternating direction method of multipliers. Note that this method only works for the 1-D fused lasso case.''' if verbose: print('\t\tSolving u via Alternating Direction Method of Multipliers (1-D fused lasso)') n = len(y) m = n - 1 a = _lambda # The D matrix is the first-difference operator. K is the matrix (W + a D^T D) # where W is the diagonal matrix of weights. We use a tridiagonal representation # of K.
Kd = np.array([a] + [2*a] * (n-2) + [a]) + W # diagonal entries Kl = np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal # Initialize primal and dual variables if initial_values is None: x = np.array([y.mean()] * n) z = np.zeros(m) u = np.zeros(m) else: x = initial_values['x'] z = initial_values['z'] u = initial_values['u'] primal_trace = [] dual_trace = [] converged = False cur_step = 0 while not converged and cur_step < max_steps: # Update x out = _1d_fused_lasso_crossprod(a*z - u) x = tridiagonal_solve(Kl, Ku, Kd, W * y + out) Dx = np.ediff1d(x) # Update z Dx_hat = alpha * Dx + (1 - alpha) * z # Over-relaxation z_new = _soft_threshold(Dx_hat + u / a, _lambda / a) dual_residual = a * _1d_fused_lasso_crossprod(z_new - z) z = z_new primal_residual = Dx - z #primal_residual = Dx_hat - z # Update u u = (u + a * primal_residual).clip(-_lambda, _lambda) # Check convergence primal_resnorm = np.sqrt((primal_residual ** 2).mean()) dual_resnorm = np.sqrt((dual_residual ** 2).mean()) primal_trace.append(primal_resnorm) dual_trace.append(dual_resnorm) converged = dual_resnorm < converge_threshold and primal_resnorm < converge_threshold # Update step-size parameter based on norm of primal and dual residuals a *= 2 if primal_resnorm > 10 * dual_resnorm else 0.5 Kd = np.array([a] + [2*a] * (n-2) + [a]) + W # diagonal entries Kl = np.array([-a] * (n-1)) # below the diagonal Ku = np.array([-a] * (n-1)) # above the diagonal cur_step += 1 if verbose and cur_step % 100 == 0: print('\t\t\tStep #{0}: dual_resnorm: {1:.6f} primal_resnorm: {2:.6f}'.format(cur_step, dual_resnorm, primal_resnorm)) dof = np.sum(Dx > converge_threshold) + 1. AIC = np.sum((y - x)**2) + 2 * dof return {'x': x, 'z': z, 'u': u, 'dof': dof, 'AIC': AIC} def _u_coord_descent(self, x, A, _lambda, converge, max_steps, verbose, u0=None): '''Solve for u using coordinate descent.''' if verbose: print('\t\tSolving u via Coordinate Descent') u = u0 if u0 is not None else np.zeros(A.shape[1]) l2_norm_A = (A * A).sum(axis=0) r = x - A.dot(u) delta = converge + 1 prev_objective = _u_objective_func(u, x, A) cur_step = 0 while delta > converge and cur_step < max_steps: # Update each coordinate one at a time. 
for coord in range(len(u)): prev_u = u[coord] next_u = prev_u + A.T[coord].dot(r) / l2_norm_A[coord] u[coord] = min(_lambda, max(-_lambda, next_u)) r += A.T[coord] * prev_u - A.T[coord] * u[coord] # Track the change in the objective function value cur_objective = _u_objective_func(u, x, A) delta = np.abs(prev_objective - cur_objective) / (prev_objective + converge) if verbose and cur_step % 100 == 0: print('\t\t\tStep #{0}: Objective: {1:.6f} CD Delta: {2:.6f}'.format(cur_step, cur_objective, delta)) # Increment the step counter and update the previous objective value cur_step += 1 prev_objective = cur_objective return u def _u_slsqp(self, x, A, _lambda, verbose, u0=None): '''Solve for u using sequential least squares.''' if verbose: print('\t\tSolving u via Sequential Least Squares') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for u0_i in u0] results = minimize(_u_objective_func, u0, args=(x, A), jac=_u_objective_deriv, bounds=bounds, method='SLSQP', options={'disp': False, 'maxiter': 1000}) if verbose: print('\t\t\t{0}'.format(results.message)) print('\t\t\tFunction evaluations: {0}'.format(results.nfev)) print('\t\t\tGradient evaluations: {0}'.format(results.njev)) print('\t\t\tu: [{0}, {1}]'.format(results.x.min(), results.x.max())) return results.x def _u_lbfgsb(self, x, A, _lambda, verbose, u0=None): '''Solve for u using L-BFGS-B.''' if verbose: print('\t\tSolving u via L-BFGS-B') if u0 is None: u0 = np.zeros(A.shape[1]) # Create our box constraints bounds = [(-_lambda, _lambda) for _ in u0] # Fit results = minimize(_u_objective_func, u0, args=(x, A), method='L-BFGS-B', bounds=bounds, options={'disp': verbose}) return results.x def plateau_regression(self, plateaus, data, grid_map=None, verbose=False): '''Perform unpenalized 1-d regression for each of the plateaus.''' weights = np.zeros(data.shape) for i,(level,p) in enumerate(plateaus): if verbose: print('\tPlateau #{0}'.format(i+1)) # Get the subset of grid points for this plateau if grid_map is not None: plateau_data = np.array([data[grid_map[x,y]] for x,y in p]) else: plateau_data = np.array([data[x,y] for x,y in p]) w = single_plateau_regression(plateau_data, self.signal_dist, self.null_dist) for idx in p: weights[idx if grid_map is None else grid_map[idx[0], idx[1]]] = w posteriors = self._e_step(data, weights) weights = weights.flatten() return (weights, posteriors) def _u_objective_func(u, x, A): return np.linalg.norm(x - A.dot(u))**2 def _u_objective_deriv(u, x, A): return 2*A.T.dot(A.dot(u) - x) def _u_slsqp_constraint_func(idx, _lambda, u): '''Constraint function for the i'th value of u.''' return np.array([_lambda - np.abs(u[idx])]) def _u_slsqp_constraint_deriv(idx, u): jac = np.zeros(len(u)) jac[idx] = -np.sign(u[idx]) return jac def _1d_fused_lasso_crossprod(x): '''Efficiently compute the cross-product D^T x, where D is the first-differences matrix.''' return -np.ediff1d(x, to_begin=x[0], to_end=-x[-1]) def _soft_threshold(x, _lambda): return np.sign(x) * (np.abs(x) - _lambda).clip(0) ## Tri-Diagonal Matrix Algorithm (a.k.a Thomas algorithm) solver ## Source: http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm def tridiagonal_solve(a,b,c,f): alpha = [0] beta = [0] n = len(f) x = [0] * n for i in range(n-1): alpha.append(-b[i]/(a[i]*alpha[i] + c[i])) beta.append((f[i] - a[i]*beta[i])/(a[i]*alpha[i] + c[i])) x[n-1] = (f[n-1] - a[n-2]*beta[n-1])/(c[n-1] + a[n-2]*alpha[n-1]) for i in reversed(range(n-1)): x[i] = alpha[i+1]*x[i+1] + beta[i+1] return np.array(x) 
def ilogit(x): return 1. / (1. + np.exp(-x)) def calc_plateaus(beta, rel_tol=1e-4, edges=None, verbose=0): '''Calculate the plateaus (degrees of freedom) of a 1d or 2d grid of beta values in linear time.''' to_check = deque(itertools.product(*[range(x) for x in beta.shape])) if edges is None else deque(range(len(beta))) check_map = np.zeros(beta.shape, dtype=bool) check_map[np.isnan(beta)] = True plateaus = [] if verbose: print('\tCalculating plateaus...') if verbose > 1: print('\tIndices to check {0} {1}'.format(len(to_check), check_map.shape)) # Loop until every beta index has been checked while to_check: if verbose > 1: print('\t\tPlateau #{0}'.format(len(plateaus) + 1)) # Get the next unchecked point on the grid idx = to_check.popleft() # If we already have checked this one, just pop it off while to_check and check_map[idx]: try: idx = to_check.popleft() except: break # Edge case -- If we went through all the indices without reaching an unchecked one. if check_map[idx]: break # Create the plateau and calculate the inclusion conditions cur_plateau = set([idx]) cur_unchecked = deque([idx]) val = beta[idx] min_member = val - rel_tol max_member = val + rel_tol # Check every possible boundary of the plateau while cur_unchecked: idx = cur_unchecked.popleft() # neighbors to check local_check = [] # Generic graph case if edges is not None: local_check.extend(edges[idx]) # 1d case -- check left and right elif len(beta.shape) == 1: if idx[0] > 0: local_check.append(idx[0] - 1) # left if idx[0] < beta.shape[0] - 1: local_check.append(idx[0] + 1) # right # 2d case -- check left, right, up, and down elif len(beta.shape) == 2: if idx[0] > 0: local_check.append((idx[0] - 1, idx[1])) # left if idx[0] < beta.shape[0] - 1: local_check.append((idx[0] + 1, idx[1])) # right if idx[1] > 0: local_check.append((idx[0], idx[1] - 1)) # down if idx[1] < beta.shape[1] - 1: local_check.append((idx[0], idx[1] + 1)) # up # Only supports 1d and 2d cases for now else: raise Exception('Degrees of freedom calculation does not currently support more than 2 dimensions unless edges are specified explicitly. ({0} given)'.format(len(beta.shape))) # Check the index's unchecked neighbors for local_idx in local_check: if not check_map[local_idx] \ and beta[local_idx] >= min_member \ and beta[local_idx] <= max_member: # Label this index as being checked so it's not re-checked unnecessarily check_map[local_idx] = True # Add it to the plateau and the list of local unchecked locations cur_unchecked.append(local_idx) cur_plateau.add(local_idx) # Track each plateau's indices plateaus.append((val, cur_plateau)) # Returns the list of plateaus and their values return plateaus def plateau_loss_func(c, data, signal_dist, null_dist): '''The negative log-likelihood function for a plateau.''' return -np.log(c * signal_dist.pdf(data) + (1. - c) * null_dist.pdf(data)).sum() def single_plateau_regression(data, signal_dist, null_dist): '''Perform unpenalized 1-d regression on all of the points in a plateau.''' return minimize_scalar(plateau_loss_func, args=(data, signal_dist, null_dist), bounds=(0,1), method='Bounded').x
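# A quick sanity check for the helpers above (illustrative only, not part of
# the original module): the Thomas-algorithm solver should agree with a dense
# solve for a constant-coefficient tridiagonal system, and soft thresholding
# should shrink values toward zero.
if __name__ == '__main__':
    n = 5
    sub = np.full(n - 1, -1.0)   # sub-diagonal coefficients
    sup = np.full(n - 1, -1.0)   # super-diagonal coefficients
    diag = np.full(n, 4.0)       # main diagonal
    rhs = np.arange(1.0, n + 1)
    dense = np.diag(diag) + np.diag(sub, -1) + np.diag(sup, 1)
    assert np.allclose(tridiagonal_solve(sub, sup, diag, rhs),
                       np.linalg.solve(dense, rhs))
    assert np.allclose(_soft_threshold(np.array([-2.0, 0.5, 3.0]), 1.0),
                       [-1.0, 0.0, 2.0])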
2.1875
2
models/model.py
Jungyhuk/plotcoder
10
12793108
<reponame>Jungyhuk/plotcoder import torch import torch.nn as nn from torch.autograd import Variable from torch import cuda import torch.optim as optim from torch.nn.utils import clip_grad_norm import torch.nn.functional as F import numpy as np from .data_utils import data_utils from .modules import mlp class PlotCodeGenerator(nn.Module): def __init__(self, args, word_vocab, code_vocab): super(PlotCodeGenerator, self).__init__() self.cuda_flag = args.cuda self.word_vocab_size = args.word_vocab_size self.code_vocab_size = args.code_vocab_size self.num_plot_types = args.num_plot_types self.word_vocab = word_vocab self.code_vocab = code_vocab self.batch_size = args.batch_size self.embedding_size = args.embedding_size self.LSTM_hidden_size = args.LSTM_hidden_size self.MLP_hidden_size = args.MLP_hidden_size self.num_LSTM_layers = args.num_LSTM_layers self.num_MLP_layers = args.num_MLP_layers self.gradient_clip = args.gradient_clip self.lr = args.lr self.dropout_rate = args.dropout_rate self.nl = args.nl self.use_comments = args.use_comments self.code_context = args.code_context self.hierarchy = args.hierarchy self.copy_mechanism = args.copy_mechanism self.nl_code_linking = args.nl_code_linking self.max_word_len = args.max_word_len self.max_code_context_len = args.max_code_context_len self.max_decode_len = args.max_decode_len self.dropout = nn.Dropout(p=self.dropout_rate) self.word_embedding = nn.Embedding(self.word_vocab_size, self.embedding_size) if self.copy_mechanism: self.code_embedding = nn.Embedding(self.code_vocab_size, self.embedding_size) else: self.code_embedding = nn.Embedding(self.code_vocab_size + self.max_code_context_len, self.embedding_size) self.code_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.copy_predictor = nn.Linear(self.embedding_size, self.code_vocab_size + self.max_code_context_len) self.input_nl_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.input_code_encoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) if self.hierarchy: self.decoder = nn.LSTM(input_size=self.embedding_size * 2, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) else: self.decoder = nn.LSTM(input_size=self.embedding_size, hidden_size=self.LSTM_hidden_size, num_layers=self.num_LSTM_layers, dropout=self.dropout_rate, batch_first=True, bidirectional=True) self.word_attention = nn.Linear(self.LSTM_hidden_size * 2, self.LSTM_hidden_size * 2) if not self.nl_code_linking: self.code_ctx_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) else: self.code_ctx_word_linear = nn.Linear(self.LSTM_hidden_size * 4 + self.embedding_size, self.embedding_size) self.code_word_linear = nn.Linear(self.LSTM_hidden_size * 2 + self.embedding_size, self.embedding_size) self.encoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_code_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.decoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.encoder_copy_attention_linear = nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) self.target_embedding_linear = 
nn.Linear(self.LSTM_hidden_size * 2, self.embedding_size) # training self.loss = nn.CrossEntropyLoss() if args.optimizer == 'adam': self.optimizer = optim.Adam(self.parameters(), lr=self.lr) elif args.optimizer == 'sgd': self.optimizer = optim.SGD(self.parameters(), lr=self.lr) elif args.optimizer == 'rmsprop': self.optimizer = optim.RMSprop(self.parameters(), lr=self.lr) else: raise ValueError('optimizer undefined: ', args.optimizer) def init_weights(self, param_init): for param in self.parameters(): nn.init.uniform_(param, -param_init, param_init) def lr_decay(self, lr_decay_rate): self.lr *= lr_decay_rate for param_group in self.optimizer.param_groups: param_group['lr'] = self.lr def train_step(self): if self.gradient_clip > 0: clip_grad_norm(self.parameters(), self.gradient_clip) self.optimizer.step() def forward(self, batch_input, batch_labels, eval_flag=False): batch_size = batch_labels.size()[0] batch_init_data = batch_input['init_data'] batch_nl_input = batch_input['nl'] batch_nl_embedding = self.word_embedding(batch_nl_input) encoder_word_mask = (batch_nl_input == data_utils.PAD_ID).float() encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.UNK_ID).float()) encoder_word_mask = torch.max(encoder_word_mask, (batch_nl_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_word_mask = encoder_word_mask.cuda() nl_encoder_output, nl_hidden_state = self.input_nl_encoder(batch_nl_embedding) decoder_hidden_state = nl_hidden_state batch_code_context_input = batch_input['code_context'] batch_code_context_embedding = self.code_embedding(batch_code_context_input) batch_code_nl_embedding = [] batch_input_code_nl_indices = batch_input['input_code_nl_indices'] max_code_len = batch_code_context_input.size()[1] max_word_len = batch_nl_input.size()[1] if self.nl_code_linking: for batch_idx in range(batch_size): input_code_nl_indices = batch_input_code_nl_indices[batch_idx, :, :] cur_code_nl_embedding_0 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 0]] cur_code_nl_embedding_1 = nl_encoder_output[batch_idx, input_code_nl_indices[:, 1]] cur_code_nl_embedding = cur_code_nl_embedding_0 + cur_code_nl_embedding_1 batch_code_nl_embedding.append(cur_code_nl_embedding) batch_code_nl_embedding = torch.stack(batch_code_nl_embedding, dim=0) code_encoder_input = torch.cat([batch_code_context_embedding, batch_code_nl_embedding], dim=-1) code_encoder_input = self.code_word_linear(code_encoder_input) else: code_encoder_input = batch_code_context_embedding encoder_code_mask = (batch_code_context_input == data_utils.PAD_ID).float() encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.UNK_ID).float()) encoder_code_mask = torch.max(encoder_code_mask, (batch_code_context_input == data_utils.EOS_ID).float()) if self.cuda_flag: encoder_code_mask = encoder_code_mask.cuda() code_encoder_output, code_hidden_state = self.input_code_encoder(code_encoder_input) decoder_hidden_state = code_hidden_state gt_output = batch_input['gt'] target_code_output = batch_input['code_output'] target_df_output = batch_input['df_output'] target_var_output = batch_input['var_output'] target_str_output = batch_input['str_output'] code_output_mask = batch_input['code_output_mask'] output_df_mask = batch_input['output_df_mask'] output_var_mask = batch_input['output_var_mask'] output_str_mask = batch_input['output_str_mask'] gt_decode_length = target_code_output.size()[1] if not eval_flag: decode_length = gt_decode_length else: decode_length = self.max_decode_len 
decoder_input_sketch = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input_sketch = decoder_input_sketch.cuda() decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input = torch.ones(batch_size, 1, dtype=torch.int64) * data_utils.GO_ID if self.cuda_flag: decoder_input = decoder_input.cuda() decoder_input_embedding = self.code_embedding(decoder_input) finished = torch.zeros(batch_size, 1, dtype=torch.int64) max_code_mask_len = code_output_mask.size()[1] pad_mask = torch.zeros(max_code_mask_len) pad_mask[data_utils.PAD_ID] = 1e9 pad_mask = torch.stack([pad_mask] * batch_size, dim=0) if self.cuda_flag: finished = finished.cuda() pad_mask = pad_mask.cuda() batch_code_output_indices = data_utils.np_to_tensor(np.array(list(range(self.code_vocab_size))), 'int', self.cuda_flag) batch_code_output_embedding = self.code_embedding(batch_code_output_indices) batch_code_output_embedding = torch.stack([batch_code_output_embedding] * batch_size, dim=0) batch_output_code_ctx_embedding = [] batch_output_code_ctx_indices = batch_input['output_code_ctx_indices'] for batch_idx in range(batch_size): output_code_ctx_indices = batch_output_code_ctx_indices[batch_idx] cur_output_code_ctx_embedding = code_encoder_output[batch_idx, output_code_ctx_indices] batch_output_code_ctx_embedding.append(cur_output_code_ctx_embedding) batch_output_code_ctx_embedding = torch.stack(batch_output_code_ctx_embedding, dim=0) if self.nl_code_linking: batch_output_code_nl_embedding = [] batch_output_code_nl_indices = batch_input['output_code_nl_indices'] for batch_idx in range(batch_size): output_code_nl_indices = batch_output_code_nl_indices[batch_idx, :, :] cur_output_code_nl_embedding_0 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 0]] cur_output_code_nl_embedding_1 = nl_encoder_output[batch_idx, output_code_nl_indices[:, 1]] cur_output_code_nl_embedding = cur_output_code_nl_embedding_0 + cur_output_code_nl_embedding_1 batch_output_code_nl_embedding.append(cur_output_code_nl_embedding) batch_output_code_nl_embedding = torch.stack(batch_output_code_nl_embedding, dim=0) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding, batch_output_code_nl_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_word_linear(batch_code_output_embedding) else: batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_output_code_ctx_embedding], dim=-1) batch_code_output_embedding = self.code_ctx_linear(batch_code_output_embedding) if self.code_context: batch_code_output_context_embedding = [] for batch_idx in range(batch_size): output_code_indices = batch_init_data[batch_idx]['output_code_indices'] cur_code_output_context_embedding = [] for code_idx in output_code_indices: cur_code_output_context_embedding.append(code_encoder_output[batch_idx, code_idx, :]) if len(cur_code_output_context_embedding) < max_code_mask_len - self.code_vocab_size: cur_code_output_context_embedding += [data_utils.np_to_tensor(np.zeros(self.LSTM_hidden_size * 2), 'float', self.cuda_flag)] * (max_code_mask_len - self.code_vocab_size - len(cur_code_output_context_embedding)) cur_code_output_context_embedding = torch.stack(cur_code_output_context_embedding, dim=0) batch_code_output_context_embedding.append(cur_code_output_context_embedding) batch_code_output_context_embedding = torch.stack(batch_code_output_context_embedding, dim=0) batch_code_output_context_embedding = 
self.target_embedding_linear(batch_code_output_context_embedding) batch_code_output_embedding = torch.cat([batch_code_output_embedding, batch_code_output_context_embedding], dim=1) code_pred_logits = [] code_predictions = [] df_pred_logits = [] df_predictions = [] var_pred_logits = [] var_predictions = [] str_pred_logits = [] str_predictions = [] predictions = [] for step in range(decode_length): if self.hierarchy: decoder_output, decoder_hidden_state = self.decoder( torch.cat([decoder_input_sketch_embedding, decoder_input_embedding], dim=-1), decoder_hidden_state) else: decoder_output, decoder_hidden_state = self.decoder(decoder_input_embedding, decoder_hidden_state) decoder_output = decoder_output.squeeze(1) decoder_nl_attention = self.word_attention(decoder_output) attention_logits = torch.bmm(nl_encoder_output, decoder_nl_attention.unsqueeze(2)) attention_logits = attention_logits.squeeze(-1) attention_logits = attention_logits - encoder_word_mask * 1e9 attention_weights = nn.Softmax(dim=-1)(attention_logits) attention_weights = self.dropout(attention_weights) nl_attention_vector = torch.bmm(torch.transpose(nl_encoder_output, 1, 2), attention_weights.unsqueeze(2)) nl_attention_vector = nl_attention_vector.squeeze(-1) input_code_encoding = self.encoder_code_attention_linear(nl_attention_vector) if self.hierarchy: input_copy_encoding = self.encoder_copy_attention_linear(nl_attention_vector) decoder_code_output = self.decoder_code_attention_linear(decoder_output) if self.hierarchy: decoder_copy_output = self.decoder_copy_attention_linear(decoder_output) decoder_code_output = decoder_code_output + input_code_encoding if self.hierarchy: decoder_copy_output = decoder_copy_output + input_copy_encoding if self.copy_mechanism: cur_code_pred_logits = torch.bmm(batch_code_output_embedding, decoder_code_output.unsqueeze(2)) cur_code_pred_logits = cur_code_pred_logits.squeeze(-1) else: cur_code_pred_logits = self.code_predictor(decoder_code_output) cur_code_pred_logits = cur_code_pred_logits + finished.float() * pad_mask cur_code_pred_logits = cur_code_pred_logits - (1.0 - code_output_mask) * 1e9 cur_code_predictions = cur_code_pred_logits.max(1)[1] if eval_flag: sketch_predictions = cur_code_predictions else: sketch_predictions = target_code_output[:, step] if self.hierarchy: if self.copy_mechanism: cur_copy_pred_logits = torch.bmm(batch_code_output_embedding, decoder_copy_output.unsqueeze(2)) cur_copy_pred_logits = cur_copy_pred_logits.squeeze(-1) else: cur_copy_pred_logits = self.copy_predictor(decoder_copy_output) cur_df_pred_logits = cur_copy_pred_logits - (1.0 - output_df_mask) * 1e9 cur_df_predictions = cur_df_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.DF_ID).long()) cur_var_pred_logits = cur_copy_pred_logits - (1.0 - output_var_mask) * 1e9 cur_var_predictions = cur_var_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.VAR_ID).long()) cur_str_pred_logits = cur_copy_pred_logits - (1.0 - output_str_mask) * 1e9 cur_str_predictions = cur_str_pred_logits.max(1)[1] * ((sketch_predictions == data_utils.STR_ID).long()) if eval_flag: decoder_input_sketch = cur_code_predictions decoder_input = cur_code_predictions if self.hierarchy: decoder_input = torch.max(decoder_input, cur_df_predictions) decoder_input = torch.max(decoder_input, cur_var_predictions) decoder_input = torch.max(decoder_input, cur_str_predictions) else: decoder_input_sketch = target_code_output[:, step] decoder_input = gt_output[:, step] if self.copy_mechanism: decoder_input_sketch_embedding = [] for batch_idx 
in range(batch_size): decoder_input_sketch_embedding.append(batch_code_output_embedding[batch_idx, decoder_input_sketch[batch_idx], :]) decoder_input_sketch_embedding = torch.stack(decoder_input_sketch_embedding, dim=0) decoder_input_embedding = [] for batch_idx in range(batch_size): decoder_input_embedding.append(batch_code_output_embedding[batch_idx, decoder_input[batch_idx], :]) decoder_input_embedding = torch.stack(decoder_input_embedding, dim=0) else: decoder_input_sketch_embedding = self.code_embedding(decoder_input_sketch) decoder_input_embedding = self.code_embedding(decoder_input) decoder_input_sketch_embedding = decoder_input_sketch_embedding.unsqueeze(1) decoder_input_embedding = decoder_input_embedding.unsqueeze(1) if step < gt_decode_length: code_pred_logits.append(cur_code_pred_logits) code_predictions.append(cur_code_predictions) cur_predictions = cur_code_predictions if self.hierarchy: if step < gt_decode_length: df_pred_logits.append(cur_df_pred_logits) var_pred_logits.append(cur_var_pred_logits) str_pred_logits.append(cur_str_pred_logits) df_predictions.append(cur_df_predictions) var_predictions.append(cur_var_predictions) str_predictions.append(cur_str_predictions) cur_predictions = torch.max(cur_predictions, cur_df_predictions) cur_predictions = torch.max(cur_predictions, cur_var_predictions) cur_predictions = torch.max(cur_predictions, cur_str_predictions) predictions.append(cur_predictions) cur_finished = (decoder_input == data_utils.EOS_ID).long().unsqueeze(1) finished = torch.max(finished, cur_finished) if torch.sum(finished) == batch_size and step >= gt_decode_length - 1: break total_loss = 0.0 code_pred_logits = torch.stack(code_pred_logits, dim=0) code_pred_logits = code_pred_logits.permute(1, 2, 0) code_predictions = torch.stack(code_predictions, dim=0) code_predictions = code_predictions.permute(1, 0) total_loss += F.cross_entropy(code_pred_logits, target_code_output, ignore_index=data_utils.PAD_ID) if self.hierarchy: df_pred_logits = torch.stack(df_pred_logits, dim=0) df_pred_logits = df_pred_logits.permute(1, 2, 0) df_predictions = torch.stack(df_predictions, dim=0) df_predictions = df_predictions.permute(1, 0) df_loss = F.cross_entropy(df_pred_logits, target_df_output, ignore_index=-1) var_pred_logits = torch.stack(var_pred_logits, dim=0) var_pred_logits = var_pred_logits.permute(1, 2, 0) var_predictions = torch.stack(var_predictions, dim=0) var_predictions = var_predictions.permute(1, 0) var_loss = F.cross_entropy(var_pred_logits, target_var_output, ignore_index=-1) str_pred_logits = torch.stack(str_pred_logits, dim=0) str_pred_logits = str_pred_logits.permute(1, 2, 0) str_predictions = torch.stack(str_predictions, dim=0) str_predictions = str_predictions.permute(1, 0) str_loss = F.cross_entropy(str_pred_logits, target_str_output, ignore_index=-1) total_loss += (df_loss + var_loss + str_loss) / 3.0 predictions = torch.stack(predictions, dim=0) predictions = predictions.permute(1, 0) return total_loss, code_pred_logits, predictions
2.125
2
src/ribo_api/models/user_activity_log.py
RinPham/RiBo-Core
0
12793109
import json from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from ribo_api.models.usertypes import TinyIntegerField from .usertypes import NormalTextField class UserActivityLog(models.Model): id = models.AutoField(primary_key=True) user = models.ForeignKey(settings.AUTH_USER_MODEL) ip = models.GenericIPAddressField() action = models.CharField(_('Action'), max_length=6) status = models.SmallIntegerField(_('Request status code'), default=200) url = models.CharField(_('Url'), max_length=2000, default='') meta = NormalTextField(_('Meta data'), default='{}') created_at = models.DateTimeField(default=timezone.now) latest_at = models.DateTimeField(default=timezone.now) device_type = TinyIntegerField(default=0) @property def meta_json(self): if self.meta: return json.loads(self.meta) return {} class Meta: verbose_name = _('activity_log') verbose_name_plural = _('activity_logs') db_table = 'ribo_user_activity_logs'
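# Example of the ``meta_json`` helper above (a sketch; ``log`` stands for a
# UserActivityLog instance and the field values are hypothetical):
#
#     log = UserActivityLog(user=user, ip='127.0.0.1', action='GET',
#                           meta=json.dumps({'endpoint': '/api/items'}))
#     log.meta_json['endpoint']  # -> '/api/items'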
2.03125
2
app.py
jimzers/annotate-mock
0
12793110
<reponame>jimzers/annotate-mock
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:pAASSSJFLFDSDF@localhost/annotate_mock_db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)


class User(db.Model):
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)
    email = db.Column(db.String(80), unique=True)
    user_library = db.Column(db.String(65535))
    follow_list = db.Column(db.String(65535))

    def __init__(self, username, email):
        self.username = username
        self.email = email


class Book(db.Model):
    __tablename__ = "books"
    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.TEXT(65535))
    chapters = db.Column(db.Integer)
    has_access = db.Column(db.String(65535))


class Annotation(db.Model):
    __tablename__ = "annotations"
    id = db.Column(db.Integer, primary_key=True)
    book_id = db.Column(db.Integer, db.ForeignKey(Book.id), primary_key=True)
    # The next two columns were truncated in the source; the types below are
    # assumptions so the model at least loads.
    text = db.Column(db.TEXT(65535))
    range_ = db.Column(db.String(255))


@app.route('/')
def hello_world():
    return 'Hello World!'


if __name__ == '__main__':
    app.run()
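# A minimal seeding sketch (illustrative, not part of the original app; it
# assumes the configured MySQL database exists and is reachable):
def _demo_seed():
    with app.app_context():
        db.create_all()
        db.session.add(User(username="alice", email="alice@example.com"))
        db.session.commit()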
2.8125
3
src/day6/day6_part1.py
LotusEcho/AdventOfCode2018
0
12793111
import os import re from collections import defaultdict input_file = open(os.path.join(os.path.dirname(__file__), 'day6_input.txt'), 'r') min_x = -1 min_y = -1 max_x = -1 max_y = -1 points = [] for line in input_file: matcher = re.match("(\d+),\s(\d+)", line) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) if x > max_x: max_x = x if y > max_y: max_y = y if min_x == -1 or min_y == -1: min_x = x min_y = y else: if x < min_x: min_x = x if y < min_y: min_y = y point_string = f"{y},{x}" points.append(point_string) def calculate_distances(minx, maxx, miny, maxy): distances = defaultdict(lambda:1) for x_point in range(minx, maxx+1): for y_point in range(miny, maxy+1): point_string = f"{y_point},{x_point}" if point_string not in points: min_point_value = 100 min_point = None for point in points: matcher = re.match("(\d+),(\d+)", point) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x - x_point) + abs(y - y_point) if current_point_value < min_point_value: min_point_value = current_point_value min_point = point else: raise ValueError(f"Formatting was wrong for {point}") for point in points: matcher = re.match("(\d+),(\d+)", point) if matcher is not None: y = int(matcher.group(1)) x = int(matcher.group(2)) current_point_value = abs(x_point - x) + abs(y_point -y) if point != min_point and current_point_value == min_point_value: min_point = None else: raise ValueError(f"Formatting was wrong for {point}") if min_point is not None: distances[min_point] += 1 return distances print(f"Grid dimensions: {min_x}, {min_y} to {max_x}, {max_y}") max_point_area = 0 max_point = None orig_distances = calculate_distances(min_x, max_x, min_y, max_y) bigger_distances = calculate_distances(min_x -1, max_x +1, min_y -1, max_y+1) distances = dict() for distance_key in orig_distances: if orig_distances[distance_key] == bigger_distances[distance_key]: distances[distance_key] = orig_distances[distance_key] for point in distances.keys(): if distances[point] > max_point_area: max_point = point max_point_area = distances[point] print(f"{point} = {distances[point]}") print(f"Max point is {max_point} with distance {max_point_area}")
3.203125
3
exercises/CursoemVideo/ex104.py
arthurguerra/cursoemvideo-python
0
12793112
def leiaInt(num):
    while True:
        teste = input(num)
        if teste.isnumeric():
            return int(teste)
        else:
            print('\033[0;31mERROR! Type a valid integer.\033[m')


# MAIN PROGRAM
n = leiaInt('Type a number: ')
print(f'You just typed the number {n}')
3.875
4
booking_app/views.py
alikhalilica/cilinic-temp
0
12793113
<filename>booking_app/views.py
from django.http import HttpResponse, HttpResponseRedirect
from .models import HelthDepartment, Patient
from .forms import PatientForm
from django.shortcuts import render, redirect, get_object_or_404
#import requests

# Create your views here.

# Retrieve
def index(request):
    #categories = Category.objects.all()
    helthDepartments = HelthDepartment.objects.all()
    patients = Patient.objects.all()
    form = PatientForm()
    context = {"form": form}
    return render(request, "booking_app/index.html", context)


# Delete
def delete(request, id):
    patient = Patient.objects.get(id=id)
    patient.delete()
    return HttpResponseRedirect(request.META.get('HTTP_REFERER'))


# Update
def edit(request, id):
    patients = Patient.objects.all()
    #selected_patient = patients.objects.get(id=id)
    selected_patient = get_object_or_404(Patient, id=id)
    if request.method == "GET":
        form = PatientForm(instance=selected_patient)
        context = {"patients": patients, "form": form, "selected_patient": selected_patient}
        return render(request, "edit.html", context)
    if request.method == "POST":
        form = PatientForm(request.POST)
        if form.is_valid():
            #print(form.__dict__)
            selected_patient.name = form.data["name"]
            selected_patient.contact = form.data["contact"]
            selected_patient.email = form.data["email"]
            selected_patient.booking_date = form.data["booking_date"]
            selected_patient.appointment_date = form.data["appointment_date"]
            selected_patient.helth_department = form.data["helth_department"]
            selected_patient.history = form.data["history"]
            selected_patient.save()
        return redirect("/")


# Insert
def add_(request):
    if request.method == "POST":
        form = PatientForm(data=request.POST)
        if form.is_valid():
            form.save()
    return render(request, "website/index.html")
2.28125
2
phrasebook/views/general.py
DanCatchpole/phrasebook-django
1
12793114
<filename>phrasebook/views/general.py<gh_stars>1-10 from django.contrib.auth.decorators import login_required from django.http import JsonResponse from django.shortcuts import render, redirect from django.utils.decorators import decorator_from_middleware from phrasebook.contexts import get_sidebar_args from phrasebook.middleware import FirstLoginMiddleware from phrasebook.models import * def index(request): return render(request, 'phrasebook/index.html') @login_required() @decorator_from_middleware(FirstLoginMiddleware) def app(request): words = Word.objects.filter(category__user=request.user).order_by("-created_on")[:10] return render(request, 'phrasebook/app.html', context=get_sidebar_args(request, {"words": words, "words__len": words.__len__(), "page_app": "active"})) @login_required() def first_login(request): user_languages = UserLanguage.objects.filter(user=request.user) if user_languages.__len__() > 0: return redirect('phrasebook:app') else: languages = list(Language.objects.all().order_by('english_name')) return render(request, 'phrasebook/firstlogin.html', context={'languages': languages}) @login_required() def lang_pick(request): return render(request, 'phrasebook/changelanguage.html', context=get_sidebar_args(request, {})) @login_required() def profile(request): ctx = get_sidebar_args(request, {}) ctx.update({'extra_langs': ctx['languages_len'] - 5, 'last_name': request.user.last_name}) return render(request, 'phrasebook/profile.html', context=ctx)
2.109375
2
ventanas_clases/bienvenidos.py
SilviaSanjose/Music_Project_DB_PyQt5
0
12793115
<filename>ventanas_clases/bienvenidos.py
from ventanas.ventana_bienvenida import Ui_music_window
from PyQt5.Qt import QTime, QDate


class VentanaMusical(Ui_music_window):

    def __init__(self, my_win, registro, listado, tabla, usuarios):
        self.registro = registro
        self.listado = listado
        self.tabla = tabla
        self.usuarios = usuarios
        self.my_win = my_win

    def setupUi(self, my_win):
        super().setupUi(my_win)
        self.sub_menu_inicio.aboutToShow.connect(self.volver_incio)
        self.sub_menu_registro.triggered.connect(self.mostrar_registrar_cancion)
        self.sub_meni_listado_canciones.triggered.connect(self.mostrar_listado_canciones)
        self.sub_menu_tabla_Canciones.triggered.connect(self.mostrar_tabla_canciones)
        self.sub_menu_registro_usuario.triggered.connect(self.registrar_usuario)

        # show the current date
        date = QDate.currentDate()
        date = str(date.toPyDate())
        self.txt_fecha.setText(date)

        # show the current time
        hora = QTime.currentTime()
        hora_txt = hora.toString("hh:mm")
        self.lcdNumber.display(hora_txt)

    def mostrar_registrar_cancion(self):
        self.registro.setupUi(self.my_win)

    def mostrar_listado_canciones(self):
        self.listado.setupUi(self.my_win)

    def mostrar_tabla_canciones(self):
        self.tabla.setupUi(self.my_win)

    def volver_incio(self):
        self.setupUi(self.my_win)

    def registrar_usuario(self):
        self.usuarios.setupUi(self.my_win)
2.5
2
qtgallery/qtscraper.py
ixjlyons/qtgallery
0
12793116
<filename>qtgallery/qtscraper.py
from sphinx_gallery import scrapers
from qtpy.QtWidgets import QApplication

__all__ = ['qtscraper', 'reset_qapp']


def qtscraper(block, block_vars, gallery_conf):
    """Basic implementation of a Qt window scraper.

    Looks for any non-hidden windows in the current application instance and
    uses ``grab`` to render an image of the window. The window is closed
    afterward, so you have to call ``show()`` again to render it in a
    subsequent cell.

    ``processEvents`` is called once in case events still need to propagate.

    Intended for use in ``image_scrapers`` in the sphinx-gallery configuration.
    """
    imgpath_iter = block_vars['image_path_iterator']

    app = QApplication.instance()
    if app is None:
        app = QApplication([])

    app.processEvents()

    # get top-level widgets that aren't hidden
    widgets = [w for w in app.topLevelWidgets() if not w.isHidden()]

    rendered_imgs = []
    for widg, imgpath in zip(widgets, imgpath_iter):
        pixmap = widg.grab()
        pixmap.save(imgpath)
        rendered_imgs.append(imgpath)
        widg.close()

    return scrapers.figure_rst(rendered_imgs, gallery_conf['src_dir'])


def reset_qapp(gallery_conf, fname):
    """Shutdown an existing QApplication and disable ``exec_``.

    Disabling ``QApplication.exec_`` means your example scripts can run the Qt
    event loop (so the scripts work when run normally) without blocking
    example execution by sphinx-gallery.

    Intended for use in ``reset_modules`` in the sphinx-gallery configuration.
    """
    try:
        # pyside-specific
        if qApp:
            qApp.shutdown()
    except NameError:
        pass
    QApplication.exec_ = lambda _: None
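# Example wiring in a Sphinx ``conf.py`` (a sketch; ``image_scrapers`` and
# ``reset_modules`` are standard sphinx-gallery configuration keys):
#
#     from qtgallery.qtscraper import qtscraper, reset_qapp
#
#     sphinx_gallery_conf = {
#         'image_scrapers': (qtscraper,),
#         'reset_modules': (reset_qapp,),
#     }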
2.390625
2
dimred/models/auto/trainer.py
kulkarnisp/dim-red
1
12793117
<filename>dimred/models/auto/trainer.py
import numpy as np
import matplotlib.pyplot as plt

import torch
from torch import nn
from torch.autograd import Variable

# from model import AutoEncoder
from utils import rolling_window


class Trainer():
    def __init__(self, netr, x=None, time_window=10, lr=0.01):
        self.tw = time_window
        self.x = self.process(x, time_window)
        self.net = netr  # AutoEncoder(time_window)
        self.criterion = nn.MSELoss()
        self.optimizer = torch.optim.Adam(self.net.parameters(), lr=lr)

    def process(self, x, time_window):
        xtrain = rolling_window(x, time_window)
        xtrain = xtrain.reshape(-1, 1, time_window)
        return torch.from_numpy(xtrain).float()

    def train(self, Nmax=500):
        self.net_loss = []
        for e in range(Nmax):
            var_x = Variable(self.x)
            out = self.net(var_x)
            loss = self.criterion(out, var_x)
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()
            self.net_loss.append(loss.item())
            if (e + 1) % 50 == 0:
                print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item()))

    def eval(self, x=0):
        net = self.net.eval()
        trainx = self.x
        if type(x) != int:
            trainx = self.process(x, self.tw)
        xans = net(trainx).detach().cpu().numpy()
        return xans[:, 0, 1]

    def plott(self):
        self.xi = self.x.detach().numpy()[:, 0, 0]
        self.xo = self.eval()
        plt.plot(self.xi, label="Input")
        plt.plot(self.xo, label="Reconstruction")
        plt.title("Train data reconstruction")
        plt.legend()
        plt.show()

    def plote(self):
        plt.plot((self.xi - self.xo)**2, label="Error")
        plt.title("Reconstruction Error")
        plt.show()
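if __name__ == '__main__':
    # Smoke test with a toy autoencoder (illustrative only; ``_TinyAE`` is a
    # placeholder since the original AutoEncoder import is commented out above).
    class _TinyAE(nn.Module):
        def __init__(self, tw):
            super().__init__()
            self.enc = nn.Linear(tw, 4)
            self.dec = nn.Linear(4, tw)

        def forward(self, x):
            return self.dec(torch.relu(self.enc(x)))

    signal = np.sin(np.linspace(0, 20, 200))
    trainer = Trainer(_TinyAE(10), signal, time_window=10)
    trainer.train(Nmax=100)
    trainer.plott()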
2.640625
3
news/api.py
nicbou/markdown-notes
121
12793118
<reponame>nicbou/markdown-notes
from django.conf.urls import url
from django.http import HttpResponse
from tastypie.authentication import ApiKeyAuthentication
from tastypie.authorization import Authorization
from tastypie.http import HttpForbidden
from tastypie.resources import ModelResource

from news.models import News


class NewsResource(ModelResource):
    """
    List news items that the authenticated user has not read yet, and expose
    a PATCH route (``.../read/``) for marking a single item as read.
    """
    class Meta:
        authentication = ApiKeyAuthentication()
        authorization = Authorization()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['patch']
        always_return_data = True
        include_resource_uri = False
        queryset = News.objects.all()
        fields = ['id', 'title', 'content', 'news_date']

    def prepend_urls(self):
        return [
            url(r"^(?P<resource_name>%s)/(?P<pk>.*?)/read/$" % self._meta.resource_name,
                self.wrap_view('mark_news_read'), name="api_mark_news_read"),
        ]

    def get_object_list(self, request):
        return super(NewsResource, self).get_object_list(request).exclude(user=request.user)

    def mark_news_read(self, request, **kwargs):
        """
        Mark the news item given by ``pk`` as read for the authenticated user
        by adding the user to the item's ``user`` relation, which removes it
        from the listing returned by ``get_object_list``.

        :param request:
        :param kwargs: must contain ``pk``, the id of the news item
        :return: 200 on success, 403 for anonymous users
        """
        self.method_check(request, allowed=['patch'])
        self.is_authenticated(request)

        user = getattr(request, 'user', None)
        if not user or user.is_anonymous():
            return HttpForbidden()

        News.objects.get(pk=int(kwargs['pk'])).user.add(user)
        return HttpResponse(status=200)
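# Example request against the custom route above (a sketch; the URL prefix
# and credentials are hypothetical and depend on how the Api instance is
# registered in urls.py):
#
#     PATCH /api/v1/news/42/read/?username=alice&api_key=<key>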
2.5
2
SHIMON/api/api_calls.py
dosisod/SHIMON
0
12793119
<filename>SHIMON/api/api_calls.py<gh_stars>0 from SHIMON.api.external import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent(), ApiAllfor(), ApiAddFriend(), ]
1.265625
1
tests/r/test_sparrows.py
hajime9652/observations
199
12793120
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import shutil
import sys
import tempfile

from observations.r.sparrows import sparrows


def test_sparrows():
  """Test module sparrows.py by downloading
  sparrows.csv and testing shape of
  extracted data has 116 rows and 3 columns
  """
  test_path = tempfile.mkdtemp()
  x_train, metadata = sparrows(test_path)
  try:
    assert x_train.shape == (116, 3)
  except:
    shutil.rmtree(test_path)
    raise
2.40625
2
utils/admin.py
drx/archfinch
1
12793121
<gh_stars>1-10 from django.contrib import admin def raw_id_fields_admin(*args): fields = args class cls(admin.ModelAdmin): raw_id_fields = fields return cls
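# Example usage of the admin factory above (a sketch; ``Item`` stands for a
# hypothetical model with ``user`` and ``category`` foreign keys):
#
#     admin.site.register(Item, raw_id_fields_admin('user', 'category'))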
1.90625
2
w3copentracing/text_propagator.py
eyjohn/w3copentracing-python
0
12793122
<filename>w3copentracing/text_propagator.py
from opentracing import SpanContextCorruptedException

from .context import SpanContext


class InvalidSpanContextException(Exception):
    """InvalidSpanContextException is raised when the provided span context
    instance is not a W3C span context."""
    pass


class TextPropagator(object):
    """A W3C Trace Context compatible Propagator for Format.TEXT_MAP."""

    def inject(self, span_context, carrier):
        if span_context is None:
            return
        if not isinstance(span_context, SpanContext):
            raise InvalidSpanContextException()
        carrier["trace-parent"] = ("00-" +
                                   span_context.trace_id.hex().upper() + "-" +
                                   span_context.span_id.hex().upper() +
                                   ("-01" if span_context.sampled else "-00"))
        if span_context.baggage is not None:
            carrier["trace-state"] = ",".join(k + "=" + v for (k, v) in span_context.baggage.items())

    def extract(self, carrier):
        trace_parent = carrier.get("trace-parent", None)
        if trace_parent is None:
            return None
        try:
            (version, trace_id, span_id, sampled) = trace_parent.split("-")
            trace_id = bytes.fromhex(trace_id)
            span_id = bytes.fromhex(span_id)
            if version != "00" or sampled not in ("00", "01") or len(trace_id) != 16 or len(span_id) != 8:
                raise SpanContextCorruptedException()
            sampled = (sampled == "01")

            baggage = None
            trace_state = carrier.get("trace-state", None)
            if trace_state is not None:
                baggage = dict((x.strip() for x in p.split("=")) for p in trace_state.split(","))

            return SpanContext(trace_id, span_id, sampled, baggage)
        except ValueError:
            raise SpanContextCorruptedException()
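# A round-trip sketch (illustrative; the positional SpanContext signature is
# inferred from the ``extract`` method above):
if __name__ == '__main__':
    ctx = SpanContext(bytes(16), bytes(8), True, {'key': 'value'})
    carrier = {}
    propagator = TextPropagator()
    propagator.inject(ctx, carrier)
    restored = propagator.extract(carrier)
    assert restored.sampled and restored.baggage == {'key': 'value'}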
2.203125
2
yyxutils/__init__.py
yanyuxiangToday/yyxutils
0
12793123
<reponame>yanyuxiangToday/yyxutils # -*- coding: utf-8 -*- ''' @Time : 2021/08/08 @Author : Yanyuxiang @Email : <EMAIL> @FileName: __init__.py @Software: PyCharm ''' from . import list from . import video from . import file from .utils import *
1.078125
1
autonetkit/design/validation/validation.py
ZhiHaoi/autonetkit
91
12793124
<filename>autonetkit/design/validation/validation.py
import logging

from autonetkit.design.utils import filters
from autonetkit.design.utils.graph_utils import connected_components
from autonetkit.network_model.topology import Topology
from autonetkit.network_model.types import DeviceType

logger = logging.getLogger(__name__)


def check_layer2_conn(topology: Topology) -> bool:
    """Check layer-2 connectivity: the topology must form a single connected
    component and, when routers are present, every host must peer with at
    least one router.

    @param topology: the topology to validate
    @return: True if all checks pass, False otherwise
    """
    valid = True

    components = connected_components(topology)
    if len(components) > 1:
        logger.warning("Disconnected network: %s components", len(components))
        valid = False

    routers = filters.routers(topology)
    routers_present = len(routers) > 0

    if routers_present:
        # check that all hosts connect to a router
        hosts = filters.hosts(topology)
        for host in hosts:
            peers = host.peer_nodes()
            if not any(n.type == DeviceType.ROUTER for n in peers):
                logger.warning("Host %s is not connected to a router", host.label)
                valid = False

    return valid
2.578125
3
tests/data/sample/__init__.py
SD2E/python-datacatalog
0
12793125
CREATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ] CLEAN = [ ('expt41', {'sample_id': 'sample1.lab.experiment.lab.4', 'control_type': 'HIGH_FITC', 'standard_type':'BEAD_FLUORESCENCE', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '1dcbf4fc-63f0-403d-9a3b-a4d9838b00b0'), ('expt42', {'sample_id': 'sample2.lab.experiment.lab.4', 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '596521f3-a72c-4b05-a315-34b51cde25de'), ('expt51', {'sample_id': 'sample1.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '22d9781d-3a53-4258-8d0b-948c28fc02f6'), ('expt52', {'sample_id': 'sample2.lab.experiment.lab.5', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '84ce0cfb-5240-4e4a-9642-0d37df354337') ] DELETES = CREATES UPDATES = [ ('expt1', {'sample_id': 'biofab.sample.900000', 'replicate': 0, 'child_of': ['102edd93-29d6-5483-b60b-8dfd4d094b9c']}, '103dfcc6-7dd8-54a1-b5d7-c36129511173'), ('expt2', {'sample_id': 'ginkgo.sample.ABCDEFG', 'control_type': 'BASELINE', 'child_of': ['102d8a08-034d-5c27-8d9c-a24bfcc94858']}, '1031e39a-ac24-57b6-8fb3-b8fb65708654'), ('expt3', {'sample_id': 'tacc.sample.8675309', 'replicate': 3, 'child_of': ['1027aa77-d524-5359-a802-a8008adaecb5']}, '103b4050-f7dc-5680-8445-cd14e092445a') ]
1.078125
1
scripts/lessons/Lesson11.py
njfritter/ml-mastery
7
12793126
#!/usr/bin/env python3

###################################
# Mastering ML Python Mini Course #
# Inspired by the project here: #
# https://s3.amazonaws.com/MLMastery/machine_learning_mastery_with_python_mini_course.pdf?__s=mxhvphowryg2sfmzus2q #
# By <NAME> #
# Project will soon be found at: #
# https://www.inertia7.com/projects/ #
####################################

# Welcome to my repo for the Mastering Machine Learning Python Mini Course
# Here I will be going through each part of the course
# So you can get a feel of the different parts

import numpy as np
import pandas as pd
from pandas import read_csv, Series
from sklearn.linear_model import LogisticRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.ensemble import GradientBoostingClassifier, VotingClassifier
from sklearn.model_selection import cross_val_score, KFold, train_test_split

# Define url and columns
url = 'https://goo.gl/bDdBiA'
columns = np.array(['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class'])

# Read in data
data = read_csv(url, names = columns)
array = data.values

# Divide data into attributes and predictor
X = array[:, 0:8]
y = array[:, 8]

####################################
# Lesson 11: Improve Accuracy with Ensemble Methods
####################################
'''
Here in the course would have been a section to do some ensemble model
training, as it represents an extra layer on top of traditional models
But since I have already done this, I will instead invoke the one ensemble
method I haven't tried: The Voting Classifier
This method involves literally combining different models
(such as Logistic Regression + Decision Tree)
versus many of the same models
(many Decision Trees in a Random Forest or Gradient Boosted Machine)
Here I will try out a bunch of different things and see where it goes!
Will use cross validation metrics here, nothing too fancy
'''

# Make list for models
models = np.empty([3, 2], dtype = object)

# Voting ensembles
# Number 1: Hard Vote (Predicted class labels used for majority rule voting)
models[0] = ['Voting Classifier 1', VotingClassifier(estimators = [
    ('lr', LogisticRegression(random_state = 1)),
    ('gbm', GradientBoostingClassifier(random_state = 1)),],
    voting = 'hard')]

# Number 2: Soft Vote (Argmax of sums of predicted probabilities used)
# Recommended for ensemble of well-calibrated classifiers
models[1] = ['Voting Classifier 2', VotingClassifier(estimators = [
    ('lda', LinearDiscriminantAnalysis()),
    ('lr', LogisticRegression(random_state = 1))],
    voting = 'soft')]

# Number 3: Soft Vote with weights
# Some models will be more valuable than others
models[2] = ['Voting Classifier 3', VotingClassifier(estimators = [
    ('lr', LogisticRegression(random_state = 1)),
    ('gbm', GradientBoostingClassifier(random_state = 1)),],
    voting = 'soft', weights = (0.25, 0.75))]

# Iterate through models, then fit & evaluate
for name, model in models:
    k_fold = KFold(n_splits = 10, random_state = 1)
    for scoring in ('accuracy', 'roc_auc', 'neg_log_loss'):
        try:
            result = cross_val_score(model, X, y, cv = k_fold, scoring = scoring)
            if scoring == 'accuracy':
                print("\n%s of %s model:\n %.3f%% (+/-%.3f%%)" % (scoring, name, result.mean() * 100.0, result.std() * 100.0))
            else:
                print("\n%s of %s model:\n %.3f (+/-%.3f)" % (scoring, name, result.mean(), result.std()))
        except AttributeError:
            print("The %s model cannot perform cross validation with the %s metric" % (name, scoring))
2.96875
3
legacy/regenesis/soap.py
crijke/regenesis
16
12793127
from threading import local from suds.client import * from suds import * import config from mongo import cache_result, load_cached, EMPTY from util import serialize_soap def get_test_service(): d = local() if d.__dict__.get('test') is None: d.__dict__['test'] = Client(config.get('test_service')) return d.__dict__.get('test') def get_recherche_service(): d = local() if d.__dict__.get('recherche') is None: d.__dict__['recherche'] = Client(config.get('recherche_service')) return d.__dict__.get('recherche') def get_download_service(): d = local() if d.__dict__.get('download') is None: d.__dict__['download'] = Client(config.get('download_service'), retxml=True) return d.__dict__.get('download') def get_export_service(): d = local() if d.__dict__.get('export') is None: d.__dict__['export'] = Client(config.get('export_service')) return d.__dict__.get('export') def get_statistic(stat_id): statistic = load_cached(stat_id=stat_id, _type='statistic') if not statistic: service = get_recherche_service().service data = service.StatistikKatalog(config.get('user'), config.get('password'), stat_id, "", "100", config.get('lang')) #print data if data.objektKatalogEintraege: statistic = serialize_soap(data.objektKatalogEintraege[0]) cache_result(statistic, stat_id=stat_id, _type='statistic') elif statistic == EMPTY: return None return statistic def get_table_meta(table_id): table = load_cached(table_id=table_id, _type='table_meta') if not table: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), table_id, "", "100", config.get('lang')) #print data if data.objektKatalogEintraege: table = serialize_soap(data.objektKatalogEintraege[0]) cache_result(table, table_id=table_id, _type='table_meta') elif table == EMPTY: return None return table def get_table(table_id, transposed=False, format="csv", from_year=1800, to_year=2100): table = load_cached(table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') if not table: service = get_download_service().service try: #table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, "All", # format, False, transposed, str(from_year), str(to_year), # "*", "*", "*", "*", "*", "*", "*", "*", "*", False, "", # config.get('lang')) table = service.TabellenDownload(config.get('user'), config.get('password'), table_id, "Alle", format, False, str(from_year), str(to_year), "*", "*", "*", "*", "*", "*", "*", "*", "*", False, "", config.get('lang')) parts = table.split(table.split("\r\n")[1]) csv = parts[2].split("\r\n\r\n", 1)[-1] table = unicode(csv.decode('latin-1')) except WebFault, wf: #print wf table = None cache_result(table, table_id=table_id, transposed=False, format=format, from_year=1800, to_year=2100, _type='table') elif table == EMPTY: return None return table def find_tables_by_statistic(stat_id): tables = load_cached(stat_id=stat_id, _type='table_list') if not tables: service = get_recherche_service().service data = service.TabellenKatalog(config.get('user'), config.get('password'), "%s-*" % stat_id, "code", "100", config.get('lang')) tables = [] #print data if hasattr(data.objektKatalogEintraege, 'abrufbar'): tables.append(serialize_soap(data.objektKatalogEintraege)) else: for t in data.objektKatalogEintraege: tables.append(serialize_soap(t)) cache_result(tables, stat_id=stat_id, _type='table_list') elif tables == EMPTY: return [] return tables def get_variable(var_value, var_name='code'): variable = load_cached(var_value=var_value, 
var_name=var_name, _type='feature') if not variable: service = get_recherche_service().service variable = service.MerkmalsKatalog(config.get('user'), config.get('password'), var_value, var_name, "*", "Alle", "100", config.get('lang'), ) if variable.objektKatalogEintraege: variable = serialize_soap(variable.objektKatalogEintraege[0]) data = service.MerkmalAuspraegungenKatalog(config.get('user'), config.get('password'), variable.get('code'), "*", "", "100", config.get('lang'), ) if data.merkmalAuspraegungenKatalogEintraege: attributes = [] for e in data.merkmalAuspraegungenKatalogEintraege: attributes.append(serialize_soap(e)) variable['attributes'] = attributes else: variable = None cache_result(variable, var_value=var_value, var_name=var_name, _type='feature') elif variable == EMPTY: return None return variable
2.125
2
pylimit/tests/test_pylimit.py
biplap-sarkar/pylim
17
12793128
<gh_stars>10-100 from pylimit import PyRateLimit from pylimit import PyRateLimitException import unittest import time class TestPyLimit(unittest.TestCase): def test_exception(self): limit = PyRateLimit(period=10, limit=10) self.assertRaises(PyRateLimitException, limit.attempt, 'test_namespace') def test_throttle(self): PyRateLimit.init(redis_host="localhost", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 20): time.sleep(.5) if x < 10: self.assertTrue(limit.attempt('test_namespace')) else: self.assertFalse(limit.attempt('test_namespace')) time.sleep(6) self.assertTrue(limit.attempt('test_namespace')) def test_peek(self): PyRateLimit.init(redis_host="localhost", redis_port=6379) limit = PyRateLimit(period=10, limit=10) for x in range(0, 10): self.assertTrue(limit.attempt('test_namespace2')) self.assertTrue(limit.is_rate_limited('test_namespace2')) time.sleep(10) self.assertFalse(limit.is_rate_limited('test_namespace2'))
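# --- Usage sketch (editor's illustration; mirrors the API exercised by the
# tests above and needs a Redis server on localhost:6379; the namespace key
# is invented). ---
if __name__ == '__main__':
    PyRateLimit.init(redis_host="localhost", redis_port=6379)
    limiter = PyRateLimit(period=60, limit=100)   # at most 100 attempts per minute
    namespace = 'api:user:42'
    if limiter.attempt(namespace):
        print("request allowed")
    else:
        print("rate limited:", limiter.is_rate_limited(namespace))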
2.453125
2
src/wagtail_marketing/helpers.py
mikiec84/wagtail-marketing-addons
0
12793129
<filename>src/wagtail_marketing/helpers.py from django.template.defaultfilters import truncatechars from wagtail.contrib.modeladmin.helpers import PageAdminURLHelper as AbstractPageAdminURLHelper from wagtail_marketing.conf import get_wagtail_marketing_setting class PageAdminURLHelper(AbstractPageAdminURLHelper): def get_action_url(self, action, *args, **kwargs): action_url = super().get_action_url(action, *args, **kwargs) if action == 'edit': action_url += '#tab-promote' return action_url class SeoHelper: def __init__(self, page_title, seo_title=None, search_description=None): self.page_title = page_title self.seo_title = seo_title self.search_description = search_description @property def title(self): return self.seo_title or self.page_title @property def description(self): return self.search_description or '' @property def truncated_title(self): return truncatechars( self.title, get_wagtail_marketing_setting('MAX_TITLE_LENGTH'), ) @property def truncated_description(self): return truncatechars( self.description, get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH'), ) @property def score(self): score = 0 if ( get_wagtail_marketing_setting('MIN_TITLE_LENGTH') <= len(self.title) <= get_wagtail_marketing_setting('MAX_TITLE_LENGTH') ): score += 10 title_word_count = self.title.split() if ( get_wagtail_marketing_setting('MIN_TITLE_WORD_COUNT') <= len(title_word_count) <= get_wagtail_marketing_setting('MAX_TITLE_WORD_COUNT') ): score += 40 if len(self.description) >= get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH'): score += 25 if ( get_wagtail_marketing_setting('MIN_DESCRIPTION_LENGTH') <= len(self.description) <= get_wagtail_marketing_setting('MAX_DESCRIPTION_LENGTH') ): score += 25 return score @property def icon(self): if self.score == 0: return '😱' elif self.score < 35: return '😢' elif self.score > 65: return '😄' return '😏'
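# --- Usage sketch (editor's illustration; the sample page data is invented).
# `title` and `description` fall back without touching settings; `score`,
# `icon` and the truncated variants additionally read the configurable
# length thresholds. ---
if __name__ == '__main__':
    helper = SeoHelper(
        page_title="About us",
        seo_title="About Acme Corp - Handmade Widgets",
    )
    print(helper.title)        # seo_title wins over page_title when present
    print(helper.description)  # empty string: no search_description given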
2.109375
2
src/metric_helpers.py
florianvonunold/DYME
5
12793130
<filename>src/metric_helpers.py import numpy as np def cosine_similarity(a, b): return np.sum(a * b, axis=1) / np.sqrt((np.sum(a * a, axis=1) * np.sum(b * b, axis=1))) def _get_emojis(): # All emojis in the order returned by deepmoji EMOJIS = ":joy: :unamused: :weary: :sob: :heart_eyes: :pensive: " + \ ":ok_hand: :blush: :heart: :smirk: :grin: :notes: :flushed: " + \ ":100: :sleeping: :relieved: :relaxed: :raised_hands: " + \ ":two_hearts: :expressionless: :sweat_smile: :pray: " + \ ":confused: :kissing_heart: :heartbeat: :neutral_face: " + \ ":information_desk_person: :disappointed: :see_no_evil: " + \ ":tired_face: :v: :sunglasses: :rage: :thumbsup: :cry: " + \ ":sleepy: :yum: :triumph: :hand: :mask: :clap: :eyes: :gun: " + \ ":persevere: :smiling_imp: :sweat: :broken_heart: " + \ ":yellow_heart: :musical_note: :speak_no_evil: :wink: :skull: " + \ ":confounded: :smile: :stuck_out_tongue_winking_eye: :angry: " + \ ":no_good: :muscle: :facepunch: :purple_heart: " + \ ":sparkling_heart: :blue_heart: :grimacing: :sparkles:" EMOJIS = EMOJIS.split(' ') return EMOJIS def _get_emoji_to_sentiment_dict(): # How detected emojis map to sentiment score emoji_to_sentiment = { # very strongly positive ':kissing_heart:': 1, ':thumbsup:': 1, ':ok_hand:': 1, ':smile:': 1, # strongly positive ':blush:': 0.75, ':wink:': 0.75, ':muscle:': 0.75, ':grin:': 0.75, ':heart_eyes:': 0.75, ':100:': 0.75, # positive ':smirk:': 0.5, ':stuck_out_tongue_winking_eye:': 0.5, ':sunglasses:': 0.5, ':relieved:': 0.5, ':relaxed:': 0.5, ':blue_heart:': 0.5, ':two_hearts:': 0.5, ':heartbeat:': 0.5, ':yellow_heart:': 0.5, # negative ':disappointed:': -0.5, ':eyes:': -0.5, ':expressionless:': -0.5, ':sleeping:': -0.5, ':grimacing:': -0.5, # strongly negative ':neutral_face:': -0.75, ':confused:': -0.75, ':triumph:': -0.75, ':confounded:': -0.75, # very strongly negative ':unamused:': -1, ':angry:': -1, # removing ':hand:': -1 due to ambiguity ':rage:': -1 } return emoji_to_sentiment def _get_sentiment_multiplier(): EMOJIS = _get_emojis() emojis_to_sentiment = _get_emoji_to_sentiment_dict() sentiment_multiplier = np.zeros(len(EMOJIS)) for emoji, reward_val in emojis_to_sentiment.items(): loc = EMOJIS.index(emoji) sentiment_multiplier[loc] = reward_val return sentiment_multiplier
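# --- Usage sketch (editor's illustration; the probability matrix is a dummy
# stand-in for real DeepMoji output). ---
if __name__ == '__main__':
    emojis = _get_emojis()                     # the 64 emoji classes
    multiplier = _get_sentiment_multiplier()   # shape (64,), zero for unmapped emojis
    probs = np.full((2, len(emojis)), 1.0 / len(emojis))
    print(probs.dot(multiplier))               # one sentiment score per sample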
2.859375
3
Algorithm/BasicAlgorithm/ArrayAndSort/select_sort.py
hqzhang83/Everything101
0
12793131
def selectSort(nums):
    # Classic in-place selection sort: on each pass, find the minimum of the
    # unsorted suffix and swap it into position i. O(n^2) comparisons, but at
    # most n - 1 swaps (the original swapped on every out-of-order pair).
    l = len(nums)
    for i in range(l):
        min_idx = i
        for j in range(i + 1, l):
            if nums[j] < nums[min_idx]:
                min_idx = j
        if min_idx != i:
            nums[i], nums[min_idx] = nums[min_idx], nums[i]
    return nums
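# --- Usage sketch (editor's illustration). ---
if __name__ == '__main__':
    print(selectSort([5, 2, 9, 1, 5, 6]))   # -> [1, 2, 5, 5, 6, 9]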
3.65625
4
valid_braces.py
guesschess/wars
0
12793132
def validBraces(string):
    # Standard stack-based matching: push openers, pop on each closer and
    # check it matches; the string is valid iff every closer pairs up and
    # the stack ends empty. Assumes the input contains only the six brace
    # characters, per the kata. (The original digit-pairing trick returned
    # wrong answers for sibling groups such as "(())(())".)
    pairs = {')': '(', ']': '[', '}': '{'}
    stack = []
    for ch in string:
        if ch in '([{':
            stack.append(ch)
        elif not stack or stack.pop() != pairs[ch]:
            return False
    return not stack
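# --- Usage sketch (editor's illustration). ---
if __name__ == '__main__':
    print(validBraces("(){}[]"))    # True
    print(validBraces("([{}])"))    # True
    print(validBraces("(}"))        # False
    print(validBraces("(())(())"))  # True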
3.53125
4
Chapter14/milk.py
tongni1975/Hands-On-Machine-Learning-on-Google-Cloud-Platform
11
12793133
<gh_stars>10-100
import pandas as pd
import matplotlib.pyplot as plt
import numpy
from sklearn.linear_model import LinearRegression
from statsmodels.tsa.seasonal import seasonal_decompose

# Load the monthly milk-production series with a datetime index.
data = pd.read_csv('milk-production-pounds.csv', parse_dates=True,
                   index_col='DateTime', names=['DateTime', 'Milk'], header=None)
print(data.info())
print(data.head())
print(data.describe())
data.plot()
plt.show()

# Fit a linear trend against the sample index and overlay it on the series.
X = [i for i in range(0, len(data))]
X = numpy.reshape(X, (len(X), 1))
y = data.values
LModel = LinearRegression()
LModel.fit(X, y)
print(LModel.intercept_, LModel.coef_)
trend = LModel.predict(X)
plt.plot(y)
plt.plot(trend)
plt.show()

# Additive decomposition, then subtract the seasonal component.
DecompDataAdd = seasonal_decompose(data, model='additive')
DecompDataAdd.plot()
plt.show()
SeasRemov = data - DecompDataAdd.seasonal
SeasRemov.plot()
plt.show()

# Multiplicative decomposition for comparison.
DecompDataMult = seasonal_decompose(data, model='multiplicative')
DecompDataMult.plot()
plt.show()
2.96875
3
setup.py
afcarl/tensorcv
1
12793134
<reponame>afcarl/tensorcv<gh_stars>1-10
# console_scripts entry points are a setuptools feature, so setup() must
# come from setuptools rather than plain distutils; version should be a
# string.
from setuptools import setup

setup(
    name='tensorcv',
    version='0.1',
    packages=['tensorcv'],
    entry_points="""
        [console_scripts]
        tcv=tensorcv:cli
    """
)
1.273438
1
hdbo/exceptions.py
eric-vader/HD-BO-Additive-Models
5
12793135
# For BO early termination
class EarlyTerminationException(Exception):
    def __init__(self, message, metrics):
        # Call the base class constructor with the parameters it needs
        super(EarlyTerminationException, self).__init__(message)
        self.metrics = metrics
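# --- Usage sketch (editor's illustration; the metrics payload is invented). ---
if __name__ == '__main__':
    try:
        raise EarlyTerminationException("budget exhausted", metrics={"best_y": 0.42})
    except EarlyTerminationException as e:
        print(e, e.metrics)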
2.6875
3
038. Count and Say.py
95subodh/Leetcode
2
12793136
#The count-and-say sequence is the sequence of integers beginning as follows: #1, 11, 21, 1211, 111221, ... # #1 is read off as "one 1" or 11. #11 is read off as "two 1s" or 21. #21 is read off as "one 2, then one 1" or 1211. #Given an integer n, generate the nth sequence. class Solution(object): def countAndSay(self, A): """ :type n: int :rtype: str """ z=["0","1"] for i in xrange(1,A+1): s,n,c="",z[i][0],1 for j in xrange(1,len(z[i])): if z[i][j]==n:c+=1 else: s+=str(c)+n n=z[i][j] c=1 s+=str(c)+n z.append(s) return z[A]
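# --- Usage sketch (editor's illustration; Python 2, matching the xrange
# calls above). ---
if __name__ == '__main__':
    print Solution().countAndSay(4)   # -> "1211"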
3.78125
4
CodeIA/venv/Lib/site-packages/coremltools/converters/mil/mil/ops/defs/random.py
Finasty-lab/IA-Python
11,356
12793137
# Copyright (c) 2020, Apple Inc. All rights reserved. # # Use of this source code is governed by a BSD-3-clause license that can be # found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause from coremltools.converters.mil.mil.types.symbolic import any_symbolic from coremltools.converters.mil.mil import get_new_symbol, get_new_variadic_symbol from ._op_reqs import * """ Random Op Superclass """ class RandomDistribution(Operation): input_spec = InputSpec(shape=IntTensorInputType(),) def __init__(self, **kwargs): super(RandomDistribution, self).__init__(**kwargs) def type_inference(self): if any_symbolic(self.shape.shape): # We can't infer any shape if shape has variable length. return types.tensor(types.fp32, (get_new_variadic_symbol(),)) # shape has fixed length here. if self.shape.sym_val is None: shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])]) return types.tensor(types.fp32, shape) return types.tensor(types.fp32, tuple(self.shape.sym_val.tolist())) """ Random Op Implementation(s) """ @register_op( doc_str=r""" Returns a tensor with specified shape with random values from a Bernoulli distribution. .. math:: f(k) = \begin{cases}1-p &\text{if } k = 0\\ p &\text{if } k = 1\end{cases} for :math:`k` in :math:`\{0, 1\}`. Parameters ---------- shape: <K, i32>, required Target output tensor shape. K is the rank of the output tensor. shape[k] > 0 for k = 0,..., K-1. prob: const<f32>, optional The probability of sampling 1. Defaults to 0.5. seed: const<i32>, optional Seed to create a reproducible sequence of values across multiple invokes. Returns ------- <*, T>, a tensor of given target output shape filled with random values. See Also -------- random_categorical, random_normal, random_uniform """ ) class random_bernoulli(RandomDistribution): input_spec = ( InputSpec( shape=IntTensorInputType(), prob=FloatInputType(const=True, default=0.5), seed=IntInputType(const=True, default=-1), ) + RandomDistribution.input_spec ) def __init__(self, **kwargs): super(random_bernoulli, self).__init__(**kwargs) @register_op( doc_str=r""" Returns random values from a categorical distribution. Parameters ---------- shape: <*D_in, T> N-dimensional tensor, one of logits (event log-probabilities) or probs (event probabilities). The first N - 1 dimensions specifies distributions, the last dimension represents a vector of probabilities. mode: const<str>, optional One of ['logits', 'probs']. Defaults to 'logits'. size: const<i32>, optional Number of samples to draw. Defaults to 1. seed: const<i32>, optional Seed to create a reproducible sequence of values across multiple invokes. Returns ------- <*D_in[:-1] + [size], T>, a tensor of given target output shape filled with random values. See Also -------- random_bernoulli, random_normal, random_uniform """ ) class random_categorical(Operation): input_spec = InputSpec( x=TensorInputType(), mode=StringInputType(const=True, default="logits"), size=IntInputType(const=True, default=1), seed=IntInputType(const=True, default=-1), ) def __init__(self, **kwargs): super(random_categorical, self).__init__(**kwargs) def type_inference(self): output_shape = self.x.shape[:-1] + (self.size.val,) return types.tensor(types.fp32, output_shape) @register_op( doc_str=r""" Returns a tensor with specified shape with random values from a normal distribution. .. math:: f(x) = \frac{\exp(-x^2/2)}{\sqrt{2\pi}} for a real number :math:`x`. Parameters ---------- shape: <K, i32>, required Target output tensor shape. K is the rank of the output tensor. 
shape[k] > 0 for k = 0,..., K-1.
    mean: const<f32>, optional
        The mean (center) of the normal distribution. Defaults to 0.0.
    stddev: const<f32>, optional
        The standard deviation (width) of the normal distribution. Defaults to 1.0.
    seed: const<i32>, optional
        Seed to create a reproducible sequence of values across multiple invokes.

    Returns
    -------
    <*, T>, a tensor of given target output shape filled with random values.

    See Also
    --------
    random_categorical, random_bernoulli, random_uniform
    """
)
class random_normal(RandomDistribution):
    input_spec = (
        InputSpec(
            shape=IntTensorInputType(),
            mean=FloatInputType(const=True, default=0.0),
            stddev=FloatInputType(const=True, default=1.0),
            seed=IntInputType(const=True, default=-1),
        )
        + RandomDistribution.input_spec
    )

    def __init__(self, **kwargs):
        super(random_normal, self).__init__(**kwargs)


@register_op(
    doc_str=r"""
    Returns a tensor with specified shape with random values from a uniform distribution.

    .. math::
       p(x) = \frac{1}{high - low}

    for a real number :math:`x`.

    Parameters
    ----------
    shape: <K, i32>, required
        Target output tensor shape.
        K is the rank of the output tensor.
        shape[k] > 0 for k = 0,..., K-1.
    low: const<f32>, optional
        Lower boundary of the output interval (inclusive). Defaults to 0.0.
    high: const<f32>, optional
        Upper boundary of the output interval (exclusive). Defaults to 1.0.
    seed: const<i32>, optional
        Seed to create a reproducible sequence of values across multiple invokes.

    Returns
    -------
    <*, T>, a tensor of given target output shape filled with random values.

    See Also
    --------
    random_categorical, random_bernoulli, random_normal
    """
)
class random_uniform(RandomDistribution):
    input_spec = (
        InputSpec(
            shape=IntTensorInputType(),
            low=FloatInputType(const=True, default=0.0),
            high=FloatInputType(const=True, default=1.0),
            seed=IntInputType(const=True, default=-1),
        )
        + RandomDistribution.input_spec
    )

    def __init__(self, **kwargs):
        super(random_uniform, self).__init__(**kwargs)
1.84375
2
gimp_plugin/create_mask.py
open-dynamic-robot-initiative/trifinger_object_tracking
6
12793138
#!/usr/bin/env python from gimpfu import * def create_mask_from_selection(image, drawable): # image = gimp.image_list()[0] # drawable = pdb.gimp_image_get_active_drawable(image) pdb.gimp_image_undo_group_start(image) pdb.gimp_edit_bucket_fill(drawable, 1, 0, 100.0, 255.0, 0, 0.0, 0.0) pdb.gimp_selection_invert(image) pdb.gimp_edit_bucket_fill(drawable, 0, 0, 100.0, 255.0, 0, 0.0, 0.0) # change to binary pallete pdb.gimp_convert_indexed(image, NO_DITHER, 0, 2, False, True, "") pdb.gimp_image_undo_group_end(image) register( "create_mask_from_selection", "Create mask image from selection", "Create a black/white mask from the current selection", "<NAME>", "<NAME>", "2020", "<Image>/File/MaskImage", "RGB*, GRAY*", [], [], create_mask_from_selection, ) main()
3.09375
3
solutions/Craftingtables.py
aalekhpatel07/IEEExtreme14
2
12793139
<reponame>aalekhpatel07/IEEExtreme14 def solve(c, n, p, w): """ Solve the problem here. :return: The expected output. """ if c > w: return 0 leading_ps = w // p tables = 0 zeroes_available = n - (leading_ps + 1) if w % p == 0: zeroes_available += 1 while zeroes_available > 0: number_of_zeroes_filled = min(w // c, zeroes_available) # w % p chips_used = number_of_zeroes_filled * c # first chip drawn from the non-zero entry if exists. chips_used -= (w % p) w -= number_of_zeroes_filled * c tables += number_of_zeroes_filled if number_of_zeroes_filled < zeroes_available: return tables zeroes_available = 1 + chips_used // p # now assume no zeroes available already. return tables def driver(): """ Make sure this driver returns the result. :return: result - Result of computation. """ c, n, p, w = list(map(int, input().split(' '))) result = solve(c, n, p, w) print(result) return result def main(): return driver() if __name__ == '__main__': main()
3.625
4
Ranking/tests/test.py
iznokill/Epit-as-Ranking
0
12793140
from Ranking.src.Ranker import Ranker, add_player


def test_update(file, update_text, expected):
    res = Ranker(file, update_text)
    assert res == expected, "Update failed\ngot:\n" + str(res) + "\nexpected:\n" + expected


def test_add(file, player, expected):
    res = add_player(file, player)
    assert res == expected, "Add failed\ngot:\n" + str(res) + "\nexpected:\n" + expected


if __name__ == "__main__":
    test_add("test_files/empty.json", "youssef", "{'players': [{'name': 'youssef', 'rank': 0, 'points': 0}]}")
2.640625
3
test/extract_examples.py
poponealex/files_renamer
0
12793141
<reponame>poponealex/files_renamer from pathlib import Path import re import context from src.goodies import print_warning SECTION_PATTERN = r"""(?m)^### (.+)\n #### Example\n original path \| new name ---\|--- ((?:.+\n)*) #### Result\n original path \| new path ---\|--- ((?:.+\n)*)""" def extract_rows(table): if not table: return [] return [tuple(row.split("|")) for row in table.strip().split("\n")] def main(path): result = [] text = path.read_text() for match in re.finditer(SECTION_PATTERN, text): (title, example, expected) = match.groups() result.append( { "title": title, "example": extract_rows(example), "expected": extract_rows(expected), } ) if len(result) < len(re.findall("(?m)^### ", text)): print_warning('"examples.md" has more sections than matches.') return result if __name__ == "__main__": for (i, test_data) in enumerate(main(Path("test/examples.md")), 1): test_data["#"] = i print("Section {#}: {title}".format(**test_data)) print("Example: {example}".format(**test_data)) print("Expected: {expected}".format(**test_data))
2.84375
3
tests/test_auth_endpoint.py
ephes/django-indieweb
9
12793142
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_django-indieweb
------------

Tests for `django-indieweb` auth endpoint.
"""

import pytz

from datetime import datetime
from datetime import timedelta
from urllib.parse import parse_qs
from urllib.parse import urlparse

from django.conf import settings
from django.test import TestCase
from django.utils.http import urlencode
from django.contrib.auth.models import User
from django.urls import reverse

from indieweb.models import Auth


class TestIndiewebAuthEndpoint(TestCase):
    def setUp(self):
        self.username = "foo"
        self.email = "<EMAIL>"
        self.password = "password"
        self.user = User.objects.create_user(self.username, self.email, self.password)
        self.base_url = reverse("indieweb:auth")
        url_params = {
            "me": "http://example.org",
            "client_id": "https://webapp.example.org",
            "redirect_uri": "https://webapp.example.org/auth/callback",
            "state": 1234567890,
            "scope": "post",
        }
        self.endpoint_url = "{}?{}".format(self.base_url, urlencode(url_params))

    def test_not_authenticated(self):
        """
        Assure we are redirected to login if we try to get an auth-code
        from the indieweb auth endpoint and are not yet logged in.
        """
        response = self.client.get(self.endpoint_url)
        self.assertEqual(response.status_code, 302)
        self.assertTrue("login" in response.url)

    def test_authenticated_without_params(self):
        """
        Assure a get without the proper parameters raises an error.
        """
        self.client.login(username=self.username, password=self.password)
        response = self.client.get(self.base_url)
        self.assertEqual(response.status_code, 404)
        self.assertTrue("missing" in response.content.decode("utf-8"))

    def test_authenticated(self):
        """Assure we get back an auth code if we are authenticated."""
        self.client.login(username=self.username, password=<PASSWORD>)
        response = self.client.get(self.endpoint_url)
        self.assertEqual(response.status_code, 302)
        self.assertTrue("code" in response.url)

    def test_get_or_create(self):
        """
        Test the get-or-create logic for the Auth object.
        """
        self.client.login(username=self.username, password=<PASSWORD>)
        for i in range(2):
            response = self.client.get(self.endpoint_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue("code" in response.url)

    def test_auth_timeout_reset(self):
        """
        Test that the timeout is reset on new authentication.
        """
        self.client.login(username=self.username, password=<PASSWORD>)
        response = self.client.get(self.endpoint_url)
        data = parse_qs(urlparse(response.url).query)
        auth = Auth.objects.get(owner=self.user, me=data["me"][0])
        timeout = getattr(settings, "INDIWEB_AUTH_CODE_TIMEOUT", 60)
        auth.created = auth.created - timedelta(seconds=timeout + 10)
        auth.save()
        response = self.client.get(self.endpoint_url)
        auth = Auth.objects.get(owner=self.user, me=data["me"][0])
        self.assertTrue((datetime.now(pytz.utc) - auth.created).seconds <= timeout)
2.53125
3
scripts/viz/controller.py
GiorgiaAuroraAdorni/learning-relative-interactions-through-imitation
1
12793143
import numpy as np from kinematics import to_robot_velocities from viz.env import Viz class ControlSignalsViz(Viz): def __init__(self, marxbot, time_window=10): super().__init__() self.marxbot = marxbot self.marxbot_max_vel = 30 self.time_window = time_window def _show(self, env): self.ax = env.get_axes() self.ax.set_title('Control signals over time') self.ax.set_xlabel("time [s]") self.ax.set_xlim(-self.time_window, 0) self.ax.grid(True) self.n_dims = 2 self.n_samples = round(self.time_window / env.refresh_interval) self.time = np.linspace(-self.time_window, 0, self.n_samples) self.readings = np.full((self.n_dims, self.n_samples), np.nan) labels = ["linear velocity [cm/s]", "angular velocity [rad/s]"] colors = ["tab:blue", "tab:orange"] mins = [-self.marxbot_max_vel, -10] maxs = [+self.marxbot_max_vel, +10] self.plots = [] for i in range(self.n_dims): ax = self.ax if i > 0: ax = ax.twinx() ax.set_ylabel(labels[i], color=colors[i]) ax.tick_params(axis='y', labelcolor=colors[i]) ax.tick_params(labelsize=8) plot = ax.plot(self.time, self.readings[i], color=colors[i])[0] ax.set_ylim( mins[i] - 0.1 * abs(mins[i]), maxs[i] + 0.1 * abs(maxs[i]) ) self.plots.append(plot) def _update(self): robot_velocities = to_robot_velocities(*self.marxbot.wheel_target_speeds) self.readings = np.roll(self.readings, -1, axis=1) self.readings[:, -1] = robot_velocities for i in range(self.n_dims): self.plots[i].set_ydata(self.readings[i])
2.515625
3
01-01. variable and constant.py
dhchoi82/mathematics-python
3
12793144
<reponame>dhchoi82/mathematics-python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Variables
'''
Python lets you declare and use variables. A variable name is made up of
letters and digits; it may not start with a digit or with any special
character other than '_', and keywords that already have a fixed role
cannot be used as variable names.
'''
for a in (1, 3, 10, -2):
    print('a =', a)
    print('a × 3 + 1 =', a * 3 + 1, '\n')

# Constants
'''
Python's core syntax offers no way to declare a named constant the way you
declare a variable. By convention, a variable whose name consists only of
uppercase letters and '_' is treated as a constant: no value other than the
one first assigned is ever assigned to it again.
'''
PI = 3.14159265
for r in (0.1, 3, 15, 100):
    print('r =', r)
    print('PI * (r ** 2) =', PI * (r ** 2), '\n')

INCH_TO_CENTIMETER = 2.54
for inch in range(1, 11):
    print(inch, 'in ==',
          inch * INCH_TO_CENTIMETER, 'cm\n')
3.984375
4
openapi_core/enums.py
andyceo/openapi-core
0
12793145
<gh_stars>0 from enum import Enum class ParameterLocation(Enum): PATH = 'path' QUERY = 'query' HEADER = 'header' COOKIE = 'cookie' @classmethod def has_value(cls, value): return (any(value == item.value for item in cls)) class ParameterStyle(Enum): MATRIX = 'matrix' LABEL = 'label' FORM = 'form' SIMPLE = 'simple' SPACE_DELIMITED = 'spaceDelimited' PIPE_DELIMITED = 'pipeDelimited' DEEP_OBJECT = 'deepObject' class SchemaType(Enum): INTEGER = 'integer' NUMBER = 'number' STRING = 'string' BOOLEAN = 'boolean' ARRAY = 'array' OBJECT = 'object' class SchemaFormat(Enum): NONE = None INT32 = 'int32' INT64 = 'int64' FLOAT = 'float' DOUBLE = 'double' BYTE = 'byte' BINARY = 'binary' DATE = 'date' DATETIME = 'date-time' PASSWORD = 'password'
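# --- Usage sketch (editor's illustration). ---
if __name__ == '__main__':
    print(ParameterLocation.has_value('query'))   # True
    print(ParameterLocation.has_value('body'))    # False
    print(SchemaType('integer'))                  # SchemaType.INTEGER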
2.953125
3
Session09_AWSSagemakerAndLargeScaleModelTraining/utils_cifar.py
garima-mahato/TSAI_EMLO1.0
0
12793146
import torch import torchvision import torchvision.transforms as transforms import matplotlib.pyplot as plt import numpy as np classes = ('beaver','dolphin','otter','seal','whale','aquarium fish','flatfish','ray','shark','trout','orchids','poppies','roses','sunflowers','tulips','bottles','bowls','cans','cups','plates','apples','mushrooms','oranges','pears','sweet peppers','clock','computer keyboard','lamp','telephone','television','bed','chair','couch','table','wardrobe','bee','beetle','butterfly','caterpillar','cockroach','bear','leopard','lion','tiger','wolf','bridge','castle','house','road','skyscraper','cloud','forest','mountain','plain','sea','camel','cattle','chimpanzee','elephant','kangaroo','fox','porcupine','possum','raccoon','skunk','crab','lobster','snail','spider','worm','baby','boy','girl','man','woman','crocodile','dinosaur','lizard','snake','turtle','hamster','mouse','rabbit','shrew','squirrel','maple','oak','palm','pine','willow','bicycle','bus','motorcycle','pickup truck','train','lawn-mower','rocket','streetcar','tank','tractor') def _get_transform(): return transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) def get_train_data_loader(): transform = _get_transform() trainset = torchvision.datasets.CIFAR100(root='./data', train=True, download=True, transform=transform) return torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=2) def get_test_data_loader(): transform = _get_transform() testset = torchvision.datasets.CIFAR100(root='./data', train=False, download=True, transform=transform) return torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=2) # function to show an image def imshow(img): img = img / 2 + 0.5 # unnormalize npimg = img.numpy() plt.imshow(np.transpose(npimg, (1, 2, 0)))
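# --- Usage sketch (editor's illustration; downloads CIFAR-100 on first run). ---
if __name__ == '__main__':
    loader = get_train_data_loader()
    images, labels = next(iter(loader))
    imshow(torchvision.utils.make_grid(images))
    plt.show()
    # Map numeric targets through the module's `classes` tuple. Note the
    # tuple above is grouped by superclass, which may not match torchvision's
    # numeric label order, so treat the printed names as a sketch.
    print(' '.join(classes[j] for j in labels.tolist()))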
2.609375
3
tests/grammpy_test/oldapi_tests/grammar_copy_tests/GrammarShallowCopyTest.py
PatrikValkovic/grammpy
1
12793147
<reponame>PatrikValkovic/grammpy<gh_stars>1-10 #!/usr/bin/env python """ :Author <NAME> :Created 16.08.2017 19:16 :Licence MIT Part of grammpy """ from copy import copy from unittest import main, TestCase from grammpy.old_api import * class A(Nonterminal): pass class B(Nonterminal): pass class RuleAtoB(Rule): rule = ([A], [B]) class GrammarCopyTest(TestCase): def __init__(self, methodName='runTest'): super().__init__(methodName) self.g = Grammar() def setUp(self): self.g = Grammar(terminals=[0, 1], nonterminals=[A, B], rules=[RuleAtoB], start_symbol=A) def test_shouldNotDeleteTerminals(self): g = copy(self.g) g.remove_term() self.assertTrue(self.g.have_term([0, 1])) self.assertFalse(g.have_term(0)) self.assertFalse(g.have_term(1)) def test_shouldNotDeleteNonterminals(self): g = copy(self.g) g.remove_nonterm() self.assertTrue(self.g.have_nonterm([A, B])) self.assertFalse(g.have_term(A)) self.assertFalse(g.have_term(B)) def test_shouldNotDeleteRules(self): g = copy(self.g) g.remove_rule() self.assertTrue(self.g.have_rule(RuleAtoB)) self.assertFalse(g.have_rule(RuleAtoB)) def test_shouldNotChangeStartSymbol(self): g = copy(self.g) g.start_set(None) self.assertTrue(self.g.start_is(A)) self.assertFalse(g.start_is(A)) if __name__ == '__main__': main()
2.515625
3
Nduja/user_info_retriever/__init__.py
herrBez/Nduja
2
12793148
<reponame>herrBez/Nduja<gh_stars>1-10
"""This package contains the definition and implementation of the user info
retriever classes, i.e., classes that fetch account information from the
corresponding APIs."""
0.964844
1
examples/ffs_config.py
textileio/pygate-gRPC
0
12793149
<filename>examples/ffs_config.py from pygate_grpc.client import PowerGateClient client = PowerGateClient("127.0.0.1:5002", False) print("Creating a new FFS:") newFfs = client.ffs.create() tk = newFfs.token print("Token: " + tk) print("Using the new FFS token to request the default config:") defaultConfig = client.ffs.default_config(tk) print(defaultConfig) print("Loading new default config...") with open("cidconfig.json", "r") as f: config = f.read() client.ffs.set_default_config(config, tk) defaultConfig = client.ffs.default_config(tk) print("Updated default config:") print(defaultConfig)
2.421875
2
ch08/myproject_virtualenv/src/django-myproject/myproject/apps/ideas1/forms.py
PacktPublishing/Django-3-Web-Development-Cookbook
159
12793150
<reponame>PacktPublishing/Django-3-Web-Development-Cookbook from django import forms from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model from crispy_forms import bootstrap, helper, layout from mptt.forms import TreeNodeChoiceField from myproject.apps.categories1.models import Category from .models import Idea, RATING_CHOICES from ..core.form_fields import MultipleChoiceTreeField User = get_user_model() class IdeaForm(forms.ModelForm): categories = MultipleChoiceTreeField( label=_("Categories"), required=False, queryset=Category.objects.all(), ) class Meta: model = Idea exclude = ["author"] def __init__(self, request, *args, **kwargs): self.request = request super().__init__(*args, **kwargs) title_field = layout.Field("title") content_field = layout.Field("content", rows="3") main_fieldset = layout.Fieldset(_("Main data"), title_field, content_field) picture_field = layout.Field("picture") format_html = layout.HTML( """{% include "ideas1/includes/picture_guidelines.html" %}""" ) picture_fieldset = layout.Fieldset( _("Picture"), picture_field, format_html, title=_("Image upload"), css_id="picture_fieldset", ) categories_field = layout.Field( "categories", template="core/includes/checkboxselectmultiple_tree.html" ) categories_fieldset = layout.Fieldset( _("Categories"), categories_field, css_id="categories_fieldset" ) submit_button = layout.Submit("save", _("Save")) actions = bootstrap.FormActions(submit_button, css_class="my-4") self.helper = helper.FormHelper() self.helper.form_action = self.request.path self.helper.form_method = "POST" self.helper.layout = layout.Layout( main_fieldset, picture_fieldset, categories_fieldset, actions, ) def save(self, commit=True): instance = super().save(commit=False) instance.author = self.request.user if commit: instance.save() self.save_m2m() return instance class IdeaFilterForm(forms.Form): author = forms.ModelChoiceField( label=_("Author"), required=False, queryset=User.objects.all(), ) category = TreeNodeChoiceField( label=_("Category"), required=False, queryset=Category.objects.all(), level_indicator=mark_safe("&nbsp;&nbsp;&nbsp;&nbsp;") ) rating = forms.ChoiceField( label=_("Rating"), required=False, choices=RATING_CHOICES ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) author_field = layout.Field("author") category_field = layout.Field("category") rating_field = layout.Field("rating") submit_button = layout.Submit("filter", _("Filter")) actions = bootstrap.FormActions(submit_button) main_fieldset = layout.Fieldset( _("Filter"), author_field, category_field, rating_field, actions, ) self.helper = helper.FormHelper() self.helper.form_method = "GET" self.helper.layout = layout.Layout(main_fieldset)
2.015625
2