Dataset columns:
- max_stars_repo_path: string (3 to 269 chars)
- max_stars_repo_name: string (4 to 119 chars)
- max_stars_count: int64 (0 to 191k)
- id: string (1 to 7 chars)
- content: string (6 to 1.05M chars)
- score: float64 (0.23 to 5.13)
- int_score: int64 (0 to 5)

Each row below lists these fields in this order: path, repo name, star count, id, content, score, int_score.
scenario/trajectory_tracking/experiment/trajectory_tracking_trainer.py
NREL/K_Road
1
12790551
import os
import sys
import tempfile
from datetime import datetime
from pprint import pprint

import ray
from ray import tune
from ray.rllib.agents import Trainer
from ray.tune.logger import UnifiedLogger
from ray.tune.result import DEFAULT_RESULTS_DIR

# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from command_line_tools.run_tools import setup_run
from scenario.trajectory_tracking.experiment.experiment_common import setup_environment
from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer
from trainer.es_actual import ESActualTrainer
from trainer.es_co_trainer import ESCOTrainer

os.environ['CUDA_VISIBLE_DEVICES'] = '0'


def train(rllib_config, reporter):
    ego_starting_distance = 600.0
    environment, trainer = make_environment_and_controller(None, rllib_config)
    # trainer = make_trainer(config)
    checkpoint_frequency = 1
    max_iters = int(100e3)

    # def set_starting_distance(ego_starting_distance):
    #     trainer.workers.foreach_worker(
    #         lambda ev: ev.foreach_env(
    #             lambda env: env.process.set_starting_distance(ego_starting_distance)))
    #
    # # def set_starting_distance(ego_starting_distance):
    # #     for worker in trainer._workers:
    # #         print(worker)
    # #         worker.env.process.set_starting_distance(ego_starting_distance)
    #
    # set_starting_distance(ego_starting_distance)
    for i in range(max_iters):
        result = trainer.train()
        reporter(**result)
        if i % checkpoint_frequency == 0:
            # checkpoint_path = trainer.logdir
            # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment'))
            # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name'))
            # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'),
            #       'p', checkpoint_path)
            # trainer.save(checkpoint_path)
            checkpoint_path = trainer.save()
            print('saved to checkpoint ', checkpoint_path)


def on_episode_end(info):
    # print(info)
    episode = info['episode']
    # print(info)
    # trainer = info['trainer']
    base_env = info['env']
    # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance


print('begin trainer')
default_config = common_default_config

ray_num_cpus = None
if len(sys.argv) >= 4 and sys.argv[-3] == 'ray':
    redis_password = sys.argv[-2]
    ray_num_cpus = int(sys.argv[-1])
    ray.init(address=os.environ["ip_head"], _redis_password=redis_password)
    sys.argv = sys.argv[0:-3]
    # del sys.argv[-1:-4]
    print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv)
else:
    if not ray.is_initialized():
        ray.init()

print('setup config')
config, run_prefix = setup_run(default_config)
# config, this_env = setup_environment_config(config)

print("Nodes in the Ray cluster:")
pprint(ray.nodes())
pprint(ray.cluster_resources())

if ray_num_cpus is not None:
    config['rllib']['num_workers'] = ray_num_cpus - 1

rllib_config = make_rllib_config(config)

print('running tune')
tune.run(
    train,
    name=config['name'],
    trial_name_creator=lambda trial: config['name'],
    config=rllib_config,
    # local_dir='~/ray_results'
    # resources_per_trial={'gpu': 1},
)

print('shutting down')
ray.shutdown()
print('done')
1.828125
2
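The file above wires a hand-rolled training loop into Ray Tune through a function trainable. As a minimal, self-contained sketch of the same pattern, assuming only the `ray[tune]` package (none of the repo's internal modules such as make_environment_and_controller) and an older Ray release (roughly 1.x) where the function-based `tune.report` API is available:

import ray
from ray import tune


def train_fn(config):
    # Toy stand-in for trainer.train(): report a decreasing "loss" each iteration,
    # playing the role of the reporter(**result) call in the file above.
    for step in range(10):
        tune.report(training_iteration=step, loss=1.0 / (step + 1))


ray.init(ignore_reinit_error=True)
tune.run(train_fn, name="toy_experiment", config={})
ray.shutdown()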
emodelrunner/protocols_func.py
AurelienJaquier/EModelRunner
0
12790552
"""Protocol-related functions.""" # Copyright 2020-2021 Blue Brain Project / EPFL # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import logging from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name="soma", seclist_name="somatic", sec_index=0, comp_x=0.5 ) seclist_to_sec = { "somatic": "soma", "apical": "apic", "axonal": "axon", "myelinated": "myelin", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): """Read ramp threshold protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current """ ramp_definition = protocol_definition["stimuli"]["ramp"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition["ramp_delay"], ramp_duration=ramp_definition["ramp_duration"], location=soma_loc, total_duration=ramp_definition["totduration"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition["totduration"], location=soma_loc, total_duration=ramp_definition["totduration"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition["thresh_perc_start"], thresh_perc_end=ramp_definition["thresh_perc_end"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): """Read ramp protocol from definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol: Ramp Protocol """ ramp_definition = protocol_definition["stimuli"]["ramp"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition["ramp_amplitude_start"], ramp_amplitude_end=ramp_definition["ramp_amplitude_end"], ramp_delay=ramp_definition["ramp_delay"], ramp_duration=ramp_definition["ramp_duration"], location=soma_loc, total_duration=ramp_definition["totduration"], ) if "holding" in protocol_definition["stimuli"]: holding_definition = protocol_definition["stimuli"]["holding"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition["amp"], step_delay=holding_definition["delay"], step_duration=holding_definition["duration"], location=soma_loc, total_duration=holding_definition["totduration"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): """Read step protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol: Step Protocol """ # pylint: disable=undefined-loop-variable step_definitions = protocol_definition["stimuli"]["step"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition["amp"], step_delay=step_definition["delay"], step_duration=step_definition["duration"], location=soma_loc, total_duration=step_definition["totduration"], ) step_stimuli.append(step_stim) if "holding" in protocol_definition["stimuli"]: holding_definition = protocol_definition["stimuli"]["holding"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition["amp"], step_delay=holding_definition["delay"], step_duration=holding_definition["duration"], location=soma_loc, total_duration=holding_definition["totduration"], ) else: holding_stimulus = None if stochkv_det is None: stochkv_det = ( step_definition["stochkv_det"] if "stochkv_det" in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): """Read step threshold protocol from definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd """ # pylint: disable=undefined-loop-variable step_definitions = protocol_definition["stimuli"]["step"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition["delay"], step_duration=step_definition["duration"], location=soma_loc, total_duration=step_definition["totduration"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition["totduration"], location=soma_loc, total_duration=step_definition["totduration"], ) if stochkv_det is None: stochkv_det = ( step_definition["stochkv_det"] if "stochkv_det" in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition["thresh_perc"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): """Read Vecstim protocol from definitions. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses """ stim_definition = protocol_definition["stimuli"] if stim_definition["vecstim_random"] not in [ "python", "neuron", ]: logger.warning( "vecstim random not set to 'python' nor to 'neuron' in config file." "vecstim random will be re-set to 'python'." ) stim_definition["vecstim_random"] = "python" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition["syn_start"], stim_definition["syn_stop"], stim_definition["syn_stim_seed"], stim_definition["vecstim_random"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): """Read Netstim protocol from definitions. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses """ stim_definition = protocol_definition["stimuli"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition["syn_stop"], stim_definition["syn_nmb_of_spikes"], stim_definition["syn_interval"], stim_definition["syn_start"], stim_definition["syn_noise"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): """Get the location for the extra recording. Args: recording_definition (dict): contains the extra recording configuration data apical_point_isec (int): apical point section index. 
Should be given if the recording definition "type" is "somadistanceapic" Raises: Exception: if the recording definition "type" is "somadistanceapic" and apical_point_isec is -1. Exception: if the 'type' in the recording definition is neither "somadistance", nor "somadistanceapic", nor "nrnseclistcomp" Returns: location of the extra recording """ if recording_definition["type"] == "somadistance": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition["name"], soma_distance=recording_definition["somadistance"], seclist_name=recording_definition["seclist_name"], ) elif recording_definition["type"] == "somadistanceapic": if apical_point_isec == -1: raise Exception( "Cannot record at a given distance from apical point" f"if apical_point_isec is {apical_point_isec}." ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition["name"], soma_distance=recording_definition["somadistance"], sec_name=seclist_to_sec[recording_definition["seclist_name"]], sec_index=apical_point_isec, ) elif recording_definition["type"] == "nrnseclistcomp": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition["name"], comp_x=recording_definition["comp_x"], sec_index=recording_definition["sec_index"], seclist_name=recording_definition["seclist_name"], ) else: raise Exception(f"Recording type {recording_definition['type']} not supported") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): """Get recordings from protocol definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol data prefix (str): prefix used in naming responses, features, recordings, etc. apical_point_isec (int): apical point section index Should be given if there is "somadistanceapic" in "type" of at least one of the extra recording definition Returns: list of RecordingCustom """ recordings = [] recordings.append( RecordingCustom( name=f"{prefix}.{protocol_name}.soma.v", location=soma_loc, variable="v", ) ) if "extra_recordings" in protocol_definition: for recording_definition in protocol_definition["extra_recordings"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition["var"] recording = RecordingCustom( name=f"{prefix}.{protocol_name}.{location.name}.{var}", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): """Add protocol from protocol definition to protocols dict. Args: protocols_dict (dict): the dict to which to append the protocol protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used in naming responses, features, recordings, etc. 
syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) """ if "type" in protocol_definition and protocol_definition["type"] == "StepProtocol": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( "type" in protocol_definition and protocol_definition["type"] == "StepThresholdProtocol" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( "type" in protocol_definition and protocol_definition["type"] == "RampThresholdProtocol" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( "type" in protocol_definition and protocol_definition["type"] == "RampProtocol" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( "type" in protocol_definition and protocol_definition["type"] == "RatSSCxThresholdDetectionProtocol" ): protocols_dict["ThresholdDetection"] = RatSSCxThresholdDetectionProtocol( "IDRest", step_protocol_template=read_step_protocol( "Threshold", protocol_definition["step_template"], recordings ), prefix=prefix, ) elif "type" in protocol_definition and protocol_definition["type"] == "Vecstim": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif "type" in protocol_definition and protocol_definition["type"] == "Netstim": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli = [] for stimulus_definition in protocol_definition["stimuli"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition["amp"], step_delay=stimulus_definition["delay"], step_duration=stimulus_definition["duration"], location=soma_loc, total_duration=stimulus_definition["totduration"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): """Check for unsupported protocol. Args: protocols_dict (dict): contains all protocols to be run Raises: Exception: If a protocol that should only be used with MainProtocol is present in protocols_dict """ # Those protocols cannot be used if they are not in MainProtocol forbidden_prots = [ "RatSSCxRinHoldcurrentProtocol", "RatSSCxThresholdDetectionProtocol", "StepThresholdProtocol", "RampThresholdProtocol", ] # check the class name of each protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( "No MainProtocol found, but {prot} was found." f"To use {prot_name}, please set MainProtocol." ) def define_protocols( protocols_filepath, stochkv_det=None, prefix="", apical_point_isec=-1, syn_locs=None, ): """Define protocols. Args: protocols_filename (str): path to the protocols file stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used in naming responses, features, recordings, etc. 
apical_point_isec (int): apical point section index Should be given if there is "somadistanceapic" in "type" of at least one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) Returns: dict containing the protocols """ with open(protocols_filepath, "r", encoding="utf-8") as protocol_file: protocol_definitions = json.load(protocol_file) if "__comment" in protocol_definitions: del protocol_definitions["__comment"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in ["Main", "RinHoldcurrent"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if "Main" in protocol_definitions.keys(): protocols_dict["RinHoldcurrent"] = RatSSCxRinHoldcurrentProtocol( "RinHoldCurrent", rin_protocol_template=protocols_dict["Rin"], holdi_precision=protocol_definitions["RinHoldcurrent"]["holdi_precision"], holdi_max_depth=protocol_definitions["RinHoldcurrent"]["holdi_max_depth"], prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions["Main"]["other_protocols"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if "pre_protocols" in protocol_definitions["Main"]: for protocol_name in protocol_definitions["Main"]["pre_protocols"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict["Main"] = RatSSCxMainProtocol( "Main", rmp_protocol=protocols_dict["RMP"], rinhold_protocol=protocols_dict["RinHoldcurrent"], thdetect_protocol=protocols_dict["ThresholdDetection"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): """Set the efeatures of the main protocol. Args: protocols_dict (dict): contains all protocols to be run If this function is called, should contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix used in naming responses, features, recordings, etc. """ protocols_dict["Main"].rmp_efeature = efeatures[f"{prefix}.RMP.soma.v.voltage_base"] protocols_dict["Main"].rin_efeature = efeatures[ f"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse" ] protocols_dict["Main"].rin_efeature.stimulus_current = protocols_dict[ "Main" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict["RinHoldcurrent"].voltagebase_efeature = efeatures[ f"{prefix}.Rin.soma.v.voltage_base" ] protocols_dict["ThresholdDetection"].holding_voltage = efeatures[ f"{prefix}.Rin.soma.v.voltage_base" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path="", mtype="", syn_locs=None, stochkv_det=None, ): """Return a dict containing protocols. 
Args: apical_point_isec (int): section index of the apical point Set to -1 no apical point is used in any extra recordings prot_path (str): path to the protocols file features_path (str): path to the features file mtype (str): morphology name to be used as prefix in output filenames syn_locs (list): list of synapse locations stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols """ # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if "Main" in protocols_dict: efeatures = define_efeatures( protocols_dict["Main"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict["Main"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol( "all protocols", protocols=protocols, )
1.929688
2
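The module above is essentially a dispatch table from the "type" field of a JSON protocol definition to a reader function. A minimal sketch of that shape, with hypothetical stand-in readers in place of the bluepyopt-backed ones (read_step, read_ramp, and the field names in the example JSON are illustrative, not from the repo):

import json

# Hypothetical stand-ins for the bluepyopt-backed readers (read_step_protocol & co.).
def read_step(name, definition):
    return ("StepProtocol", name, definition["stimuli"]["step"]["amp"])

def read_ramp(name, definition):
    return ("RampProtocol", name, definition["stimuli"]["ramp"]["ramp_delay"])

READERS = {"StepProtocol": read_step, "RampProtocol": read_ramp}

def define_protocols(protocols_json):
    definitions = json.loads(protocols_json)
    definitions.pop("__comment", None)  # same convention as the module above
    protocols = {}
    for name, definition in definitions.items():
        reader = READERS.get(definition.get("type"))
        if reader is None:
            raise Exception(f"Protocol type {definition.get('type')} not supported")
        protocols[name] = reader(name, definition)
    return protocols

example = """{
  "Step1": {"type": "StepProtocol", "stimuli": {"step": {"amp": 0.1}}},
  "Ramp1": {"type": "RampProtocol", "stimuli": {"ramp": {"ramp_delay": 20.0}}}
}"""
print(define_protocols(example))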
Lib/xmlrpc/__init__.py
sireliah/polish-python
1
12790553
# This directory is a Python package.
0.820313
1
app.py
jonnelafin/B_
0
12790554
import os
import hashlib

from flask import Flask, redirect, request, jsonify

app = Flask(__name__)
c = 0
clients = []
chat = []
# [from, to, status[0sent, 1accepted, 2rejected]]
requests = {}
requests_sent = {}
version = 5
additive = 0


def getUID(ip):
    return hashlib.sha256(str(ip).encode("utf8")).hexdigest()


def getUN(ip):
    return int(str(ip).replace(".", ""))


def addChat(toAdd, limit=True):
    global chat, additive
    if limit:
        additive = additive + 1
    print("new chat: " + toAdd)
    toAdd = toAdd.replace("<script>", "").replace("</script>", "")
    if additive > 50:
        chat.pop(0)
    chat.append(toAdd)


def addClient(uID):
    if uID not in clients:
        clients.append(uID)
        addChat("--- " + uID + " Joined the Chat ---")
        print("connection from " + str(request.remote_addr))


def removeClient(uID):
    if uID in clients:
        clients.remove(uID)
        addChat("--- " + uID + " Left the Chat ---")


@app.route('/')
def hello():
    global chat, version
    uIp = request.access_route[0]
    uID = getUID(uIp)
    addClient(uID)
    view = "<title>A+</title>"
    global c
    c = c + 1
    view = view + "<h3> Public Chat </h3>"
    view = view + "Connected as: " + uID + " (" + uIp + ")<br \\>"
    view = view + "Refresh the page to access the latest messages."
    view = view + "<br \\>-----------------------------------------------------------------------<br \\>"
    for i in chat:
        view = view + i.replace("<", "").replace(">", "") + "<br \\>"
    view = view + "<br \\>-----------------------------------------------------------------------<br \\>"
    view = view + "note that only the latest 50 messages are stored and displayed. <br \\><br \\>"
    view = view + "<form action=\" " + "/post" + "\" method=\"post\">"
    view = view + "<input type=\"text\" name=\"msg\">"
    view = view + "<input type=\"submit\">"
    view = view + "</form>"
    view = view + "<br \\><hr \\>"
    view = view + "A+ v. " + str(version) + " | <a href=\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\">LICENSE</a>"
    return(view)


@app.route('/post', methods=['POST'])
def handle_data():
    uIp = request.access_route[0]
    uID = getUID(uIp)
    msg = request.form['msg']
    addChat(uID + ": " + msg)
    return redirect("/", code=302)


@app.route("/get_my_ip", methods=["GET"])
def get_my_ip():
    return jsonify({'ip': request.access_route[0], 'id': getUID(request.access_route[0])}), 200


@app.route("/announce", methods=["GET"])
def announceThem():
    global chat
    uIp = request.access_route[0]
    uID = getUID(uIp)
    addClient(uID)
    return jsonify({'you': uID}), 200


@app.route("/unannounce", methods=["GET"])
def unannounceThem():
    global chat
    uIp = request.access_route[0]
    uID = getUID(uIp)
    removeClient(uID)
    return jsonify({'you': uID}), 200


@app.route("/list", methods=["GET"])
def listAnnounced():
    return jsonify({'clients': clients}), 200


@app.route("/req", methods=['POST'])
def requestCH():
    global requests, requests_sent
    uIp = request.access_route[0]
    uID = getUID(uIp)
    if "to" in request.form:
        to = request.form['to']
        # [from, to, status[0sent, 1accepted, 2rejected]]
        req = [uID, to, 0]
        if not (to in requests):
            requests[to] = []
        requests[to].append(req)
        if not (uID in requests_sent):
            requests_sent[uID] = []
        requests_sent[uID].append(req)
        return redirect("/", code=302)
    else:
        return jsonify({'error': "400: POST Request expected"}), 400


@app.route("/status", methods=["GET"])
def sendStatus():
    global requests, requests_sent
    uIp = request.access_route[0]
    uID = getUID(uIp)
    lis = []
    if not (uID in requests_sent):
        requests_sent[uID] = []
    if not (uID in requests):
        requests[uID] = []
    return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200


@app.route("/send", methods=["GET"])
def sendView():
    view = ""
    view = view + "<h3> Send a Chat Request </h3>"
    view = view + "<hr \\>"
    view = view + "<form action=\" " + "/req" + "\" method=\"post\">"
    view = view + "<h4> To: </h4>"
    view = view + "<input type=\"text\" name=\"to\"><br \\>"
    view = view + "<input type=\"submit\">"
    view = view + "</form>"
    view = view + "<hr \\>"
    return view, 200


if __name__ == '__main__':
    # Bind to PORT if defined, otherwise default to 5000.
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
2.828125
3
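The chat app above identifies clients by hashing their remote address instead of printing raw IPs. A tiny sketch of that scheme, using only the standard library:

import hashlib

def get_uid(ip):
    # Stable client ID derived from the address, mirroring the app's getUID helper.
    return hashlib.sha256(str(ip).encode("utf8")).hexdigest()

print(get_uid("203.0.113.7")[:16])  # same input always yields the same ID

Note that hashing an IPv4 address is not true anonymization: the input space is small enough that the original address can be recovered by brute force.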
test/test_npu/test_network_ops/test_not_equal.py
Ascend/pytorch
1
12790555
# Copyright (c) 2020, Huawei Technologies. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy

import numpy as np
import torch

from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor


class TestNotEqual(TestCase):
    def cpu_op_exec(self, input1, input2):
        output = torch.ne(input1, input2)
        output = output.numpy().astype(np.int32)
        return output

    def npu_op_exec(self, input1, input2):
        output = torch.ne(input1, input2)
        output = output.to("cpu")
        output = output.numpy().astype(np.int32)
        return output

    def cpu_op_inplace_exec(self, input1, input2):
        input1.ne_(input2)
        output = input1.numpy().astype(np.int32)
        return output

    def npu_op_inplace_exec(self, input1, input2):
        input1.ne_(input2)
        output = input1.to("cpu")
        output = output.numpy().astype(np.int32)
        return output

    def npu_op_exec_out(self, input1, input2, out):
        torch.ne(input1, input2, out=out)
        output = out.to("cpu")
        output = output.numpy().astype(np.int32)
        return output

    def not_equal_scalar_result(self, shape_format):
        for item in shape_format:
            scalar = np.random.uniform(0, 100)
            cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
            npu_input3 = copy.deepcopy(cpu_input1).to("npu").to(torch.bool)
            if cpu_input1.dtype == torch.float16:
                cpu_input1 = cpu_input1.to(torch.float32)
            cpu_output = self.cpu_op_exec(cpu_input1, scalar)
            npu_output = self.npu_op_exec(npu_input1, scalar)
            npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3)
            cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar)
            npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar)
            self.assertRtolEqual(cpu_output, npu_output)
            self.assertRtolEqual(cpu_output, npu_output_out)
            self.assertRtolEqual(cpu_output_inp, npu_output_inp)

    def not_equal_result(self, shape_format):
        for item in shape_format:
            cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
            cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100)
            npu_input3 = copy.deepcopy(cpu_input1).to("npu").to(torch.bool)
            if cpu_input1.dtype == torch.float16:
                cpu_input1 = cpu_input1.to(torch.float32)
                cpu_input2 = cpu_input2.to(torch.float32)
            cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2)
            npu_output = self.npu_op_exec(npu_input1, npu_input2)
            npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3)
            cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2)
            npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2)
            self.assertRtolEqual(cpu_output, npu_output)
            self.assertRtolEqual(cpu_output, npu_output_out)
            self.assertRtolEqual(cpu_output_inp, npu_output_inp)

    def test_not_equal_shape_format_fp16_1d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp32_1d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp16_2d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp32_2d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp16_3d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp32_3d(self, device):
        format_list = [-1, 0, 3]
        shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp16_4d(self, device):
        format_list = [-1, 0, 3]
        shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_fp32_4d(self, device):
        format_list = [-1, 0, 3]
        shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i in format_list]
        self.not_equal_result(shape_format)

    # scalar -----------------------------------------------------------------
    def test_not_equal_scalar_shape_format_fp16_1d(self, device):
        format_list = [-1, 0, 3]
        shape_format = [[[np.float16, i, 18]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp32_1d(self, device):
        format_list = [-1, 0, 3]
        shape_format = [[[np.float32, i, [18]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp16_2d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float16, i, [64, 7]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp32_2d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float32, i, [64, 7]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp32_3d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float32, i, [64, 24, 38]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp16_4d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_scalar_shape_format_fp32_4d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i in format_list]
        self.not_equal_scalar_result(shape_format)

    def test_not_equal_shape_format_int32_1d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_int32_2d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_int32_3d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for i in format_list]
        self.not_equal_result(shape_format)

    def test_not_equal_shape_format_int32_4d(self, device):
        format_list = [-1, 0]
        shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i in format_list]
        self.not_equal_result(shape_format)


instantiate_device_type_tests(TestNotEqual, globals(), except_for="cpu")
if __name__ == "__main__":
    run_tests()
2.171875
2
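The test class above routes every case through a pair of exec helpers and compares CPU output against NPU output. Without Ascend hardware, the comparison pattern can be sketched on CPU alone — here checking torch.ne against a NumPy reference, assuming only torch and numpy are installed:

import numpy as np
import torch

def op_exec(input1, input2):
    # Same shape as the test's cpu_op_exec helper: run torch.ne, return int32 numpy.
    return torch.ne(input1, input2).numpy().astype(np.int32)

a = torch.tensor([1.0, 2.0, 3.0])
b = torch.tensor([1.0, 0.0, 3.0])
expected = np.not_equal(a.numpy(), b.numpy()).astype(np.int32)
assert (op_exec(a, b) == expected).all()
print(op_exec(a, b))  # [0 1 0]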
utils/queries.py
sand-ci/AlarmsAndAlerts
1
12790556
from elasticsearch.helpers import scan

import utils.helpers as hp

valueField = {
    'ps_packetloss': 'packet_loss',
    'ps_owd': 'delay_mean',
    'ps_retransmits': 'retransmits',
    'ps_throughput': 'throughput'
}


def query4Avg(idx, dateFrom, dateTo):
    val_fld = valueField[idx]
    query = {
        "size": 0,
        "query": {
            "bool": {
                "must": [
                    {"range": {"timestamp": {"gt": dateFrom, "lte": dateTo}}},
                    {"term": {"src_production": True}},
                    {"term": {"dest_production": True}}
                ]
            }
        },
        "aggregations": {
            "groupby": {
                "composite": {
                    "size": 9999,
                    "sources": [
                        {"src": {"terms": {"field": "src"}}},
                        {"dest": {"terms": {"field": "dest"}}},
                        {"src_host": {"terms": {"field": "src_host"}}},
                        {"dest_host": {"terms": {"field": "dest_host"}}},
                        {"src_site": {"terms": {"field": "src_site"}}},
                        {"dest_site": {"terms": {"field": "dest_site"}}}
                    ]
                },
                "aggs": {
                    val_fld: {
                        "avg": {"field": val_fld}
                    }
                }
            }
        }
    }
    # print(idx, str(query).replace("\'", "\""))

    aggrs = []
    aggdata = hp.es.search(index=idx, body=query)
    for item in aggdata['aggregations']['groupby']['buckets']:
        aggrs.append({
            'hash': str(item['key']['src'] + '-' + item['key']['dest']),
            'from': dateFrom,
            'to': dateTo,
            'src': item['key']['src'],
            'dest': item['key']['dest'],
            'src_host': item['key']['src_host'],
            'dest_host': item['key']['dest_host'],
            'src_site': item['key']['src_site'],
            'dest_site': item['key']['dest_site'],
            'value': item[val_fld]['value'],
            'doc_count': item['doc_count']
        })

    return aggrs


def get_ip_host(idx, dateFrom, dateTo):
    def q_ip_host(fld):
        return {
            "size": 0,
            "query": {
                "bool": {
                    "must": [
                        {"range": {"timestamp": {"from": dateFrom, "to": dateTo}}},
                        {"term": {"src_production": True}},
                        {"term": {"dest_production": True}}
                    ]
                }
            },
            "_source": False,
            "stored_fields": "_none_",
            "aggregations": {
                "groupby": {
                    "composite": {
                        "size": 9999,
                        "sources": [
                            {fld: {"terms": {"field": fld, "missing_bucket": True, "order": "asc"}}},
                            {str(fld + "_host"): {"terms": {"field": str(fld + "_host"), "missing_bucket": True, "order": "asc"}}}
                        ]
                    }
                }
            }
        }

    res_ip_host = {}
    for field in ['src', 'dest']:
        results = hp.es.search(index=idx, body=q_ip_host(field))
        for item in results["aggregations"]["groupby"]["buckets"]:
            ip = item['key'][field]
            host = item['key'][str(field + '_host')]
            if ((ip in res_ip_host.keys()) and (host is not None) and (host != ip)) or (ip not in res_ip_host.keys()):
                res_ip_host[ip] = host

    return res_ip_host


def get_ip_site(idx, dateFrom, dateTo):
    def q_ip_site(fld):
        return {
            "size": 0,
            "query": {
                "bool": {
                    "must": [
                        {"range": {"timestamp": {"from": dateFrom, "to": dateTo}}},
                        {"term": {"src_production": True}},
                        {"term": {"dest_production": True}}
                    ]
                }
            },
            "_source": False,
            "stored_fields": "_none_",
            "aggregations": {
                "groupby": {
                    "composite": {
                        "size": 9999,
                        "sources": [
                            {fld: {"terms": {"field": fld, "missing_bucket": True, "order": "asc"}}},
                            {str(fld + "_site"): {"terms": {"field": str(fld + "_site"), "missing_bucket": True, "order": "asc"}}},
                            {"ipv6": {"terms": {"field": "ipv6", "missing_bucket": True, "order": "asc"}}}
                        ]
                    }
                }
            }
        }

    res_ip_site = {}
    for field in ['src', 'dest']:
        results = hp.es.search(index=idx, body=q_ip_site(field))
        for item in results["aggregations"]["groupby"]["buckets"]:
            ip = item['key'][field]
            site = item['key'][str(field + '_site')]
            ipv6 = item['key']['ipv6']
            if ((ip in res_ip_site.keys()) and (site is not None)) or (ip not in res_ip_site.keys()):
                res_ip_site[ip] = [site, ipv6]

    return res_ip_site


def get_host_site(idx, dateFrom, dateTo):
    def q_host_site(fld):
        return {
            "size": 0,
            "query": {
                "bool": {
                    "must": [
                        {"range": {"timestamp": {"from": dateFrom, "to": dateTo}}},
                        {"term": {"src_production": True}},
                        {"term": {"dest_production": True}}
                    ]
                }
            },
            "_source": False,
            "stored_fields": "_none_",
            "aggregations": {
                "groupby": {
                    "composite": {
                        "size": 9999,
                        "sources": [
                            {str(fld + "_site"): {"terms": {"field": str(fld + "_site"), "missing_bucket": True, "order": "asc"}}},
                            {str(fld + "_host"): {"terms": {"field": str(fld + "_host"), "missing_bucket": True, "order": "asc"}}}
                        ]
                    }
                }
            }
        }

    res_host_site = {}
    for field in ['src', 'dest']:
        results = hp.es.search(index=idx, body=q_host_site(field))
        for item in results["aggregations"]["groupby"]["buckets"]:
            site = item['key'][str(field + "_site")]
            host = item['key'][str(field + '_host')]
            if ((host in res_host_site.keys()) and (site is not None)) or (host not in res_host_site.keys()):
                res_host_site[host] = site

    return res_host_site


def get_metadata(dateFrom, dateTo):
    def q_metadata():
        return {
            "size": 0,
            "query": {
                "range": {"timestamp": {"from": dateFrom, "to": dateTo}}
            },
            "_source": False,
            "aggregations": {
                "groupby": {
                    "composite": {
                        "size": 9999,
                        "sources": [
                            {"site": {"terms": {"field": "config.site_name.keyword", "missing_bucket": True, "order": "asc"}}},
                            {"admin_email": {"terms": {"field": "administrator.email", "missing_bucket": True, "order": "asc"}}},
                            {"admin_name": {"terms": {"field": "administrator.name", "missing_bucket": True, "order": "asc"}}},
                            {"ipv6": {"terms": {"field": "external_address.ipv6_address", "missing_bucket": True, "order": "asc"}}},
                            {"ipv4": {"terms": {"field": "external_address.ipv4_address", "missing_bucket": True, "order": "asc"}}},
                            {"host": {"terms": {"field": "host.keyword", "missing_bucket": True, "order": "asc"}}}
                        ]
                    }
                }
            }
        }

    results = hp.es.search(index='ps_meta', body=q_metadata())

    res_meta = {}
    for item in results["aggregations"]["groupby"]["buckets"]:
        host = item['key']['host']
        if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host not in res_meta.keys()):
            res_meta[host] = {'site': item['key']['site'],
                              'admin_name': item['key']['admin_name'],
                              'admin_email': item['key']['admin_email'],
                              'ipv6': item['key']['ipv6'],
                              'ipv4': item['key']['ipv4']}

    return res_meta
2.421875
2
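All of the helpers above build the same Elasticsearch shape: a filtered query plus a composite aggregation with a single oversized page (size 9999). Composite aggregations are designed to be paginated with after_key instead; a hedged sketch of that loop, reusing the "groupby" naming from the file and assuming an elasticsearch-py client object:

def composite_buckets(es, index, body):
    """Yield every bucket of a composite aggregation, paging with after_key."""
    while True:
        result = es.search(index=index, body=body)
        agg = result["aggregations"]["groupby"]
        buckets = agg["buckets"]
        if not buckets:
            break
        for bucket in buckets:
            yield bucket
        if "after_key" not in agg:
            break
        # Resume the next page where this one ended.
        body["aggregations"]["groupby"]["composite"]["after"] = agg["after_key"]

With this, the composite "size" can stay at a modest value (e.g. 1000) without silently truncating results when a period has more source/destination pairs than one page can hold.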
tests/framework/test_sun.py
altair12/night_exoplanet_scheduler
0
12790557
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import pytest
from mock import patch

from night_scheduler.framework.sun.sun import Sun


class TestSun(object):

    FAKE_LATITUDE = "00"
    FAKE_LONGITUDE = "11"
    FAKE_DATE = "YYYY-MM-DD"
    FAKE_SUNSET = "99:88:77 PM"
    FAKE_SUNRISE_SUNSERT_ORG_ANSWER = {
        "results": {
            "sunrise": "4:26:42 AM",
            "sunset": "99:88:77 PM",
            "solar_noon": "11:50:51 AM",
            "day_length": "14:48:18",
            "civil_twilight_begin": "3:54:08 AM",
            "civil_twilight_end": "7:47:34 PM",
            "nautical_twilight_begin": "3:12:59 AM",
            "nautical_twilight_end": "8:28:43 PM",
            "astronomical_twilight_begin": "2:25:39 AM",
            "astronomical_twilight_end": "9:16:04 PM"
        },
        "status": "OK"
    }

    @classmethod
    def setup_method(self, method):
        self.patcher_requests_get = patch('requests.get')
        self.mock_requests_get = self.patcher_requests_get.start()
        self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER
        self.sun = Sun(latitude=TestSun.FAKE_LATITUDE,
                       longitude=TestSun.FAKE_LONGITUDE,
                       date=TestSun.FAKE_DATE)

    @classmethod
    def teardown_method(self, method):
        self.mock_requests_get = self.patcher_requests_get.stop()

    # ##############################################################################################

    def test__get_sunset__no_params__calou_and_today_called(self):
        self.sun.get_sunset()
        self.mock_requests_get.assert_called_once_with(url="{}/json?lat={}&lng={}&date={}".format(
            Sun.URL,
            TestSun.FAKE_LATITUDE,
            TestSun.FAKE_LONGITUDE,
            TestSun.FAKE_DATE
        ))

    def test__get_sunset__no_params__returns_sunset_hour(self):
        sunset = self.sun.get_sunset()
        assert sunset == TestSun.FAKE_SUNSET
2.171875
2
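The fixture above patches requests.get and swaps in the canned sunrise-sunset.org payload as a plain dict, which only works if Sun indexes the mocked return value directly. If the code under test instead called response.json(), the usual pattern is a MagicMock whose json method returns the dict — a standard-library sketch, independent of the repo (FAKE_ANSWER and the URL are illustrative):

from unittest.mock import MagicMock, patch

import requests

FAKE_ANSWER = {"results": {"sunset": "99:88:77 PM"}, "status": "OK"}

with patch("requests.get") as mock_get:
    # .json() on the mocked response returns the canned payload.
    mock_get.return_value = MagicMock(**{"json.return_value": FAKE_ANSWER})
    assert requests.get("http://example.invalid").json() == FAKE_ANSWER
    mock_get.assert_called_once_with("http://example.invalid")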
xv_leak_tools/scriptlets/command_line_for_pid.py
UAEKondaya1/expressvpn_leak_testing
219
12790558
#!/usr/bin/env python3

import argparse
import sys

import psutil

from wrap_scriptlet import wrap_scriptlet


def run():
    parser = argparse.ArgumentParser()
    parser.add_argument('pid')
    args = parser.parse_args(sys.argv[1:])

    process = psutil.Process(int(args.pid))
    return process.cmdline()


sys.exit(wrap_scriptlet(run))
2.359375
2
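For reference, the psutil call the scriptlet wraps can be exercised directly; inspecting the current process avoids needing a known PID:

import os
import psutil

# cmdline() returns the argv the process was started with, e.g. ['python3', 'demo.py'].
print(psutil.Process(os.getpid()).cmdline())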
Bank.pyw
mukeshgurpude/Bank-Operations
0
12790559
<filename>Bank.pyw<gh_stars>0 # -*- coding: utf-8 -*- """ Created on Tue May 19 15:51:38 2020 """ from tkinter import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',"You don't have enough balance in your account") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: 
messagebox.showerror('Receiver not fount','Customer not found with this customer ID') except Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\nReturning to Menu') except Exception as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label 
email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f"Your account has been created!!\nCustomer ID: {new[0]}") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New 
Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect("customer.db") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State text, City text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect("BankData.db") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO Bank_Data VALUES(0,0,0)') bank_data.commit() home()
3.125
3
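The banking logic above interleaves parameterized SQL with Tk callbacks. Stripped of the GUI, the core transfer flow reduces to a balance check guarding a pair of parameterized UPDATEs — a sketch against an in-memory SQLite database (table trimmed to two columns for brevity):

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE customer_info (Customer_ID INTEGER, Balance INTEGER)")
cur.executemany("INSERT INTO customer_info VALUES (?, ?)", [(1, 1000), (2, 500)])

def transfer(cur, sender, receiver, amount):
    (balance,) = cur.execute(
        "SELECT Balance FROM customer_info WHERE Customer_ID=?", (sender,)
    ).fetchone()
    if balance < amount:
        raise ValueError("insufficient balance")
    cur.execute("UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?", (amount, sender))
    cur.execute("UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?", (amount, receiver))

transfer(cur, 1, 2, 250)
conn.commit()
print(cur.execute("SELECT * FROM customer_info").fetchall())  # [(1, 750), (2, 750)]

Parameterized placeholders (the ? markers) are the design choice worth copying here: they keep user-supplied IDs and amounts out of the SQL string itself.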
server/main_build_DB.py
ZzhKlaus/2018-SURF-Trajectory-Estimation
14
12790560
'''
By Zhenghang (<NAME>)
############################################################################################
It's a light server based on the FLASK micro framework.

1. Requirements: Python 3, Flask and relevant packages

2. How does this work?
   (1) Firstly, modify the host IP address to fit your own environment.
   (2) Then run this python file. A temporary file called 'tempList.csv' will be initialized
       with default data (e.g. for signal level RSS it would be -110, the magnetic field
       value would be none), ordered according to the unchanged file 'APs.csv' (which
       stores the AP info in a defined order).

       Each time the complete info of one AP arrives (assume 60 APs are detected in one
       scan: the transmission is then repeated 60 times, and on the last one a symbol
       called 'Done' is set to '1', which means the info of one scan has all been sent),
       'tempList.csv' is refreshed with one line of AP info. After 60 transmissions (the
       AP number), the function refreshCSV() is called. The scan info of that round is
       then copied from 'tempList.csv', appended to 'xxx.csv' (which stores all info,
       similar to a database) and refreshed in 'oneTime.csv' (to check the last scan's
       info). Finally, 'tempList.csv' is refreshed with default values for the next
       transmission.
############################################################################################
'''

# coding: utf-8
from flask import Flask, request
from app import db, models
import csv
import os  # to get current path
import importlib

from model import *  # algorithm part
import pandas as pdb
import numpy as np
import tensorflow as tf
from sklearn.preprocessing import scale
import matplotlib.pyplot as plt

PYTHONIOENCODING = "UTF-8"  # set the utf-8 encode mode

# create the application object
app = Flask(__name__)


# edition
# Write all info in the DB into a csv file, without SSID stored; encode mode is UTF-8
# (as some SSIDs contain Chinese characters)
# edition
def addAllCSV():
    # whole database
    with open('APs.csv', 'w', newline='') as csvfile:
        if not os.path.getsize('./APs.csv'):
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                 'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                 'Level', 'GeoX', 'GeoY', 'GeoZ'])
        users = models.User.query.all()
        for u in users:
            data = ([u.BSSID, u.SSID, u.Building, u.Floor, u.Location_x, u.Location_y,
                     u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz,
                     u.Level, u.GeoX, u.GeoY, u.GeoZ])
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(data)


# add one scan's result
def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,
           AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time):
    with open('userinput.csv', 'a', newline='') as csvfile:
        if not os.path.getsize('./userinput.csv'):
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                 'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                 'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
        data = ([BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,
                 AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time])
        spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
        spamwriter.writerow(data)


def initializeTempList():
    with open('mapping.csv', 'r', newline='') as csvfile:
        reader = csv.reader(csvfile)
        APs = [row[0] for row in reader]
        APlength = len(APs)
    lists = [[0 for col in range(19)] for row in range(APlength)]
    row = 0
    for AP in APs:
        lists[row][0] = AP
        # default every field to 'none' except the RSS level (column 13, default -110);
        # this matches the defaults used in isEmpty() below
        for col in range(1, 19):
            lists[row][col] = 'none'
        lists[row][13] = '-110'
        row += 1
    with open('tempList.csv', 'w', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
        spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                             'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                             'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
        for i in range(0, 517):
            spamwriter.writerow(lists[i])


# Check if the input AP's BSSID is in mapping.csv, which contains the known APs
def checkAP(list, AP):
    for row in range(0, 517):
        if AP == list[row][0]:
            return row
    return 'none'


def tempList(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,
             AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time):
    with open('tempList.csv', 'r', newline='') as csvfile:
        reader = csv.reader(csvfile)
        RSS = [row for row in reader]
    # print(RSS, RSS[0][0])
    for row in range(1, 517):
        if RSS[row][0] == BSSID:
            RSS[row][1] = SSID
            RSS[row][2] = Building
            RSS[row][3] = Floor
            RSS[row][4] = Location_x
            RSS[row][5] = Location_y
            RSS[row][6] = Frequency
            RSS[row][7] = AccX
            RSS[row][8] = AccY
            RSS[row][9] = AccZ
            RSS[row][10] = ORIx
            RSS[row][11] = ORIy
            RSS[row][12] = ORIz
            RSS[row][13] = Level
            RSS[row][14] = GeoX
            RSS[row][15] = GeoY
            RSS[row][16] = GeoZ
            RSS[row][17] = Model
            RSS[row][18] = Time
            with open('tempList.csv', 'w', newline='') as csvfile:
                spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
                spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                     'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                     'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
                for i in range(1, 517):
                    spamwriter.writerow(RSS[i])
            break


def isEmpty():
    with open('xxx.csv', 'a+', newline='') as csvfile:  # check if xxx.csv is empty
        if not os.path.getsize('./xxx.csv'):  # file not established
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                 'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                 'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
    with open('mapping.csv', 'r', newline='') as csvfile:
        reader = csv.reader(csvfile)
        APs = [row[0] for row in reader]
        APlength = len(APs)
    lists = [[0 for col in range(19)] for row in range(APlength)]
    row = 0
    for AP in APs:
        lists[row][0] = AP
        for col in range(1, 19):
            lists[row][col] = 'none'
        lists[row][13] = '-110'
        row += 1
    # edition2
    with open('tempList.csv', 'a+', newline='') as csvfile:  # check if tempList.csv is empty
        if not os.path.getsize('./tempList.csv'):  # file is empty
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                 'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                 'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
            for i in range(1, 517):
                print(i)
                spamwriter.writerow(lists[i])


def refreshCSV(Building, Floor, Location_x, Location_y, Model):
    with open('tempList.csv', 'r', newline='') as csvfile:
        reader = csv.reader(csvfile)
        RSS = [row for row in reader]
    with open('tempList.csv', 'w', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
        spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                             'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                             'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
        for row in range(1, 517):
            RSS[row][2] = Building
            RSS[row][3] = Floor
            RSS[row][4] = Location_x
            RSS[row][5] = Location_y
            RSS[row][17] = Model
            spamwriter.writerow(RSS[row])
    with open('xxx.csv', 'a', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
        # edition3
        for row in range(1, 517):
            spamwriter.writerow(RSS[row])
    with open('oneTime.csv', 'a', newline='') as csvfile:
        if not os.path.getsize('./oneTime.csv'):  # file is empty
            spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
            spamwriter.writerow(['BSSID', 'SSID', 'Building', 'Floor', 'Location_x', 'Location_y',
                                 'Frequency', 'AccX', 'AccY', 'AccZ', 'ORIx', 'ORIy', 'ORIz',
                                 'Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
        # edition4
        spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
        for i in range(1, 517):
            # original mixed RSS[i] with a stale RSS[row] index here; use RSS[i] throughout
            spamwriter.writerow(RSS[i])


@app.route('/', methods=['POST'])
def post():
    # isEmpty()
    # edition5
    isEmpty()
    BSSID = request.form['BSSID']
    SSID = request.form['SSID']
    Building = request.form['Building']
    Floor = request.form['Floor']
    Location_x = request.form['Location_x']
    Location_y = request.form['Location_y']
    Frequency = request.form['Frequency']
    Level = request.form['Level']
    AccX = request.form['AccX']
    AccY = request.form['AccY']
    AccZ = request.form['AccZ']
    ORIx = request.form['ORIx']
    ORIy = request.form['ORIy']
    ORIz = request.form['ORIz']
    GeoX = request.form['GeoX']
    GeoY = request.form['GeoY']
    GeoZ = request.form['GeoZ']
    Model = request.form['Model']
    Time = request.form['Time']
    Done = request.form['Done']

    # addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
    tempList(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,
             AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
    # addAPs(list)
    # addAllCSV()

    if Done == '1':
        refreshCSV(Building, Floor, Location_x, Location_y, Model)
        initializeTempList()
        print('1')
    else:
        print('0')
    return 'OK.'


if __name__ == "__main__":
    # Use the local host IP for a local server, or the IPv4 address
    # app.run(host='192.168.xxx.xxx', debug=True)
    app.run(host='192.168.xxx.xxx', debug=True)


'''
# Add RSS info into the database whose name is app.db
def addAPs(list):
    for row in range(0, 517):
        u = models.User(BSSID=list[row][0], SSID=list[row][1], Building=list[row][2],
                        Floor=list[row][3], Location_x=list[row][4], Location_y=list[row][5],
                        Frequency=list[row][6], AccX=list[row][7], AccY=list[row][8],
                        AccZ=list[row][9], ORIx=list[row][10], ORIy=list[row][11],
                        ORIz=list[row][12], Level=list[row][13], GeoX=list[row][14],
                        GeoY=list[row][15], GeoZ=list[row][16])
        db.session.add(u)
        db.session.commit()

# Show all RSS info from the database
def showAPs(num):
    ap = models.User.query.get(num)
    print(ap.BSSID, ap.SSID, ap.Building, ap.Floor, ap.Location_x, ap.Location_y,
          ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz,
          ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ)

def deleteDB():
    users = models.User.query.all()
    for u in users:
        db.session.delete(u)
        db.session.commit()
'''
2.90625
3
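The upload protocol described in the docstring above (one POST per detected AP, with Done='1' on the last AP of a scan so the server calls refreshCSV()) can be exercised with a tiny client. A minimal sketch, assuming a reachable host/port and made-up field values:

import requests

payload = {
    'BSSID': 'aa:bb:cc:dd:ee:ff', 'SSID': 'demo', 'Building': 'EB', 'Floor': '1',
    'Location_x': '0', 'Location_y': '0', 'Frequency': '2412',
    'AccX': '0', 'AccY': '0', 'AccZ': '9.8',
    'ORIx': '0', 'ORIy': '0', 'ORIz': '0', 'Level': '-60',
    'GeoX': '0', 'GeoY': '0', 'GeoZ': '0',
    'Model': 'demo-phone', 'Time': '0',
    'Done': '1',   # last AP of this scan
}
# hypothetical host/port; the server prints '1' and rolls the CSVs over
requests.post('http://192.168.0.10:5000/', data=payload)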
packages/std/nodes/std___ExtractProperty0/widgets/std___ExtractProperty0___main_widget___METACODE.py
lidong1266/Ryven-Switch
18
12790561
from NIWENV import *

from PySide2.QtWidgets import QPlainTextEdit
from PySide2.QtCore import Qt
# from PySide2.QtGui import ...


class %CLASS%(QPlainTextEdit, MWB):
    def __init__(self, params):
        MWB.__init__(self, params)
        QPlainTextEdit.__init__(self)

        self.setStyleSheet(self.parent_node_instance.session_stylesheet())
        self.setFixedSize(250, 30)
        self.setPlainText('obj.')

        self.last_text = self.toPlainText()

    def focusOutEvent(self, event):
        txt = self.toPlainText()
        if txt != self.last_text:
            self.editing_finished(txt)
            self.last_text = txt
        QPlainTextEdit.focusOutEvent(self, event)

    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return:
            self.editing_finished(self.toPlainText())
            self.last_text = self.toPlainText()
        else:
            QPlainTextEdit.keyPressEvent(self, event)

    def editing_finished(self, text):
        self.parent_node_instance.text = text
        self.parent_node_instance.update()

    def get_text(self):
        return self.toPlainText()

    def get_data(self):
        data = {'text': self.toPlainText()}
        return data

    def set_data(self, data):
        self.setPlainText(data['text'])

    def remove_event(self):
        pass
2.390625
2
main.py
marbiru/Model-Major-Generator
0
12790562
print "\n Welcome to the Model Major Generator. This program gives you an image to help you memorise any number \n"

def create_mnemonic():
    import pegs
    mnemonic = ""
    the_input = raw_input("Enter the number you'd like to memorise \n")
    while len(the_input) > 1:
        next_number = int(the_input[0:2])
        next_image = pegs.pegs_list[next_number]
        the_input = the_input[2:]
        mnemonic += " " + next_image
    if len(the_input) == 1:
        next_number = int(the_input[0:1])
        next_image = pegs.single_digit_peg_list[next_number]
        mnemonic += " " + next_image
    else:
        pass
    print mnemonic

create_mnemonic()
3.875
4
tests/composite_query_test.py
briandorsey/mimic
1
12790563
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unit tests for composite_query.py."""

import unittest

from __mimic import common
from __mimic import composite_query
from tests import test_util

from google.appengine.api import datastore_errors
from google.appengine.ext import db

# A query limit large enough to return all data.
_BIG_ENOUGH = 100

_MODULE_SETUP = False


class Item(db.Model):
  """A simple entity with 3 integer properties."""
  x = db.IntegerProperty()
  y = db.IntegerProperty()
  z = db.IntegerProperty()


# Having a root entity key allows us to use ancestor queries for strong
# consistency in the High Replication Datastore. We initialize this global
# variable in setUp after calling test_util.InitAppHostingApi().
_ROOT_ITEM_KEY = None


class CompositeQueryTest(unittest.TestCase):

  def setUp(self):
    setUp()
    self._patch = composite_query.CompositeQueryPatch()
    self._patch.Install()

  def tearDown(self):
    self._patch.Remove()

  def CheckQuery(self, expected, query):
    query.ancestor(_ROOT_ITEM_KEY)
    # first check fetching all
    self.assertListEqual(
        expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)])
    # try a slice
    self.assertListEqual(
        expected[1:3],
        [e.key().name() for e in query.fetch(limit=2, offset=1)])

  def testSimpleQuery(self):
    # shouldn't require composite index
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y =', 2)
    self.CheckQuery(['120', '121', '122', '123', '124'], query)

  def testDescendingOrder(self):
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y =', 2)
    query.order('-z')
    self.CheckQuery(['124', '123', '122', '121', '120'], query)

  def testNonEqFilter(self):
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y >', 3)
    self.CheckQuery(['140', '141', '142', '143', '144'], query)

  def testEmptyResult(self):
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y >', 10)
    self.CheckQuery([], query)

  def testKeysOnly(self):
    query = db.Query(Item, keys_only=True)
    query.filter('x =', 1)
    query.filter('y >', 3)
    self.assertListEqual(['140', '141', '142', '143', '144'],
                         [k.name() for k in query.fetch(_BIG_ENOUGH)])

  def testPatchRemoval(self):
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y =', 3)
    query.filter('z <', 2)
    self.CheckQuery(['130', '131'], query)
    # remove patch and try query again
    self._patch.Remove()
    self.assertRaises(datastore_errors.NeedIndexError,
                      query.fetch, _BIG_ENOUGH)
    # simple queries should still work
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y =', 3)

  def testIndexYamlRecording(self):
    composite_query.ClearIndexYaml()
    query = db.Query(Item)
    query.filter('x =', 1)
    query.filter('y =', 2)
    query.order('-z')
    for _ in query.fetch(1):
      pass
    expected = """indexes:
- kind: Item
  properties:
  - name: x
  - name: y
  - name: z
    direction: desc"""
    self.assertEquals(expected, composite_query.GetIndexYaml())


def setUp():
  global _MODULE_SETUP  # pylint: disable-msg=W0603
  if _MODULE_SETUP:
    return
  _MODULE_SETUP = True
  test_util.InitAppHostingApi()
  global _ROOT_ITEM_KEY  # pylint: disable-msg=W0603
  _ROOT_ITEM_KEY = Item(key_name='root_entity')  # pylint: disable-msg=C6409
  # add some data
  for x in range(5):
    for y in range(5):
      for z in range(5):
        name = '%d%d%d' % (x, y, z)
        Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put()


class IndexYamlTest(unittest.TestCase):
  """Unit tests for the functions that maintain a set of index definitions."""

  def setUp(self):
    # always start with a known state
    common.ClearPersistent(common.PERSIST_INDEX_NAME)
    indexes = set(['foo', 'bar'])
    composite_query._WriteIndexes(indexes)

  def testReadIndexes(self):
    self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes())

  def testRecordIndex(self):
    composite_query._RecordIndex('baz')
    self.assertSetEqual(set(['foo', 'bar', 'baz']),
                        composite_query._ReadIndexes())

  def testDuplicatesIgnored(self):
    composite_query._RecordIndex('bar')
    self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes())

  def testClearIndexYaml(self):
    composite_query.ClearIndexYaml()
    self.assertSetEqual(set(), composite_query._ReadIndexes())

  def testGetIndexYaml(self):
    expected = """indexes:
bar
foo"""
    self.assertEquals(expected, composite_query.GetIndexYaml())


if __name__ == '__main__':
  unittest.main()
2.265625
2
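GetIndexYaml() returns ready-to-deploy index definitions recorded while patched queries run. A minimal sketch (hypothetical helper name) that persists them to an index.yaml file:

from __mimic import composite_query

def dump_index_yaml(path='index.yaml'):
    # Write the recorded composite index definitions to disk for deployment.
    with open(path, 'w') as f:
        f.write(composite_query.GetIndexYaml() + '\n')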
addons/Sprytile-6b68d00/rx/linq/observable/dowhile.py
trisadmeslek/V-Sekai-Blender-tools
733
12790564
from rx.core import Observable
from rx.internal import extensionmethod


@extensionmethod(Observable)
def do_while(self, condition):
    """Repeats source as long as condition holds emulating a do while loop.

    Keyword arguments:
    condition -- {Function} The condition which determines if the source
        will be repeated.

    Returns an observable {Observable} sequence which is repeated as long
    as the condition holds.
    """

    return Observable.concat([self, Observable.while_do(condition, self)])
3.21875
3
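A usage sketch for do_while above, assuming the RxPY 1.x API this extension method targets. The condition is re-checked after each pass over the source, so the single value below should be delivered three times before completion:

from rx import Observable

count = {'n': 0}

def on_next(value):
    count['n'] += 1
    print('tick', count['n'])

Observable.just(1) \
    .do_while(lambda _: count['n'] < 3) \
    .subscribe(on_next)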
sw/groundstation/wasp/__init__.py
nzjrs/wasp
2
12790565
"""
libwasp is a library for interacting with UAVs running the wasp software
system. This library can be used to create groundstation and other monitoring
software for interacting with the UAV.

The library is coupled with the onboard software through

 * *messages.xml* - the definition of messages sent over the chosen
   communication channel from the UAV to the groundstation
 * *settings.xml* - a concept of a semi-persistent setting on the UAV. The
   setting may be read/updated from the groundstation and stored on the UAV
"""

import os
import random
import time
import math

HOME_LAT = float(os.environ.get("WASP_HOME_LAT", -43.520451))
HOME_LON = float(os.environ.get("WASP_HOME_LON", 172.582377))

IS_TESTING = os.environ.get("WASP_IS_TESTING")

#: dictionary mapping the C type to its length in bytes (e.g char -> 1)
TYPE_TO_LENGTH_MAP = {
    "char":   1,
    "uint8":  1,
    "int8":   1,
    "uint16": 2,
    "int16":  2,
    "uint32": 4,
    "int32":  4,
    "float":  4,
}

#: dictionary mapping the C type to correct format string
TYPE_TO_PRINT_MAP = {
    float: "%f",
    str:   "%s",
    chr:   "%c",
    int:   "%d",
}

ACID_ALL = 0xFF
ACID_TEST = 0xFE
ACID_GROUNDSTATION = 0xFD

#: dictionary mapping debug types to format characters
DEBUG_MESSAGES = {
    "DEBUG_UINT8": "%d",
    "DEBUG_INT32": "%d",
    "DEBUG_FLOAT": "%#f",
}


class _Noisy:
    """An interface for objects providing noisy data (usually for testing)"""

    def value(self):
        """:returns: the next value"""
        raise NotImplementedError


class NoisySine(_Noisy):
    """Generates a noisy sinewave"""

    def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10):
        self.t = time.time()
        self.dt = 0.0
        self.freq = freq
        self.amp = amplitude
        self.type = value_type
        # add 1 to sine to keep +ve
        if positive:
            self.offset = 1.0
        else:
            self.offset = 0.0
        # the noise is x percent of the amplitude
        n = (noise_pct / 100.0) * self.amp
        self.n1 = self.amp - n
        self.n2 = self.amp + n

    def value(self):
        t = time.time()
        self.dt += (self.freq * (t - self.t))
        self.t = t
        val = (self.offset * math.sin(self.dt)) * self.amp
        noise = random.randrange(self.n1, self.n2, int=self.type)
        return self.type(noise + val)


class NoisyWalk(_Noisy):
    """Generates a noisy random walk"""

    def __init__(self, start, end, delta, value_type=float):
        self.v = start
        self.end = end
        self.start = start
        self.delta = delta
        self.type = value_type

    def value(self):
        v = self.v + (self.delta * random.randrange(0.0, 1.0, int=float))
        if self.start > self.end:
            if v > self.end and v < self.start:
                self.v = v
        else:
            if v < self.end and v > self.start:
                self.v = v
        return self.type(self.v)


class Noisy(_Noisy):
    def __init__(self, value, delta, value_type=float):
        self.v = value
        self.delta = delta
        self.type = value_type

    def value(self):
        return self.type(self.v + (self.delta * random.randrange(0.0, 1.0, int=float)))
2.5
2
search_cycles.py
hazdzz/MGC_v2
2
12790566
import os

import networkx as nx
import scipy.sparse as sp


def search_cycle(dir_adj):
    dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph)
    cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj))
    num_cycle = len(cycles)
    q = []
    for i in range(num_cycle):
        q.append(len(cycles[i]))
    q = set(q)
    return q


dataset_path = './data'
dataset_name = 'cornell'
dataset_path = os.path.join(dataset_path, dataset_name)
dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz'))
dir_adj = dir_adj.tocsc()
q = search_cycle(dir_adj)
print(q)
2.71875
3
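A quick sanity check for search_cycle above: a digraph with one 3-cycle (0-1-2-0) and one 2-cycle (0-1-0) should produce the length set {2, 3}. This uses the older to_scipy_sparse_matrix name, matching the from_scipy_sparse_matrix call in the file:

import networkx as nx

g = nx.DiGraph([(0, 1), (1, 2), (2, 0), (1, 0)])
adj = nx.to_scipy_sparse_matrix(g)
print(search_cycle(adj))  # expected: {2, 3}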
demo/code/2021-11-10/f.py
uxlsl/uxlsl.github.io
0
12790567
#!/usr/bin/env python
# -*-coding:utf-8-*-
# File Name     : f.py
# Description   :
# Author        :
# Creation Date : 2021-11-10
# Last Modified : 2021-11-10, Wednesday, 06:56:48
# Created By    : lsl
from typing import List  # needed by the type hints in Solution below


def f(words):
    def isPalindrome(s):
        return s == s[::-1]

    data = {word[::-1]: i for i, word in enumerate(words)}

    def find(w):
        return data.get(w, -1)

    ans = []
    for i, word in enumerate(words):
        for j in range(len(word) + 1):
            a, b = word[:j], word[j:]
            if isPalindrome(a):
                k = find(b)
                if k != -1 and k != i:
                    ans.append([k, i])
            if j and isPalindrome(b):
                k = find(a)
                if k != -1 and k != i:
                    ans.append([i, k])
    return ans


words = ["abcd", "dcba"]  # ,"lls","s","sssll"]
print(f(words))
# words = ["bat","tab","cat"]
# print(f(words))
# words = ["a", ""]
# print(f(words))


class Solution:
    def palindromePairs(self, words: List[str]) -> List[List[int]]:
        def findWord(s: str, left: int, right: int) -> int:
            return indices.get(s[left:right + 1], -1)

        def isPalindrome(s: str, left: int, right: int) -> bool:
            return (sub := s[left:right + 1]) == sub[::-1]

        n = len(words)
        indices = {word[::-1]: i for i, word in enumerate(words)}

        ret = list()
        for i, word in enumerate(words):
            m = len(word)
            for j in range(m + 1):
                if isPalindrome(word, j, m - 1):
                    leftId = findWord(word, 0, j - 1)
                    if leftId != -1 and leftId != i:
                        ret.append([i, leftId])
                if j and isPalindrome(word, 0, j - 1):
                    rightId = findWord(word, j, m - 1)
                    if rightId != -1 and rightId != i:
                        ret.append([rightId, i])

        return ret
3.4375
3
sdk/python/pulumi_google_native/compute/alpha/get_network_edge_security_service.py
AaronFriel/pulumi-google-native
44
12790568
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities

__all__ = [
    'GetNetworkEdgeSecurityServiceResult',
    'AwaitableGetNetworkEdgeSecurityServiceResult',
    'get_network_edge_security_service',
    'get_network_edge_security_service_output',
]


@pulumi.output_type
class GetNetworkEdgeSecurityServiceResult:
    def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None):
        if creation_timestamp and not isinstance(creation_timestamp, str):
            raise TypeError("Expected argument 'creation_timestamp' to be a str")
        pulumi.set(__self__, "creation_timestamp", creation_timestamp)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if fingerprint and not isinstance(fingerprint, str):
            raise TypeError("Expected argument 'fingerprint' to be a str")
        pulumi.set(__self__, "fingerprint", fingerprint)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if region and not isinstance(region, str):
            raise TypeError("Expected argument 'region' to be a str")
        pulumi.set(__self__, "region", region)
        if security_policy and not isinstance(security_policy, str):
            raise TypeError("Expected argument 'security_policy' to be a str")
        pulumi.set(__self__, "security_policy", security_policy)
        if self_link and not isinstance(self_link, str):
            raise TypeError("Expected argument 'self_link' to be a str")
        pulumi.set(__self__, "self_link", self_link)
        if self_link_with_id and not isinstance(self_link_with_id, str):
            raise TypeError("Expected argument 'self_link_with_id' to be a str")
        pulumi.set(__self__, "self_link_with_id", self_link_with_id)

    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> str:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        An optional description of this resource. Provide this property when you create the resource.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def fingerprint(self) -> str:
        """
        Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be provided in order to update the NetworkEdgeSecurityService, otherwise the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService.
        """
        return pulumi.get(self, "fingerprint")

    @property
    @pulumi.getter
    def kind(self) -> str:
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def region(self) -> str:
        """
        URL of the region where the resource resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.
        """
        return pulumi.get(self, "region")

    @property
    @pulumi.getter(name="securityPolicy")
    def security_policy(self) -> str:
        """
        The resource URL for the network edge security service associated with this network edge security service.
        """
        return pulumi.get(self, "security_policy")

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> str:
        """
        Server-defined URL for the resource.
        """
        return pulumi.get(self, "self_link")

    @property
    @pulumi.getter(name="selfLinkWithId")
    def self_link_with_id(self) -> str:
        """
        Server-defined URL for this resource with the resource id.
        """
        return pulumi.get(self, "self_link_with_id")


class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetNetworkEdgeSecurityServiceResult(
            creation_timestamp=self.creation_timestamp,
            description=self.description,
            fingerprint=self.fingerprint,
            kind=self.kind,
            name=self.name,
            region=self.region,
            security_policy=self.security_policy,
            self_link=self.self_link,
            self_link_with_id=self.self_link_with_id)


def get_network_edge_security_service(network_edge_security_service: Optional[str] = None,
                                      project: Optional[str] = None,
                                      region: Optional[str] = None,
                                      opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult:
    """
    Gets a specified NetworkEdgeSecurityService.
    """
    __args__ = dict()
    __args__['networkEdgeSecurityService'] = network_edge_security_service
    __args__['project'] = project
    __args__['region'] = region
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value

    return AwaitableGetNetworkEdgeSecurityServiceResult(
        creation_timestamp=__ret__.creation_timestamp,
        description=__ret__.description,
        fingerprint=__ret__.fingerprint,
        kind=__ret__.kind,
        name=__ret__.name,
        region=__ret__.region,
        security_policy=__ret__.security_policy,
        self_link=__ret__.self_link,
        self_link_with_id=__ret__.self_link_with_id)


@_utilities.lift_output_func(get_network_edge_security_service)
def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None,
                                             project: Optional[pulumi.Input[Optional[str]]] = None,
                                             region: Optional[pulumi.Input[str]] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]:
    """
    Gets a specified NetworkEdgeSecurityService.
    """
    ...
1.609375
2
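A usage sketch for the generated lookup above inside a Pulumi program (the project, region and service names are hypothetical):

import pulumi
import pulumi_google_native.compute.alpha as compute_alpha

svc = compute_alpha.get_network_edge_security_service_output(
    network_edge_security_service="my-edge-service",  # hypothetical
    project="my-project",                             # hypothetical
    region="us-central1",
)
pulumi.export("securityPolicy", svc.security_policy)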
basic_programs/7_circleArea.py
SU-Ki-MA/python-programs-gfg
0
12790569
import math

def circleArea(r):
    return math.pi * r * r

print(circleArea(55))
print(math.pi)
3.171875
3
tests/missing_data/test_missing_data_air_passengers_None_None.py
shaido987/pyaf
377
12790570
import tests.missing_data.test_missing_data_air_passengers_generic as gen

gen.test_air_passengers_missing_data(None, None)
1.117188
1
test/test_status.py
System1Bio/asitiger
2
12790571
from asitiger.status import (
    AxisEnabledStatus,
    AxisStatus,
    JoystickStatus,
    LimitStatus,
    MotorStatus,
    RampingDirection,
    RampingStatus,
    Status,
    status_from_decimal,
    statuses_for_rdstat,
)

RDSTAT_RESPONSE = ":A 10N 138"


def test_status_from_decimal_types():
    axis = status_from_decimal(210)

    assert isinstance(axis.status, Status)
    assert isinstance(axis.enabled, AxisEnabledStatus)
    assert isinstance(axis.motor, MotorStatus)
    assert isinstance(axis.joystick, JoystickStatus)
    assert isinstance(axis.ramping, RampingStatus)
    assert isinstance(axis.ramping_direction, RampingDirection)
    assert isinstance(axis.upper_limit, LimitStatus)
    assert isinstance(axis.lower_limit, LimitStatus)


def test_status_from_decimal_values():
    axis = status_from_decimal(210)

    assert axis.status == Status.IDLE
    assert axis.enabled == AxisEnabledStatus.ENABLED
    assert axis.motor == MotorStatus.INACTIVE
    assert axis.joystick == JoystickStatus.DISABLED
    assert axis.ramping == RampingStatus.RAMPING
    assert axis.ramping_direction == RampingDirection.DOWN
    assert axis.upper_limit == LimitStatus.CLOSED
    assert axis.lower_limit == LimitStatus.CLOSED


def test_statuses_for_rdstat_split():
    axes = statuses_for_rdstat(RDSTAT_RESPONSE)
    assert len(axes) == 3


def test_statuses_for_rdstat_types():
    axes = statuses_for_rdstat(RDSTAT_RESPONSE)

    assert isinstance(axes[0], AxisStatus)
    assert isinstance(axes[1], Status)
    assert isinstance(axes[2], AxisStatus)


def test_from_flag_str():
    assert Status.from_flag("N") == Status.IDLE
    assert Status.from_flag("B") == Status.BUSY
2.34375
2
api/serializers.py
bartoszper/Django-REST-API-movierater
0
12790572
from django.contrib.auth.models import User
from rest_framework import serializers

from .models import Film, ExtraInfo, Recenzja, Aktor


class UserSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = User
        fields = ['id', 'username', 'email', 'password']
        extra_kwargs = {'password': {'required': True, 'write_only': True}}

    def create(self, validated_data):
        user = User.objects.create_user(**validated_data)
        return user


class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = ExtraInfo
        fields = ['czas_trwania', 'rodzaj']


class RecenzjaSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = Recenzja
        fields = ('id', 'opis', 'gwiazdki', 'film')

    # was misnamed `create`; the body updates an existing instance, so this is update()
    def update(self, instance, validated_data):
        instance.opis = validated_data.get('opis', instance.opis)
        instance.gwiazdki = validated_data.get('gwiazdki', instance.gwiazdki)
        instance.save()
        return instance


class FilmSerializer(serializers.HyperlinkedModelSerializer):
    extra_info = ExtraInfoSerializer(many=False)
    recenzje = RecenzjaSerializer(many=True)

    class Meta:
        model = Film
        fields = ['id', 'tytul', 'opis', 'po_premierze', 'premiera', 'rok', 'imdb_rating',
                  'extra_info', 'recenzje']
        read_only_fields = ('extra_info', 'recenzje')


class AktorSerializer(serializers.HyperlinkedModelSerializer):
    filmy = FilmSerializer(many=True, read_only=True)

    class Meta:
        model = Aktor
        fields = ['id', 'imie', 'nazwisko', 'filmy']

    # def create(self, validated_data):
    #     filmy = validated_data['filmy']
    #     del validated_data['filmy']
    #     aktor = Aktor.objects.create(**validated_data)
    #     for film in filmy:
    #         f = Film.objects.create(**film)
    #         aktor.filmy.add(f)
    #     aktor.save()
    #     return aktor
2.0625
2
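A usage sketch for UserSerializer above; it must run inside the configured Django project, and the payload is made up. Because password is write_only, it is accepted on input but never echoed back in the serialized output:

payload = {'username': 'jan', 'email': 'jan@example.com', 'password': 's3cret'}
serializer = UserSerializer(data=payload)
if serializer.is_valid():
    user = serializer.save()  # dispatches to create(), which hashes via create_user()
print(serializer.errors or 'ok')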
tests/image/svg_file_test.py
pywikibot-catfiles/file-metadata
10
12790573
# -*- coding: utf-8 -*-

from __future__ import (division, absolute_import, unicode_literals,
                        print_function)

import os

from file_metadata.image.svg_file import SVGFile
from tests import fetch_file, unittest


class SVGFileTest(unittest.TestCase):

    def test_svg_fetch_filename_raster(self):
        uut = SVGFile(fetch_file('image_svg_xml.svg'))
        self.assertTrue(uut.fetch('filename_raster').endswith('.png'))
        self.assertEqual(len(uut.temp_filenames), 1)
        name = tuple(uut.temp_filenames)[0]
        self.assertTrue(os.path.exists(name))
        uut.close()
        self.assertFalse(os.path.exists(name))

    def test_fetch_svg_ndarray_application_xml(self):
        with SVGFile(fetch_file('application_xml.svg')) as uut:
            self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4))

    def test_fetch_svg_ndarray(self):
        with SVGFile(fetch_file('image_svg_xml.svg')) as uut:
            self.assertEqual(uut.fetch('ndarray').shape, (100, 100))

    def test_fetch_svg_ndarray_text_html(self):
        with SVGFile(fetch_file('text_html.svg')) as uut:
            self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4))

    def test_fetch_svg_ndarray_text_plain(self):
        with SVGFile(fetch_file('text_plain.svg')) as uut:
            self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3))

    def test_file_format(self):
        with SVGFile(fetch_file('text_plain.svg')) as uut:
            data = uut.analyze_file_format()
            self.assertIn('Composite:FileFormat', data)
            self.assertEqual(data['Composite:FileFormat'], 'svg')
2.40625
2
xenso/indices.py
DangoMelon/xENSO
1
12790574
""" Module containing the definitions and methods to compute a variety of indices used to study ENSO """ from typing import List, Optional, Tuple import numpy as np import xarray as xr from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: """ Computes the E and C index according to Takahashi """ def __init__( self, sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = ("1979-01-01", "2009-12-30"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data = sst_data self.base_period = base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: """ Compute the principal components """ _subset = self.sst_data.sortby("lat").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop("month"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: """ Return the pcs with the correction factor applied """ return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: """ Automatically determine the correction factor by estimating the sign of known events for the E and C index. """ _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset: """ Compute the E and C index """ _pcs = self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim="time") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 ** (1 / 2)) eindex.name = "E_index" cindex = (pc1 + pc2) / (2 ** (1 / 2)) cindex.name = "C_index" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: """ Return the correction factor applied to the first two pcs """ return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: """ Set a new correction factor to be applied to the first two pcs """ self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[("mode", [0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: """ Return the smooth kernel used in the first two pcs """ return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: """ Set a new smooth kernel to be applied to the first two pcs """ kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=["time"]) @property def pcs(self) -> xr.DataArray: """ Return the first two principal components used in the computation of the E and C index """ return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: """ Return the first two principal components smoothed with the specified smooth_kernel """ if 
self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim="time", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: """ Return the first two principal components rotated, also known as the E and C index """ return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: """ Return the first two principal components smoothed and rotated, also known as the E and C index """ return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = "34") -> xr.DataArray: """ Computes the mean from the selected El Niño zone, also know as El Niño Index for each of the zones. """ zones = { "12": {"lat": slice(-10, 0), "lon": slice(270, 280)}, "3": {"lat": slice(-5, 5), "lon": slice(210, 270)}, "34": {"lat": slice(-5, 5), "lon": slice(190, 240)}, "4": {"lat": slice(-5, 5), "lon": slice(160, 210)}, } return data.sel(**zones[zone]).mean(dim=["lat", "lon"])
2.71875
3
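The rotation applied in _compute_index above is a 45-degree rotation of the (PC1, PC2) plane; a minimal numeric sketch with made-up PC values:

import numpy as np

pc1, pc2 = 1.2, -0.4                 # hypothetical normalized PC values
e_index = (pc1 - pc2) / np.sqrt(2)   # E index: eastern-Pacific mode
c_index = (pc1 + pc2) / np.sqrt(2)   # C index: central-Pacific mode
print(round(e_index, 3), round(c_index, 3))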
datauploader/apps.py
OpenHumans/oh-rescuetime-source
11
12790575
from django.apps import AppConfig


class DatauploaderConfig(AppConfig):
    name = 'datauploader'
1.242188
1
pyvlova/op/__init__.py
ModelTC/pyvlova
1
12790576
# Copyright 2020 <NAME>
# SPDX-License-Identifier: Apache-2.0
from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp
from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd
from .conv import PlainConv2d, Conv2d
from .flatten import Flatten2d
from .grouped_conv import GroupedConv2d, PlainGroupedConv2d
from .linear import Linear, PlainLinear, PlainBiasedLinear
from .padding import Padding
from .pool import PlainPool, AdaptivePool, Pool
from .unary import ReLU, ReLU6, UnaryElementwise
1.398438
1
bash_script_executer.py
Mohan-Zhang-u/TextSummarization
0
12790577
import subprocess
import sys


def main(args):
    runstring = "./run.sh " + args[0]
    print(runstring)
    subprocess.call(runstring, shell=True)


if __name__ == "__main__":
    main(sys.argv[1:])
2.515625
3
src/umls_api_tool/auth.py
dcronkite/umls_api_tool
0
12790578
import json
import urllib.parse

import requests
from lxml.html import fromstring
from loguru import logger


class Authenticator:

    def __init__(self, apikey):
        self.time_granting_ticket = self.get_time_granting_ticket(apikey)
        self.base_url = 'https://uts-ws.nlm.nih.gov/rest'

    @staticmethod
    def get_time_granting_ticket(apikey):
        r = requests.post(
            f'https://utslogin.nlm.nih.gov/cas/v1/api-key',
            data={'apikey': apikey},
            headers={
                'Content-type': 'application/x-www-form-urlencoded',
                'Accept': 'text/plain',
                'User-Agent': 'python'
            },
        )
        return fromstring(r.text).xpath('//form/@action')[0]

    def get_service_ticket(self):
        r = requests.post(
            self.time_granting_ticket,
            data={'service': 'http://umlsks.nlm.nih.gov'},
            headers={
                'Content-type': 'application/x-www-form-urlencoded',
                'Accept': 'text/plain',
                'User-Agent': 'python'
            },
        )
        return r.text

    def get(self, *url, **params):
        if not params:
            params = {'ticket': self.get_service_ticket()}
        else:
            params = {
                'pageSize': 200,
                **params,
            }
        r = None
        try:
            r = requests.get(
                '/'.join((self.base_url, *url)),
                params=urllib.parse.urlencode(params, safe=','),
            )
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            logger.error(e)
            if r:
                print(r.text)
            raise e
        r.encoding = 'utf-8'
        return json.loads(r.text)
2.515625
3
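A usage sketch for the Authenticator above. The API key is a placeholder, and the content endpoint path follows the public UMLS REST documentation, so treat it as an assumption rather than something the file guarantees:

auth = Authenticator('00000000-0000-0000-0000-000000000000')  # placeholder key
concept = auth.get('content', 'current', 'CUI', 'C0009044')   # assumed UMLS path
print(concept['result']['name'])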
goodbye_cruel_world.py
sgriffith3/2021_05_10_pyna
0
12790579
print("Goodbye Cruel World!!!!") print("see ya later, aligator")
1.789063
2
auv_nav/parsers/parse_NOC_nmea.py
ocean-perception/oplab_pipeline
5
12790580
# -*- coding: utf-8 -*-
"""
Copyright (c) 2020, University of Southampton
All rights reserved.
Licensed under the BSD 3-Clause License.
See LICENSE.md file in the project root for full license information.
"""

import pynmea2

from auv_nav.sensors import Category, Usbl
from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone
from oplab import get_file_list, get_raw_folder


def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):
    # parser meta data
    sensor_string = "autosub"
    category = category
    output_format = ftype

    if category == Category.USBL:
        filepath = mission.usbl.filepath
        timezone = mission.usbl.timezone
        beacon_id = mission.usbl.label
        timeoffset = mission.usbl.timeoffset
        timezone_offset = read_timezone(timezone)
        latitude_reference = mission.origin.latitude
        longitude_reference = mission.origin.longitude

        usbl = Usbl(
            mission.usbl.std_factor,
            mission.usbl.std_offset,
            latitude_reference,
            longitude_reference,
        )
        usbl.sensor_string = sensor_string

        path = get_raw_folder(outpath / ".." / filepath)
        file_list = get_file_list(path)

        data_list = []
        for file in file_list:
            with file.open("r", errors="ignore") as nmea_file:
                for line in nmea_file.readlines():
                    parts = line.split("\t")
                    if len(parts) < 2:
                        continue
                    msg = pynmea2.parse(parts[1])
                    if int(msg.ref_station_id) != beacon_id:
                        continue
                    date_str = line.split(" ")[0]
                    hour_str = str(parts[1]).split(",")[1]
                    yyyy = int(date_str[6:10])
                    mm = int(date_str[3:5])
                    dd = int(date_str[0:2])
                    hour = int(hour_str[0:2])
                    mins = int(hour_str[2:4])
                    secs = int(hour_str[4:6])
                    msec = int(hour_str[7:10])
                    epoch_time = date_time_to_epoch(
                        yyyy, mm, dd, hour, mins, secs, timezone_offset
                    )
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    msg.timestamp = epoch_timestamp
                    usbl.from_nmea(msg)
                    data = usbl.export(output_format)
                    data_list.append(data)
        return data_list
2.484375
2
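A worked sketch of the timestamp assembly in parse_NOC_nmea above, with made-up line fragments shaped like the ones the parser slices:

date_str = '17/01/2020'   # leading token of the log line: dd/mm/yyyy
hour_str = '024920.123'   # second NMEA field: HHMMSS.mmm
yyyy, mm, dd = int(date_str[6:10]), int(date_str[3:5]), int(date_str[0:2])
hour, mins = int(hour_str[0:2]), int(hour_str[2:4])
secs, msec = int(hour_str[4:6]), int(hour_str[7:10])
print(yyyy, mm, dd, hour, mins, secs, msec)   # 2020 1 17 2 49 20 123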
src/lingcomp/data_utils/__init__.py
CharlottePouw/interpreting-complexity
2
12790581
from .et_processor import DundeeProcessor, GECOProcessor, ZuCoProcessor
0.914063
1
projects/hupun/test.py
kingking888/crawler-pyspider
1
12790582
import unittest

from hupun.page.hupun_goods.goods_information import GoodsInformation
from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku
from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq
from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery
from hupun.page.in_sale_store_table.table_export import StatementExport
from hupun.page.order import Order
from hupun.page.order_goods import OrderGoods
from hupun.page.purchase_order import PurchaseOrder
from hupun.page.purchase_order_goods import PurchaseOrderGoods
from hupun.page.purchase_store_order import PurchaseStoreOrder
from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods
from hupun_slow_crawl.model.es.store_house import StoreHouse
from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill
from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku
from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock
from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken
from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock
from pyspider.helper.date import Date


class Test(unittest.TestCase):
    def _test_order(self):
        """
        Test section for orders.
        :return:
        """
        Order(True) \
            .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \
            .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test()
        Order(True) \
            .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \
            .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test()

    def _test_order_goods(self):
        """
        Test section for order goods details.
        :return:
        """
        assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD',
                          'TB328906912208400576', '2019-01-17T02:49:20Z').test()

    def _test_purchase_order(self):
        """
        Test section for purchase orders.
        :return:
        """
        assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test()

    def _test_purchase_order_goods(self):
        """
        Test section for the purchase order detail view.
        :return:
        """
        assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test()

    def _test_purchase_store_order(self):
        """
        Test section for purchase stock-in orders.
        :return:
        """
        assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test()

    def _test_purchase_store_order_goods(self):
        """
        Test section for purchase stock-in order detail data.
        :return:
        """
        assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test()

    def _test_statement_export(self):
        """
        Unit test for the inventory (purchase-sale-stock) report export.
        :return:
        """
        storage_ids = StoreHouse().get_storage_ids()
        storage_uids = ','.join(storage_ids) + ','
        StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time(
            Date.now().plus_days(-1).format()).test()

    def _test_statement_task_query(self):
        """
        Unit test for querying inventory report export records.
        :return:
        """
        compare_date = Date.now()
        ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time(
            Date.now().format()).set_delay_seconds(1).test()

    def _test_statement_file_download(self):
        """
        Unit test for downloading the inventory report.
        :return:
        """
        data = {
            "task_id": 3686347,
            "oper_uid": "9459514BF68F3C0A84343938A2CD7D75",
            "status": 2,
            "export_type": 7,
            "exportCaption": "进销存报表",
            "create_time": "2019-06-10T19:12:24Z",
            "download_time": "2019-06-11T12:02:50Z",
            "count": 1462,
            "download_times": 4,
            "oper_nick": None,
            "file_path": "export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx",
            '$dataType': 'dtExportTask',
            '$entityId': '0',
        }
        ExportFileDownloadReq(data).test()

    def _test_choose_purchase_bill(self):
        """
        Unit test for the purchase order details in the "choose purchase order"
        part of the purchase stock-in order.
        :return:
        """
        bill_code = 'CD201905300017'
        storage_uid = 'FBA807A72474376E8CFBBE9848F271B2'
        storage_name = '研发测试仓'
        supplier_uid = 'EDF923722E993179829C929468693160'
        supplier_name = '测试777777'
        ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \
            .set_start_time(Date.now().plus_days(-60).format()) \
            .set_end_time(Date.now().format()) \
            .test()

    def _test_choose_purchase_bill_sku(self):
        """
        Unit test for the goods (SKU) details in the "choose purchase order"
        part of the purchase stock-in order.
        :return:
        """
        bill_uid = '4E914B16058C3D02A42CE6479666A913'
        ChoosePurBillSku(bill_uid).test()

    def _test_submit_purchase_stock(self):
        """
        Unit test for submitting the stock-in changes of the purchase
        stock-in order.
        :return:
        """
        data = [
            {
                "goodsUid": "4AFB3148514C3FA99F332B05AAEC0A92",
                "goodsName": "测试--想念",
                "specUid": "1000577C001E3D14A8041BC5FD4CCDCE",
                "pic1": "http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png",
                "specCode": "1919N00002W404",
                "specName": None,
                "unit_size": 1,
                "pchs_unit": None,
                "unit": None,
                "shouldNums": 87,
                "nums": 1,
                "discount_rate": 100,
                "price": 188,
                "pivtLast": 188,
                "primePrice": 188,
                "base_price": 188,
                "tax_rate": 0,
                "pchs_bill_uid": "483FAB78DF98341C8A7E0F16577E4F21",
                "pchs_bill_code": "CD201905300017",
                "appointBillType": 0,
                "pchs_detail_uid": "9DC3D695B16A3160BAEDD6E249B01C25",
                "pchs_detail_index": "10000",
                "remark": None,
                "openSN": 0,
                "expiration": None,
                "total_money": 188,
                "pay_type": None,
                "pchs_advance_balance": 18128,
                "stock_advance_balance": None,
                "settle_advance_balance": None,
                "tax": 0,
                "net_price": 188,
                "sn": None,
                "$dataType": "v:purchase.stock$dtStockBillDetail"
            },
            {
                "$dataType": "v:purchase.stock$dtStockBillDetail"
            }
        ]
        SubmitPurBillStock(data).test()

    def _test_confirm_purchase_bill_sku(self):
        """
        Unit test for confirming the stock-in of the purchase stock-in order.
        :return:
        """
        token = PurchaseStockToken().get_result()
        ConfirmPurBillStock(token).test()

    def _test_get_purchase_stock_token(self):
        """
        Unit test for fetching the token used by the purchase stock-in flow.
        :return:
        """
        PurchaseStockToken().test()

    def _test_get_goods_information(self):
        """
        Unit test for goods information.
        :return:
        """
        GoodsInformation().test()

    def test_get_goods_information_sku(self):
        """
        Unit test for goods information SKUs.
        :return:
        """
        goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726'
        GoodsInformationsku(goods_uid).test()


if __name__ == '__main__':
    unittest.main()
2.3125
2
borre/__init__.py
younessidbakkasse/bore
1
12790583
""" Borre is a dead simple Farkle dice game implementation and game maker, game rules are simple, you usually have five dices with six sides, you roll the set of dices, and check if you score bonus or regular standard points. Made by a <NAME>, <NAME> for a School project at Hetic. """ __version__ = "0.1.5" from .main import Borre as Borre from .dice import Dice as Dice from .player import Player as Player from .score import Score as Score
2.984375
3
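A usage sketch for the package above; only the exported names are visible here, so the constructor and method calls are pure assumptions and left commented out:

from borre import Borre, Dice, Player, Score

# Hypothetical API, not confirmed by the file above:
# player = Player("Ada")
# dice = [Dice() for _ in range(5)]
# roll = [d.roll() for d in dice]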
resources/Wireshark/WiresharkDissectorFoo/tools/validate-clang-check.py
joshis1/C_Programming
2
12790584
#!/bin/sh
# Copyright 2018, <NAME> (See AUTHORS file)
#
# Verifies last commit with clang-check (like scan-build) for Petri Dish
#
# Wireshark - Network traffic analyzer
# By <NAME> <<EMAIL>>
# Copyright 1998 <NAME>
#
# SPDX-License-Identifier: GPL-2.0-or-later
#

COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v "^D" | cut -f2 | grep "\\.c$\|cpp$" )

for FILE in $COMMIT_FILES; do
    clang-check -analyze ../$FILE
done
1.523438
2
fit2gether_core/models/abstract.py
kapucko/fit2gether
0
12790585
import uuid

from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError


class Fit2getherModel(models.Model):
    class Meta:
        abstract = True

    uuid = models.UUIDField(default=uuid.uuid4, editable=False)
    date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True)
    date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True)


class FromToModel(Fit2getherModel):
    class Meta:
        abstract = True

    date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True)
    date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True)

    def clean(self):
        if self.date_from and self.date_to and self.date_from >= self.date_to:
            msg = _('%(date_from)s has to be before %(date_to)s.') % {
                'date_from': self._meta.get_field('date_from').verbose_name,
                'date_to': self._meta.get_field('date_to').verbose_name
            }
            raise ValidationError({'date_from': msg, 'date_to': msg})
2.3125
2
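A usage sketch of the FromToModel.clean() guard above; Event is a hypothetical concrete subclass, and the snippet assumes a configured Django project:

from datetime import datetime

# event = Event(date_from=datetime(2024, 1, 2), date_to=datetime(2024, 1, 1))
# event.full_clean()   # raises ValidationError keyed on date_from and date_to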
Company_Based_Questions/TCS/Codevita/mock1.py
Satyam-Bhalla/Competitive-Coding
1
12790586
n = int(input())
l = list(map(int, input().split(', ')))
for i in range(len(l)):
    num = l[i]
    a = 0
    while num > 0:
        a += num % 6
        num = num // 6
    l[i] = a
count = 0
for i in range(n):
    for j in range(i, n):
        if l[i] > l[j]:
            count += 1
print(count)
3.09375
3
log_api/migrations/0009_auto_20200728_2127.py
mpupo/oh-my-log
0
12790587
# Generated by Django 3.0.8 on 2020-07-29 00:27

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('log_api', '0008_auto_20200728_2126'),
    ]

    operations = [
        migrations.AlterField(
            model_name='execution',
            name='archived',
            field=models.BooleanField(default=False, verbose_name='Archived'),
        ),
    ]
1.414063
1
pearl/plugins/eightball.py
dynosaur72/pearl
11
12790588
import asyncio, random

import nacre


class EightBallSession:

    answers = [
        "It is certain",
        "It is decidedly so",
        "Without a doubt",
        "Yes definitely",
        "You may rely on it",
        "As I see it, yes",
        "Most likely",
        "Outlook good",
        "Yes",
        "Signs point to yes",
        "Reply hazy try again",
        "Ask again later",
        "Better not tell you now",
        "Cannot predict now",
        "Concentrate and ask again",
        "Don't count on it",
        "My reply is no",
        "My sources say no",
        "Outlook not so good",
        "Very doubtful"
    ]

    def __init__(self, pearl, config):
        self.pearl = pearl
        self.hangouts = self.pearl.hangouts
        self.config = config

        self.buildHandle()

    def build(self):
        pass

    def buildHandle(self):
        messageFilter = nacre.handle.newMessageFilter('^{}\s+8ball(\s.*)?$'.format(self.pearl.config['format']))

        async def handle(update):
            if nacre.handle.isMessageEvent(update):
                event = update.event_notification.event
                if messageFilter(event):
                    await self.respond(event)

        self.pearl.updateEvent.addListener(handle)

    async def respond(self, event):
        message = random.choice(self.answers)
        conversation = self.hangouts.getConversation(event=event)
        await self.hangouts.send(message, conversation)


def load(pearl, config):
    return EightBallSession(pearl, config)
2.609375
3
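The trigger for the plugin above is the regex built in buildHandle(); assuming pearl.config['format'] is '!', it matches messages like the one below:

import re

pattern = r'^{}\s+8ball(\s.*)?$'.format('!')             # '!' is an assumed prefix
print(bool(re.match(pattern, '! 8ball will it rain')))   # True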
src/game/commands/general.py
gtaylor/dott
3
12790589
<filename>src/game/commands/general.py """ General commands that are available to everyone. """ import json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): """ Examines an object. """ name = 'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting to 'here'. if not invoker.location: # This shouldn't ever happen, but... raise CommandError('You appear to be nowhere. Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): """ Checks to see whether the invoker is an admin. If so, admins get a very nerdy examine display that shows an object's un-parsed name/description, and attributes. If the invoker is a normal player, this will simply return the normal description. :rtype: str :returns: The object's appearance, from the invoker's perspective. """ if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): """ Shows the object as it were examined. """ attributes_str = ' Parent: %s (%s)\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\n' % ', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\n' % obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\n' % obj.internal_description if obj.attributes: attributes_str += '\n### ATTRIBUTES ###\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return "%s\n%s" % (name, attributes_str) class CmdGo(BaseCommand): """ Attempts to traverse an exit. """ name = 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError("Destination unknown.") if not obj_to_traverse.base_type == 'exit': invoker.emit_to("That doesn't look like an exit.") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): """ Attempts to enter an object. """ name = 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError("You look around, but can't find it.") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the user message. 
enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to("You enter %s" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): """ Attempts to leave an object. """ name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object puts us. leave_to = location.determine_leave_destination(invoker) # Use the original object's name for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to("You leave %s" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): """ Lists a break-down of available commands. Takes into account your location's command table (if applicable), and admin status. """ name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to send to user. buf = '' if invoker.is_admin(): buf += '\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): """ Given a CommandTable instance, return a string that lists the commands in the table. :param CommandTable table: The command table whose commands to list. :rtype: str :returns: A string list of commands in the table. """ buf = '' for cmd in table.commands: buf += ' %s' % cmd.name return buf class CmdLook(CmdExamine): """ Synonymous with examine, aside from always getting the object's normal appearance, regardless of whether the player is an admin or not. """ name = 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): """ The 'look' command always shows an object's normal appearance, despite whether the invoker is a player or admin. :rtype: str :returns: The object's appearance. """ return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): """ A REALLY basic WHO list. """ name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): """ The proxy has all of the details on who is connected, so the mud server has to ask. This is handled through a deferred and a callback. """ service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): """ Once the proxy gets back to us on who is connected, this callback triggers. :param dict results: The details returned by the proxy. :param PlayerObject invoker: The player who ran the command. """ accounts = results['accounts'] retval = "Player\n" for account in accounts: retval += " %s\n" % account nplayers = len(accounts) if nplayers == 1: retval += 'One player logged in.' else: retval += '%d players logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): """ Communicate with people in the same room as you. """ name = 'say' def func(self, invoker, parsed_cmd): # The sentence to speak. 
        speech = u' '.join(parsed_cmd.arguments)
        # Presentational arrangement for other neighboring objects to see.
        speech_str = u"%s says '%s'" % (invoker.name, speech)
        # What the invoker sees.
        self_str = u"You say '%s'" % speech

        invoker.location.emit_to_contents(speech_str, exclude=[invoker])
        invoker.emit_to(self_str)


class CmdQuit(BaseCommand):
    """
    Disconnects from the game.
    """

    name = 'quit'

    #noinspection PyUnusedLocal
    def func(self, invoker, parsed_cmd):
        invoker.emit_to("Quitting...")

        service = invoker.mud_service
        # This asks the proxy to disconnect any sessions that are currently
        # controlling this object.
        service.proxyamp.callRemote(
            DisconnectSessionsOnObjectCmd,
            object_id=invoker.id,
        )


class CmdVersion(BaseCommand):
    """
    Shows the dott version identifier. Currently a git commit hash.
    """

    name = 'version'

    #noinspection PyUnusedLocal
    def func(self, invoker, parsed_cmd):
        buf = "-" * 78
        buf += "\n %s version %s\n" % (
            settings.GAME_NAME,
            settings.VERSION
        )
        buf += "-" * 78

        # Emit the assembled string, not the 'buffer' builtin.
        invoker.emit_to(buf)
2.59375
3
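A framework-free sketch of the WHO-list formatting performed in CmdWho._wholist_callback above; the account names are made up:

def format_who_list(accounts):
    # Header, one indented row per connected account, then a player count.
    retval = "Player\n"
    for account in accounts:
        retval += " %s\n" % account
    nplayers = len(accounts)
    if nplayers == 1:
        retval += 'One player logged in.'
    else:
        retval += '%d players logged in.' % nplayers
    return retval

print(format_who_list(['alice', 'bob']))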
npro/npro/report/__init__.py
VasuGoel-zz/npro
0
12790590
# Copyright (c) 2013, GreyCube Technologies and contributors
# For license information, please see license.txt

from __future__ import unicode_literals

import os
import shutil

import frappe
from frappe.modules import scrub, get_module_path


def copy_report(
    module="NPro",
    src="Interviews",
    tgt="Interview Results",
):
    """usage: copy_report("NPro", "src", "tgt")"""
    doc = frappe.copy_doc(frappe.get_doc("Report", src))
    doc.report_name = tgt
    doc.insert()
    frappe.db.commit()
    print('Copying "' + src + '" to "' + tgt + '"')

    module_path = get_module_path(module)

    src_folder = module_path and os.path.join(module_path, "report", scrub(src))
    src_path = os.path.join(src_folder, scrub(src) + ".py")
    src_script_path = src_folder and os.path.join(src_folder, scrub(src) + ".js")

    tgt_folder = module_path and os.path.join(module_path, "report", scrub(tgt))
    tgt_path = os.path.join(tgt_folder, scrub(tgt) + ".py")
    tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + ".js")

    shutil.copyfile(src_path, tgt_path)
    shutil.copyfile(src_script_path, tgt_script_path)
    print(src_path, tgt_path)
    print(src_script_path, tgt_script_path)
2.171875
2
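A rough illustration of the module paths the helper above builds; the scrub stand-in mirrors frappe's behavior (lowercase, spaces and hyphens to underscores) and the module path is hypothetical:

import os

def scrub(txt):
    # Simplified stand-in for frappe.scrub.
    return txt.replace(' ', '_').replace('-', '_').lower()

module_path = '/path/to/npro/npro'  # assumed result of get_module_path("NPro")
src, tgt = 'Interviews', 'Interview Results'

print(os.path.join(module_path, 'report', scrub(src), scrub(src) + '.py'))
# /path/to/npro/npro/report/interviews/interviews.py
print(os.path.join(module_path, 'report', scrub(tgt), scrub(tgt) + '.js'))
# /path/to/npro/npro/report/interview_results/interview_results.js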
mazikeen/GeneratorMakedirs.py
hanniballar/mazikeen
0
12790591
from mazikeen.MakedirsBlock import MakedirsBlock from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException("'makedirs' block not recognized") return MakedirsBlock(data)
2.09375
2
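A short usage sketch for the generator above, assuming the mazikeen package is importable:

from mazikeen.GeneratorMakedirs import generateMakedirs
from mazikeen.GeneratorException import GeneratorException

block = generateMakedirs("output/run1/logs")  # a str yields a MakedirsBlock

try:
    generateMakedirs(["not", "a", "string"])  # anything else is rejected
except GeneratorException as err:
    print(err)  # 'makedirs' block not recognized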
face_detect.py
Biel-Hammer/IDphoto
2
12790592
<filename>face_detect.py
import cv2
import mediapipe as mp

mp_face_detection = mp.solutions.face_detection
mp_drawing = mp.solutions.drawing_utils

# For static images:
IMAGE_FILES = []
drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)


def get_face_key_point(image):
    with mp_face_detection.FaceDetection(
        model_selection=1, min_detection_confidence=0.5
    ) as face_detection:
        h, w, _ = image.shape
        print(image.shape)
        # Convert the BGR image to RGB and process it with MediaPipe Face Detection.
        results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

        # Guard against frames with no detected face; otherwise the loop below
        # never runs and face_info/center_point would be unbound.
        if not results.detections:
            return None, None

        # Draw face detections of each face.
        annotated_image = image.copy()
        for detection in results.detections:
            print("Face bounding box:")
            xmin, ymin, width, height = (
                int(detection.location_data.relative_bounding_box.xmin * w),
                int(detection.location_data.relative_bounding_box.ymin * h),
                int(detection.location_data.relative_bounding_box.width * w),
                int(detection.location_data.relative_bounding_box.height * h),
            )
            cv2.rectangle(
                annotated_image,
                pt1=(xmin, ymin),
                pt2=(xmin + width, ymin + height),
                color=(255, 255, 0),
                thickness=5,
            )
            print(xmin, ymin, width, height)
            face_info = {"top": ymin, "left": xmin, "width": width, "height": height}
            print(
                mp_face_detection.get_key_point(
                    detection, mp_face_detection.FaceKeyPoint.NOSE_TIP
                )
            )
            point = mp_face_detection.get_key_point(
                detection, mp_face_detection.FaceKeyPoint.NOSE_TIP
            )
            center_point = int(point.x * w), int(point.y * h)
            print("center point is ", int(point.x * w), int(point.y * h))

            mp_drawing.draw_detection(annotated_image, detection)
        cv2.namedWindow("facemesh", cv2.WINDOW_NORMAL)
        cv2.imshow("facemesh", annotated_image)
        # cv2.waitKey(0)
        # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
        return face_info, center_point


if __name__ == "__main__":
    file = "/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg"
    from face_crop import get_crop_img
    from inference import run
    from lib.utils.oom import free_up_memory

    image = cv2.imread(file)
    face_info, points = get_face_key_point(image)
    img_crop = get_crop_img(image, face_info, points)
    res_img = run(img_crop)
    cv2.imwrite("temp3.jpeg", res_img)
    cv2.namedWindow("img_crop", cv2.WINDOW_NORMAL)
    cv2.imshow("img_crop", res_img)
    cv2.waitKey(0)
    free_up_memory()
2.96875
3
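The pixel conversion above in isolation: MediaPipe reports the bounding box relative to image size, so each field is scaled by the frame width or height (the numbers below are made up):

# Hypothetical 640x480 frame and a relative bounding box as MediaPipe returns it.
h, w = 480, 640
rel = {"xmin": 0.25, "ymin": 0.10, "width": 0.50, "height": 0.60}

xmin = int(rel["xmin"] * w)      # 160
ymin = int(rel["ymin"] * h)      # 48
width = int(rel["width"] * w)    # 320
height = int(rel["height"] * h)  # 288

print((xmin, ymin), (xmin + width, ymin + height))  # top-left and bottom-right corners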
AD-HOC/NIVEL-8/1127/1127.py
Cluxnei/uri-online-judge-problems
0
12790593
def lp(copy, original):
    copy_len = len(copy)
    original_len = len(original)
    if copy_len > original_len:
        return original_len - 1
    return copy_len - 1 if original_len == copy_len else copy_len


def not_flat_or_sharp(char):
    return char != '#' and char != 'b'


def transpose_note(note, notes):
    if note == 'E#':
        return 'F#'
    if note == 'B#':
        return 'C#'
    if note == 'B':
        return 'C'
    current_index = notes.index(note)
    if current_index == len(notes) - 1:
        current_index = -1
    return notes[current_index + 1]


def transpose_copy(copy_notes):
    # Declare the notes
    notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#']
    # Transpose every note of the copy one step up
    return ' '.join(transpose_note(note, notes_asc) for note in copy_notes.split(' '))


def transpose_copy_down(copy_notes):
    # Declare the notes
    notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db']
    # Transpose every note of the copy one step down
    return ' '.join(transpose_note(note, notes_desc) for note in copy_notes.split(' '))


# Infinity loop
while True:
    # Read line
    line = input().split(' ')
    # Get the length of musics
    num_original = int(line[0])
    num_copy = int(line[1])
    # Check if is end of program
    if num_copy == num_original and num_copy == 0:
        break
    # Read the original
    original = input()
    # Read the copy
    copy = input()
    # Check if copy is substring of original and last char is not # or b
    last_char_of_substr = original[lp(copy, original)]
    if copy in original and not_flat_or_sharp(last_char_of_substr):
        print('S')
        continue
    # Transpose copy 1 time up and check substring of original
    is_copy = False
    transposed_copy = transpose_copy(copy)
    without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))
    without_spaces_original = ''.join(original.split(' '))
    last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]
    if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):
        print('S')
        continue
    # Transpose copy 11 more times up and check substring of original
    for i in range(1, 12):
        # Transpose the transposition
        transposed_copy = transpose_copy(transposed_copy)
        # Remove spaces from transposition
        without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))
        # Get last char of original after length of transposed copy
        last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]
        # Check if is substr and not # or b
        if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):
            is_copy = True
            break
    # If not a copy check 1 time down
    if not is_copy:
        transposed_copy = transpose_copy_down(copy)
        without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))
        last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]
        if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):
            print('S')
            continue
        # Check 11 more times down, mirroring the upward search above
        for i in range(1, 12):
            transposed_copy = transpose_copy_down(transposed_copy)
            without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))
            last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]
            if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):
                is_copy = True
                break

    # Puts the output
    print('S' if is_copy else 'N')
3.4375
3
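A worked example of the transpose rule above on the ascending scale, showing the special cases that make the scale wrap around:

notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#']

def up(note):
    # Same rule as transpose_note above, restricted to the ascending scale.
    special = {'E#': 'F#', 'B#': 'C#', 'B': 'C'}
    if note in special:
        return special[note]
    i = notes_asc.index(note)
    return notes_asc[(i + 1) % len(notes_asc)]

print([up(n) for n in ['C', 'E', 'E#', 'B', 'B#']])
# ['C#', 'E#', 'F#', 'C', 'C#']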
eoncloud_web/biz/firewall/migrations/0001_initial.py
eoncloud-dev/eoncloud_web
10
12790594
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules', }, bases=(models.Model,), ), ]
1.8125
2
challenge-49/test_solver.py
mauricioklein/algorithm-exercises
3
12790595
import unittest from solver import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings("ab" , "ba" ), True ) self.assertEqual(buddy_strings("ab" , "ab" ), False) self.assertEqual(buddy_strings("aa" , "aa" ), True ) self.assertEqual(buddy_strings("aaaaaaabc", "aaaaaaacb"), True ) self.assertEqual(buddy_strings("" , "aa" ), False) if __name__ == "__main__": unittest.main()
3.03125
3
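The solver module itself is not included in this record; one implementation consistent with the tests above (LeetCode 859 "Buddy Strings" semantics) could look like this:

def buddy_strings(a, b):
    # Different lengths can never be made equal by one swap.
    if len(a) != len(b):
        return False
    if a == b:
        # Swapping two identical characters keeps the string unchanged, so
        # equal strings work iff some character repeats ("aa" yes, "ab" no).
        return len(set(a)) < len(a)
    # Otherwise exactly two positions may differ, and swapping them must align the strings.
    diffs = [(x, y) for x, y in zip(a, b) if x != y]
    return len(diffs) == 2 and diffs[0] == diffs[1][::-1]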
test/test_billing_prices_api.py
CiscoDevNet/python-msx-sdk
0
12790596
<reponame>CiscoDevNet/python-msx-sdk """ MSX SDK MSX SDK client. # noqa: E501 The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech """ import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): """BillingPricesApi unit test stubs""" def setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self): """Test case for add_price Add price for tenant and event type. # noqa: E501 """ pass def test_delete_price(self): """Test case for delete_price Delete a price. # noqa: E501 """ pass def test_get_price(self): """Test case for get_price Get a price. # noqa: E501 """ pass def test_get_prices_page(self): """Test case for get_prices_page Retrieve a page of prices. # noqa: E501 """ pass def test_update_price(self): """Test case for update_price Update price for an event type and tenant. # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
2.359375
2
model/train.py
dbetm/handwritten-flowchart-with-cnn
24
12790597
<filename>model/train.py # -*- coding: utf-8 -*- from __future__ import division import random import pprint import sys import time import pickle import logging import traceback from optparse import OptionParser import numpy as np import tensorflow as tf from keras import backend as K from keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input from keras.models import Model from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object): """Setup training and run for some epochs.""" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser = None self.all_data = [] self.classes_count = [] self.class_mapping = [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path # Datasets for training, split 80% training and 20% for validation self.train_images = None self.val_images = None # Convolutional Neural Network self.cnn = None # Data generators self.data_gen_train = None self.data_gen_val = None # Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all = None # Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and session setup self.__setup() def __setup(self): """System and session, setup.""" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth = True # to log device placement (on which device the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): """Set hyperparameters before the training process.""" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image = (None, None, 3) def recover_data( self, dataset_path, annotate_path="frcnn/utilities/annotate.txt", generate_annotate=False): """Recover data from annotate file or create annotate file from dataset. 
""" # Instance parser, recover data from annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg was not added, it will be added to the image data dictionaries. if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping = self.class_mapping # Show resume from loaded data self.show_info_data() def show_info_data(self): """Show data that it will use for training.""" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): """Do persistence the config data for training process.""" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has been written to {}, and can be ' message += 'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def train(self): """Train the Faster R-CNN.""" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss = np.Inf # Start iterative process print("The training has begun :)") for epoch_num in range(self.config.num_epochs): start_time = time.time() # init time for current epoch # Instance progress bar for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If an epoch is completed + allowed verbose, then: # print the average number of overlapping bboxes. 
len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update progress bar in the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num += 1 # If the current epoch is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): """Initialize data generators, shuffle the data and create other data structures. """ # Randomize data random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data) self.train_images = [s for s in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for s in self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): """Create the unified model Faster R-CNN.""" img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on the base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class of the detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a model that holds both the RPN and the classifier... 
        # Used to load/save weights for the models
        self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier)

        # Load pretrained weights into the models.
        self.__load_weights()

        # Compile the models so they are ready for training.
        self.__compile_models()

    def __compile_models(self):
        """Create optimizers and compile models."""
        learning_rate = self.config.learning_rate
        num_classes = len(self.classes_count)
        losses = LossesCalculator(num_classes, self.num_anchors)

        optimizer = Adam(lr=learning_rate)
        optimizer_classifier = Adam(lr=learning_rate)
        self.model_rpn.compile(
            optimizer=optimizer,
            loss=[
                LossesCalculator.rpn_loss_cls(),
                LossesCalculator.rpn_loss_regr()
            ]
        )
        self.model_classifier.compile(
            optimizer=optimizer_classifier,
            loss=[
                LossesCalculator.class_loss_cls,
                LossesCalculator.class_loss_regr()
            ],
            metrics={'dense_class_{}'.format(num_classes): 'accuracy'},
        )
        self.model_all.compile(
            optimizer='sgd',
            loss='mae'  # Mean Absolute Error
        )

        # test save summaries
        self.history.save_summary(self.model_rpn, "rpn")
        self.history.save_summary(self.model_classifier, "classifier")
        self.history.save_summary(self.model_all, "all")
        # test save plots
        self.history.save_model_image(self.model_rpn, "rpn")
        self.history.save_model_image(self.model_classifier, "classifier")
        self.history.save_model_image(self.model_all, "all")

    def __load_weights(self):
        """Load weights from a pretrained model."""
        try:
            print('Loading weights from {}'.format(self.config.weights_input_path))
            self.model_rpn.load_weights(self.config.weights_input_path, by_name=True)
            self.model_classifier.load_weights(
                self.config.weights_input_path,
                by_name=True
            )
        except Exception as e:
            print('Exception: {}'.format(e))
            print("Couldn't load pretrained model weights.")
            print("Weights can be found in the keras application folder \
                https://github.com/fchollet/keras/tree/master/keras/applications")

    def __print_average_bbxes(self):
        """Show the average number of overlapping bboxes."""
        total = sum(self.rpn_accuracy_rpn_monitor)
        mean_overlapping_bboxes = float(total)
        mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor)
        self.rpn_accuracy_rpn_monitor = []

        message = "Average number of overlapping bounding boxes from RPN = {}"
        message += " for {} previous iteration(s)."
        print(message.format(mean_overlapping_bboxes, self.config.epoch_length))

        if mean_overlapping_bboxes == 0:
            message = "RPN is not producing bounding boxes that overlap the "
            message += "ground truth boxes. Check RPN settings or keep training."
            print(message)

    def __validate_samples(self, neg_samples, pos_samples):
        """Format positives and negatives samples."""
        if len(neg_samples) > 0:
            # Just choose the first one
            neg_samples = neg_samples[0]
        else:
            # Leave the negative samples list empty
            neg_samples = []
        if len(pos_samples) > 0:
            pos_samples = pos_samples[0]
        else:
            pos_samples = []

        return (neg_samples, pos_samples)

    def __select_samples(self, neg_samples, pos_samples):
        """Select positive and negative samples to fill the configured
        number of RoIs.
        """
        if self.config.num_rois > 1:
            if len(pos_samples) < self.config.num_rois // 2:
                selected_pos_samples = pos_samples.tolist()
            else:
                selected_pos_samples = np.random.choice(
                    a=pos_samples,
                    size=self.config.num_rois // 2,
                    replace=False
                ).tolist()
            try:
                selected_neg_samples = np.random.choice(
                    a=neg_samples,
                    size=self.config.num_rois - len(selected_pos_samples),
                    replace=False
                ).tolist()
            except ValueError:
                """The replace parameter determines whether or not the
                selection is made with replacement (default this parameter
                takes the value False).
""" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: """In the extreme case where num_rois = 1, we pick a random pos or neg sample. """ selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): """Update losses for RPN and classifier.""" # Calculate weights according to classifier batch training. loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): """Update the final losses after the epochs ends.""" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the resume of the epoch if self.config.verbose: message = 'Mean number of bounding boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update the best loss if the current loss is better. if curr_loss < best_loss: message = 'Total loss decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch info info = [] # add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path = "training_results/1" trainer = Trainer(results_path) weights_input_path = "vgg16_weights_tf_dim_ordering_tf_kernels.h5" path_dataset = "/home/octocat/Escritorio/flowchart_3b_v3" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + "/annotate.txt" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + "/model_frcnn.hdf5", weights_input_path=weights_input_path, num_epochs=1 ) trainer.save_config(results_path + "/config.pickle") trainer.train()
2.4375
2
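The positive/negative RoI sampling in __select_samples above, reduced to a standalone sketch; num_rois and the index arrays are made-up values:

import numpy as np

num_rois = 8
pos_samples = np.array([3, 17, 42])   # hypothetical indices of positive RoIs
neg_samples = np.array([1, 2, 5, 6])  # hypothetical indices of negative RoIs

# Up to half the batch comes from positives...
if len(pos_samples) < num_rois // 2:
    selected_pos = pos_samples.tolist()
else:
    selected_pos = np.random.choice(pos_samples, num_rois // 2, replace=False).tolist()

# ...and negatives fill the rest, sampling with replacement only when too few exist.
need = num_rois - len(selected_pos)
selected_neg = np.random.choice(neg_samples, need, replace=len(neg_samples) < need).tolist()

print(len(selected_pos) + len(selected_neg))  # always num_rois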
src/modules/json/pysud_config_json_parser.py
joaonlopes/pysud
1
12790598
import json

import pysud


class ConfigJsonParser():
    """ Parses a pysud game configuration file.

    Attributes:
        config_json_file: A valid json file path containing pysud game configurations.
    """

    def __init__(self, config_json_file_path):
        # Use a context manager so the file handle is closed after loading.
        with open(config_json_file_path, 'r', encoding='utf-8') as fp:
            self.config_json_file = json.load(fp)

    def parse(self):
        configurations_json = self.config_json_file['config']
        config_dict = dict()
        config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL']
        config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME']
        config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME']
        return config_dict

########################################################################
# Testing purposes:                                                    #
# run this script in a directory containing a valid config.json file   #
# along with the pysud basic modules                                   #
#   python3 -i config_json_parser.py                                   #
########################################################################
if __name__ == '__main__':
    CONFIG_FILE = 'config.json'
    config_json_parser = ConfigJsonParser(CONFIG_FILE)
    config = config_json_parser.parse()
    config_json_parser = None
    print('Configurations found: ' + str(config))
3.15625
3
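Inferred from the keys parse() reads, a matching config.json would look like the sample below; the values are illustrative:

import json

sample = '''
{
    "config": {
        "ENABLE_JOURNAL": true,
        "ENABLE_SAVEGAME": true,
        "PLAYER_DEFAULT_NAME": "Adventurer"
    }
}
'''
cfg = json.loads(sample)['config']
print(cfg['PLAYER_DEFAULT_NAME'])  # Adventurer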
client/summoner.py
sandbox-pokhara/auto-disenchanter-v2
0
12790599
<filename>client/summoner.py import requests def get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel'] except requests.exceptions.RequestException: return -1
2.578125
3
editing.py
JuliaSmeredchuk/python_training
0
12790600
class Editing: def __init__(self, company, mobile, address2, notes): self.company = company self.mobile = mobile self.address2 = address2 self.notes = notes
2.625
3
test/testExecutor.py
AutoDash/AutoDash
3
12790601
#!/usr/bin/env python3 import unittest from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if __name__ == '__main__': unittest.main()
2.375
2
complexcnn/modules.py
maxisoft/ComplexCNN
0
12790602
<filename>complexcnn/modules.py<gh_stars>0 # -*- coding: utf-8 -*- import torch import torch.nn as nn import numpy as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ == "__main__": ## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2. compute y = complexConv(x)
2.25
2
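The forward pass above implements the complex product (a+bi)(c+di) = (ac-bd) + (ad+bc)i with two real-valued convolutions. A quick shape check for the 1d variant, assuming the module above is importable as complexcnn.modules:

import torch
from complexcnn.modules import ComplexConv1d

conv = ComplexConv1d(2, 4, kernel_size=3)      # in_channel, out_channel, kernel_size
x = torch.randn(8, 2, 32, dtype=torch.cfloat)  # batch, channel, length
y = conv(x)

print(y.shape, y.dtype)  # torch.Size([8, 4, 30]) torch.complex64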
application.py
Alastairsc/CITS4406-Assignment2
4
12790603
"""Main execution body for program. Contains GUI interface and exporting class that creates files instead of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 """ import argparse import webbrowser import textwrap import xlrd from tkinter import * from tkinter import filedialog, ttk from threading import Thread try: from .data import * from .report import * from .template_reader import * except: from data import * from report import * from template_reader import * terminal = False """ Global Variables: terminal -- boolean value whether program is running through terminal or through GUI progress -- Progress bar showing progress through program """ class DisplayWindow: """GUI for application allowing users to interact with program in simpler and more explanatory way Methods: dataaskopenfile -- Asks for files to process and displays them in the output window dataaskopenfolder -- Asks for folder to process and displays the contained files in the output window filetext -- Fills output box given a list of files maketemplate -- Links to Create template web page of Data-oracle website process_report -- Runs program and generates report for all files processed process_export -- Runs program and creates a file containing analysis of all files processed removefile -- Removes file from being processed after being selected in output window reset -- Resets the program removing all files from the process queue and sets progress bar back to the start templateaskopenfile -- Asks for a template to use during processing and displays it in the output window Variables: datafiles -- list of datafiles to be processed display -- output window Frame object template -- template to use in process if applicable """ def __init__(self): root = Tk() root.wm_title("UWA Data-oracle") self.datafiles = [] self.template = None # Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text="Select File(s) or Folder(s) to process: ").grid(row=0, sticky=E, pady=10) Label(mainwindow, text="Select template file(optional): ").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text="> Create Template", fg="blue") label3.bind("<Button-1>", self.maketemplate) label3.grid(row=2) Button(mainwindow, text="Browse Files...", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text="Browse Templates...", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text="View Report", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text="Export", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text="Reset", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text="Exit", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient="horizontal", mode="determinate") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set("Waiting for File...") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): """ Asks for files to process and displays them in the output window""" self.reset() if self.template: 
Label(self.display, text=str("Template Selected: " + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension="*.csv") if self.datafiles is not None: self.datafiles = [file.name for file in self.datafiles] Label(self.display, text="Selected Files: ", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set("Ready to Process Files...") return self.datafiles def dataaskopenfolder(self): """Asks for folder to process and displays the contained files in the output window""" self.reset() if self.template is not None: Label(self.display, text=str("Template Selected: " + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str("Selected Folder: " + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): """Provides text for output box given a list of files""" remove_file = lambda x, m: (lambda p: self.removefile(x, m)) for file in files: label = Label(self.display, text=str("\t" + file), anchor='w') if os.name == 'posix': label.bind("<Button-2>", remove_file(file, label)) else: label.bind("<Button-3>", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): """Opens webbrowser to create template page on Data-oracle website""" webbrowser.open_new("http://www.data-oracle.com/upload/createTemplate/") def process_report(self): """Runs program and generates report at the end""" self.progress["value"] = 0 self.setstatus("Processing Files...") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): """Runs program and exports results to file""" self.progress["value"] = 0 self.setstatus("Processing Files...") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export file is open self.setstatus("ERROR: Permission Denied, ensure export file is not open in another program") def removefile(self, file, label): """Removes file from process list and removes label""" print("Removing: ", file) self.datafiles.remove(file) label.destroy() def reset(self): """Resets all files""" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus("Waiting for File...") self.progress["value"] = 0 def templateaskopenfile(self): """Asks for template to use in processing""" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension="*.csv") if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str("Template Selected: " + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus("Ready to Process Folder...") return self.template def setmaxprogress(self, max): self.progress["maximum"] = 
max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): """Class that creates a file containing analysis of all files run in program Methods: write_stats -- writes summary of a single data object write_summary -- writes summary of all files to be run after processing all files Variables: filename -- file name to save export file as total_files -- total number of files processed total_invalid -- total number of invalid rows total_empty -- total number of empty columns total_errors -- total numher of errors throughout files """ def __init__(self, filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0 self.total_col = 0 if not offline: with open(self.filename, 'w') as fp: pass def write_stats(self, data): """Writes statistics of a single data object""" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write("Analysis of " + os.path.split(data.filename)[1] + '\n') self.total_files += 1 fp.write("Number of Invalid rows: " + str(len(data.invalid_rows)) + '\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in data.columns if column.empty] fp.write("Number of Empty Columns: " + str(len(empty_columns)) + '\n') self.total_empty = len(empty_columns) fp.write("Number of Error Cells: " + str(len(data.errors)) + '\n') self.total_errors = len(data.errors) fp.write("Number of Valid Columns: " + str(len(data.columns)) + '\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write("Delimiter: comma\n") else: fp.write("Delimiter: " + data.delimiter_type + '\n') fp.write("\n") def write_summary(self): """Writes summary of all files processed""" temp_file = os.path.join(os.path.split(self.filename)[0],"Tempfile") with open( temp_file, 'w') as fp: fp.write("Error Report " + os.path.split(self.filename)[1] + "\n\n") fp.write("Total Files Analysed: " + str(self.total_files) + "\n") fp.write("Total Invalid Rows: " + str(self.total_invalid) + "\n") fp.write("Total Empty Columns: " + str(self.total_empty) + "\n") fp.write("Total Valid Columns: " + str(self.total_col) + "\n") fp.write("Total Errors: " + str(self.total_errors) + "\n\n") with open(self.filename, 'r') as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): """Writes error message for files not processed fully""" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write("Analysis of " + os.path.split(data.filename)[1] + '\n') fp.write("ERROR: Unable to read file, no readable data detected.\n\n") def main(*args, **kwargs): """ Create Data and Report objects, providing necessary information for them to run analysis and create desired outputs (i.e. HTML report or writing to exported file). Keyword Arguments: args -- Arguments provided to the program at runtime. 
exporter -- Exporter object if applicable """ exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0] print("[Step 1/7] Processing file: ",filename) print("[Step 2/7] Reading data") if window is not None: window.step_progress() window.setstatus("Processing " + filename + "...") if len(args) > 1: temp = Template(args[1]) data = Data(filename, temp) else: data = Data(filename) if not data.raw_data: print("ERROR: Unable to read file: " + filename) window.setstatus("ERROR: Unable to read file: " + filename) if exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print("[Step 3/7] Running pre-analysis") if window is not None: window.step_progress() data.pre_analysis() print("[Step 4/7] Finding Errors") if window is not None: window.step_progress() data.find_errors() print("[Step 5/7] Running Analysis") if window is not None: window.step_progress() window.setstatus("Running Analysis on " + filename + "...") data.analysis() if exporter is None: print("[Step 6/7] Generating report") report = Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns string of html, also generates html report for debugging purposes print("[Step 7/7] Report Successfully Generated") print("Completed analysis for: ",filename) if window is not None: window.step_progress() webbrowser.open("file://"+html,new=2) else: print("[Step 6/7] Generating report") exporter.write_stats(data) print("[Step 7/7] Report Successfully Generated") if window is not None: window.step_progress() print("Completed analysis for: ", filename) if window is not None: window.setstatus("Completed Analysis for " + filename) def get_file_dir(location): """Returns the directory of the file with the file name Keyword arguments: location -- A file path. """ return location.rpartition('\\') def process_files(files, templates, exportfile='', window=None): """Process files and templates and runs the program over them. 
Converts excel files and applies template to each file Keyword arguments: files -- files to be processed templates -- files to use as templates in processing exportfile -- file to export analysis to if applicable """ filenames = [] excel = [] for file in files: name_ext = os.path.splitext(file) # TODO handle empty sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print("[Step 0/7] Converting to csv file") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + ".csv" with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + "_" + sheet + ".csv") try: with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already exists and is open window.setstatus("ERROR: Permission Denied, ensure " + new_name + " is not open in another program") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print("ERROR: Unsupported file type: " + file) if window is not None: window.setstatus("WARNING: Unsupported file type " + file) if exportfile != '': export = Exporter(exportfile) else: export = None if window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None or templates: if len(templates) == 1: for name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality when excel files have multiple sheets print("Error, different number of files and templates") else: for name in filenames: main(name, exporter=export, window=window) if export != None: export.write_summary() if excel: for file in excel: os.remove(file) if __name__ == '__main__': """If the program is run with application.py as the argument to the command line execution begins here. This will process all the command line arguments before proceeding. """ files = [] templates = [] if len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\ description=textwrap.dedent('''\ Processes Csv files. ---------------------------------- Can process one or more csv files. Can specify template to describe data further. Templates can be used to describe one or more csv files. If using multiple templates for multiple files list templates in the same order as the files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\ help='one or more filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files') args = parser.parse_args() process_files(args.filenames, args.t) else: DisplayWindow()
3.09375
3
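The xls/xlsx-to-csv step above in isolation, for a hypothetical single-sheet workbook; note that this relies on xlrd 1.x, since xlrd 2.0 dropped .xlsx support:

import csv
import xlrd

def workbook_to_csv(xls_path, csv_path):
    # Dump the first sheet of a workbook to csv, row by row.
    wb = xlrd.open_workbook(xls_path)
    sh = wb.sheet_by_index(0)
    with open(csv_path, 'w', newline='') as fp:
        wr = csv.writer(fp)
        for rownum in range(sh.nrows):
            wr.writerow(sh.row_values(rownum))

workbook_to_csv('data.xlsx', 'data.csv')  # file names are made up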
index/tests.py
CodeMath/SmartCotract_Etherum_Example
2
12790604
from django.test import TestCase, override_settings
from social_django.compat import reverse


@override_settings(SOCIAL_AUTH_GITHUB_KEY='1', SOCIAL_AUTH_GITHUB_SECRET='2')
class AuthTestcase(TestCase):
    def setUp(self):
        session = self.client.session
        session["github_status"] = "1"
        session.save()

    def test_begin_view(self):
        response = self.client.get(reverse('social:begin', kwargs={'backend': 'github'}))
        self.assertEqual(response.status_code, 302)
2.234375
2
main_node/stop_condition/stop_condition.py
dpukhkaiev/BRISE2
4
12790605
import datetime
import json
import logging
import os
import threading
import time
from abc import ABC, abstractmethod

import pika
from tools.mongo_dao import MongoDB


class StopCondition(ABC):

    def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str):
        self.event_host = os.getenv("BRISE_EVENT_SERVICE_HOST")
        self.event_port = os.getenv("BRISE_EVENT_SERVICE_AMQP_PORT")
        self.database = MongoDB(os.getenv("BRISE_DATABASE_HOST"),
                                os.getenv("BRISE_DATABASE_PORT"),
                                os.getenv("BRISE_DATABASE_NAME"),
                                os.getenv("BRISE_DATABASE_USER"),
                                os.getenv("BRISE_DATABASE_PASS"))

        self.experiment_id = experiment_id
        self.stop_condition_type = stop_condition_parameters["Name"]
        self.decision = False
        self.logger = logging.getLogger(stop_condition_parameters["Name"])
        self.repetition_interval = datetime.timedelta(**{
            experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["TimeUnit"]:
                experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["RepetitionPeriod"]}).total_seconds()

    def start_threads(self):
        """
        Start 2 threads.
        One thread listens for the event that shuts down the Stop Condition.
        The second thread runs the functionality of the Stop Condition (`self_evaluation` method).
        """
        self.listen_thread = EventServiceConnection(self)
        self.listen_thread.start()
        self.thread_is_active = True
        self.thread = threading.Thread(target=self.self_evaluation, args=())
        self.thread.start()

    def stop_threads(self, ch, method, properties, body):
        """
        This function stops the Stop Condition microservice.
        :param ch: pika.Channel
        :param method: pika.spec.Basic.GetOk
        :param properties: pika.spec.BasicProperties
        :param body: empty
        """
        self.listen_thread.stop()
        self.thread_is_active = False

    @abstractmethod
    def is_finish(self):
        """
        Main logic of the Stop Condition should be overridden in this method.
        Later, this method will be called in the `self_evaluation` method with the period defined in the Experiment Description.
        When the Stop Condition is triggered to stop BRISE, it changes the internal state of the variable 'self.decision' to True.
        :return: None
        """

    def update_expression(self, stop_condition_type: str, decision: bool) -> None:
        """
        This function sends an event to the Stop Condition Validator with a command to check the StopConditionTriggerLogic expression,
        since this particular Stop Condition was triggered.
        :param stop_condition_type: Stop Condition identifier
        :param decision: Stop Condition decision (boolean)
        """
        dictionary_dump = {"experiment_id": self.experiment_id,
                           "stop_condition_type": stop_condition_type,
                           "decision": decision
                           }
        body = json.dumps(dictionary_dump)

        with pika.BlockingConnection(
                pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection:
            with connection.channel() as channel:
                channel.basic_publish(exchange='',
                                      routing_key='check_stop_condition_expression_queue',
                                      body=body)

    def self_evaluation(self):
        """
        This function performs self-evaluation of the Stop Condition periodically according to the user-defined repetition interval.
        """
        counter = 0
        listen_interval = self.repetition_interval / 10
        previous_decision = self.decision  # for sending the update only when decision changes
        while self.thread_is_active:
            # time.sleep blocks thread execution for whole time specified in function argument
            # and stop message from main-node could be delivered only after this timer ends.
            # This code decision is designed to accelerate stopping process.
            time.sleep(listen_interval)
            counter = counter + 1
            if counter % 10 == 0:
                counter = 0
                numb_of_measured_configurations = 0
                try:
                    numb_of_measured_configurations = \
                        self.database.get_last_record_by_experiment_id("Experiment_state", self.experiment_id)["Number_of_measured_configs"]
                except TypeError:
                    self.logger.warning(f"No Experiment state is yet available for the experiment {self.experiment_id}")
                if numb_of_measured_configurations > 0:
                    search_space_size = \
                        self.database.get_last_record_by_experiment_id("Search_space", self.experiment_id)["Search_space_size"]
                    if numb_of_measured_configurations >= search_space_size:
                        break
                    self.is_finish()
                    if previous_decision != self.decision:
                        msg = f"{self.__class__.__name__} Stop Condition decision: " \
                              f"{'stop' if self.decision else 'continue'} running Experiment."
                        self.logger.info(msg)
                        previous_decision = self.decision
                        self.update_expression(self.stop_condition_type, self.decision)

    def stop_experiment_due_to_failed_sc_creation(self):
        """
        This function sends a stop_experiment message to the main node.
        It could be triggered only if Stop Condition initialization fails.
        """
        with pika.BlockingConnection(
                pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection:
            with connection.channel() as channel:
                channel.basic_publish(exchange='',
                                      routing_key='stop_experiment_queue',
                                      body="Stop condition is not able to initialize.")


class EventServiceConnection(threading.Thread):
    """
    This class is responsible for listening to the `stop_brise_components` queue
    for shutting down the Stop Condition (in case of BRISE Experiment termination).
    """

    def __init__(self, stop_condition: StopCondition):
        """
        Initialize the consumer thread.
        :param stop_condition: an instance of Stop Condition object
        """
        super(EventServiceConnection, self).__init__()
        self.stop_condition: StopCondition = stop_condition
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host,
                                                                            port=self.stop_condition.event_port))
        self.consume_channel = self.connection.channel()
        self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True)
        self.termination_queue_name = self.termination_result.method.queue
        self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name)
        self._is_interrupted = False
        self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True,
                                           on_message_callback=self.stop_condition.stop_threads)

    def stop(self):
        """
        Ask the consumer thread to stop.
        """
        self._is_interrupted = True

    def run(self):
        """
        Entry point of the consumer thread: process events from the termination
        queue until the thread is interrupted.
        """
        try:
            while not self._is_interrupted:
                self.consume_channel.connection.process_data_events(time_limit=1)  # 1 second
        finally:
            if self.connection.is_open:
                self.connection.close()
2.4375
2
random_query_generator.py
Nispand1492/amazon_cloud_project
0
12790606
__author__ = 'Nispand'
import random
import time


def get_lang():
    langs = ["English", "Spanish"]
    idx = random.randint(0, 1)  # renamed to avoid shadowing the builtin `id`
    print(type(langs[idx]))
    return langs[idx]


def get_segment_id():
    seg_id = random.randint(1, 103214)
    #print(seg_id)
    return seg_id


def get_contract_id():
    # Zero-pad to four characters. The range checks must run on the integer
    # value, so use elif instead of two independent ifs.
    contract_id = random.randint(28, 3361)
    if 100 <= contract_id <= 999:
        contract_id = "0" + str(contract_id)
    elif 28 <= contract_id <= 99:
        contract_id = "00" + str(contract_id)
    #print(contract_id)
    return contract_id


def get_plan_id():
    return random.randint(1, 220)


def get_contract_year():
    return 2013


def get_tier_level():
    return random.randint(1, 978)


def get_tier_type_desc():
    str_values = ["Mail order gap ", "Mail order ", "In Network gap ", "In Network "]
    days = ["30 days", "60 days", "90 days"]
    tier = str_values[random.randint(0, 3)] + days[random.randint(0, 2)]
    return tier


def get_sentence_sort_order():
    order = random.randint(1, 2564)
    return order


def get_category_code():
    code = random.randint(1, 33)
    return code

"""
print(get_lang())
print(get_segment_id())
print(get_contract_id())
print(get_plan_id())
print(get_contract_year())
print(get_tier_level())
print(get_tier_type_desc())
print(get_sentence_sort_order())
print(get_category_code())
"""

def setquery(noq):
    qrs = []
    lim = random.randint(200, 800)
    fields = ["Lang", "segment_id", "contract_id", "plan_id", "contract_year",
              "tier_level", "tier_type_desc", "sentences_sort_order", "category_code"]
    while len(qrs) < noq:
        query = "Select "
        no_f = random.randint(1, 9)
        #print(no_f)
        if no_f == 9:
            query = query + "*"
        elif no_f == 1:
            f_id = random.randint(0, 8)
            #print(f_id)
            query = query + fields[f_id]
        else:
            # pick no_f distinct fields out of all nine, not just the first no_f
            f_id = random.sample(range(len(fields)), no_f)
            query = query + ",".join(fields[i] for i in f_id)
        query = query + " from mytable LIMIT " + str(lim)
        if query not in qrs:
            qrs.append(query)
    return qrs


def gen_rand_query(noq):
    #start_time = time.time()
    queries = setquery(noq)  # setquery requires the number of queries
    #end_time = time.time() - start_time
    #print(str(end_time) + " seconds")
    return queries
2.71875
3
tomomibot/session.py
adzialocha/tomomibot
28
12790607
<gh_stars>10-100
import os
import random
import threading
import time

from keras.models import load_model
from sklearn.cluster import KMeans
import librosa
import numpy as np
import tensorflow as tf

from tomomibot.audio import (AudioIO,
                             slice_audio,
                             detect_onsets,
                             is_silent,
                             mfcc_features,
                             get_db)
from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS
from tomomibot.train import reweight_distribution
from tomomibot.utils import (get_num_classes,
                             encode_duration_class,
                             encode_dynamic_class,
                             encode_feature_vector,
                             decode_classes)

CHECK_WAV_INTERVAL = 0.1        # Check .wav queue interval (in seconds)
MAX_DENSITY_ONSETS = 10         # How many onsets count as max density
PLAY_DELAY_EXP = 5              # Exponent for maximum density delay
RESET_PROPABILITY = 0.1         # Probability of resetting the sequence


class Session():

    def __init__(self, ctx, voice, model, reference_voice=None, **kwargs):
        self.ctx = ctx

        self.num_sound_classes = kwargs.get('num_classes')
        self.use_dynamics = kwargs.get('dynamics')
        self.use_durations = kwargs.get('durations')
        self.penalty = kwargs.get('penalty')
        self.samplerate = kwargs.get('samplerate')
        self.seq_len = kwargs.get('seq_len')
        self.threshold_db = kwargs.get('threshold')

        # These parameters can be changed during performance
        self._interval = kwargs.get('interval')
        self._temperature = kwargs.get('temperature')

        # Prepare audio I/O
        try:
            self._audio = AudioIO(ctx,
                                  samplerate=self.samplerate,
                                  device_in=kwargs.get('input_device'),
                                  device_out=kwargs.get('output_device'),
                                  channel_in=kwargs.get('input_channel'),
                                  channel_out=kwargs.get('output_channel'),
                                  volume=kwargs.get('volume'))
        except RuntimeError as err:
            self.ctx.elog(err)

        self.ctx.log('Loading ..')

        # Prepare concurrent threads
        self._thread = threading.Thread(target=self.run, args=())
        self._thread.daemon = True

        self._play_thread = threading.Thread(target=self.play, args=())
        self._play_thread.daemon = True

        self._lock = threading.Lock()

        # Prepare playing logic
        self._sequence = []
        self._wavs = []
        self._density = 0.0

        self.is_running = False

        # Load model & make it ready for being used in another thread
        model_name = '{}.h5'.format(model)
        model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name)
        self._model = load_model(model_path)
        self._model._make_predict_function()
        self._graph = tf.get_default_graph()

        # Calculate number of total classes
        num_classes = get_num_classes(self.num_sound_classes,
                                      self.use_dynamics,
                                      self.use_durations)
        num_model_classes = self._model.layers[-1].output_shape[1]
        if num_model_classes != num_classes:
            self.ctx.elog('The given model was trained with a different '
                          'number of classes: given {}, but '
                          'should be {}.'.format(num_classes,
                                                 num_model_classes))

        # Prepare voice and k-means clustering
        if reference_voice is None:
            reference_voice = voice
        else:
            voice.fit(reference_voice)
        self._voice = voice

        self._kmeans = KMeans(n_clusters=self.num_sound_classes)
        self._kmeans.fit(reference_voice.points)

        # Get the classes of the voice sound material / points
        point_classes = self._kmeans.predict(self._voice.points)
        self._point_classes = []
        for idx in range(num_classes):
            indices = np.where(point_classes == idx)
            self._point_classes.append(indices[0])

        self.ctx.log('Voice "{}" with {} samples'
                     .format(voice.name, len(voice.points)))

    @property
    def master_volume(self):
        return self._audio.volume

    @master_volume.setter
    def master_volume(self, value):
        self._audio.volume = value

    @property
    def interval(self):
        return self._interval

    @interval.setter
    def interval(self, value):
        with self._lock:
            self._interval = value

    @property
    def temperature(self):
return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running = True # Start reading audio signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\n') def stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1: # Get next wav file to play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample "{}" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! self._audio.play(wav) # Remove the played sample from our queue self._wavs = self._wavs[1:] def tick(self): """Main routine for live sessions""" # Read current frame buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when possible slices = [] if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for y in slices: y_slice = y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector) # Check for too long sequences, cut it if necessary penalty = self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if we already have enough data to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True: # Play all possible subsequences min_index = max_index - self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do anything when this is silence if self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when able to work with dynamics etc. if self._voice.version == 2: smiley = '☺' if wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event from sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY: self._sequence = [] self.ctx.vlog('')
2.125
2
course-1:basic-building-blocks/subject-6:conditionals/lesson-5.0:Password puzzle.py
regnart-tech-club/python
0
12790608
# Write a program that takes in an input, # prints 'You may enter.' if the input matches a password of one's own choosing, and # prints 'Begone!' otherwise.
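# A minimal sketch of one possible solution follows. The password "sesame" is
# an arbitrary illustration; any password of one's own choosing works.
password = "sesame"
attempt = input()
if attempt == password:
    print('You may enter.')
else:
    print('Begone!')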
3
3
driver/tests/collision_avoidance.py
IDP-L211/controllers
3
12790609
# Test script for drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for our robot action_queue = [ ("move", [0, 0]), "scan", ("move", [0.9, 0.9]), ("move", [0.9, -0.9]), ("move", [-0.9, 0.9]), ("move", [-0.9, -0.9]), ("move", [0, 0]) ] robot.action_queue = action_queue # Main loop, perform simulation steps until Webots is stopping the controller while robot.step(timestep) != -1: robot.execute_next_action()
2.671875
3
asch/server/server.py
DavidMChan/asch
0
12790610
<filename>asch/server/server.py
import random

import pymongo
from flask import Flask, render_template
from flask_restful import Api

from asch.config import Config
from asch.server.resources import *
from experiments import EXPERIMENT_TYPES  # noqa: F401

# Flask app configuration
app = Flask(__name__)
app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random()))

# Setup database connection
mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None))

# Setup API
api = Api(app)
api.add_resource(PlayAPIResource, '/api/v0/play')
api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task')
api.add_resource(ParticipantViewAPIResource, '/api/v0/participants')
api.add_resource(ParticipantFinishedAPIResource, '/api/v0/participants/finished')
api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download')
api.add_resource(LoginAPIResource, '/api/v0/login')
api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session')


@app.route('/')
def index():
    return render_template('index.html')


# Basically, if we don't hit an API call, we'll redirect to the react app
@app.errorhandler(404)
def not_found(e):
    return render_template('index.html')


# Actually run the application
if __name__ == '__main__':
    app.run(port=8080, debug=True)
2.484375
2
tests/unit_tests/test_time_integrator/test_time_integrator.py
cwentland0/perform
6
12790611
<reponame>cwentland0/perform import unittest from perform.time_integrator.time_integrator import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict = {} self.param_dict["dt"] = 1e-7 self.param_dict["time_scheme"] = "bdf" self.param_dict["time_order"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme, "bdf") self.assertEqual(time_integrator.time_order, 2) self.assertEqual(time_integrator.subiter, 0)
2.953125
3
vivit.py
rishikksh20/ViViT-pytorch
204
12790612
<filename>vivit.py import torch from torch import nn, einsum import torch.nn.functional as F from einops import rearrange, repeat from einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x): for attn, ff in self.layers: x = attn(x) + x x = ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth = 4, heads = 3, pool = 'cls', in_channels = 3, dim_head = 64, dropout = 0., emb_dropout = 0., scale_dim = 4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool type must be either cls (cls token) or mean (mean pooling)' assert image_size % patch_size == 0, 'Image dimensions must be divisible by the patch size.' num_patches = (image_size // patch_size) ** 2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w p2) -> b t (h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens = repeat(self.space_token, '() n d -> b t n d', b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x = rearrange(x, 'b t n d -> (b t) n d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... -> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if __name__ == "__main__": img = torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img) print("Shape of out :", out.shape) # [B, num_classes]
2.359375
2
web/app/djrq/templates/admin/requests.py
bmillham/djrq2
1
12790613
<reponame>bmillham/djrq2 # encoding: cinje : from ..template import page as _page : from .. import table_class, table_style, caption_args : from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang="en" : table_class.append('sortable') <table class="#{' '.join(table_class)}" style="#{' '.join(table_style)}" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r in enumerate(requestlist) : try : use requestrow ctx, r : except AttributeError # TODO: Ignore missing songs for now, but this should probably be an error! : print('Missing song', r.song_id) <td colspan=7>Came across a bad row in the requests list for song id ${r.song_id}</td></tr> : end : if i % 50 : flush : end : end </tbody> </table> : end : end : def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class="btn-group"> <button type="button" class="btn btn-xs btn-primary dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"> ${row.status.capitalize()}<span class="caret"></span> </button> <ul class="dropdown-menu"> : for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{"/admin/?change_status&id={}&status={}".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> : use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> : end </tr> : end
2.125
2
visualize_architecture.py
yonycherkos/dl4cv_exercise
0
12790614
# import the necessary packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3, 3) plot_model(model, show_shapes=True, to_file="lenet.png")
2.140625
2
geco/mips/max_cut/generic.py
FreestyleBuild/GeCO
8
12790615
import itertools import pyscipopt as scip import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model("Naive MaxCut") node_variables = {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype="B") edge_variables = {} all_non_negative = True for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d["weight"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype="B" ) if weight < 0: all_non_negative = False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def triangle(graph): model = scip.Model("Triangle MaxCut") edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)["weight"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype="B" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name = naming.undirected_edge_name(u, v) return edge_variables[edge_name]
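# Illustrative usage sketch (not part of the original module). It assumes
# networkx is installed and that every edge carries a "weight" attribute,
# which both formulations above expect.
if __name__ == "__main__":
    import random

    import networkx as nx

    graph = nx.erdos_renyi_graph(n=8, p=0.5, seed=0)
    for _, _, d in graph.edges(data=True):
        d["weight"] = random.randint(1, 10)

    (node_vars, edge_vars), model = naive(graph)
    model.optimize()  # solve the MIP with SCIP
    print("naive max-cut objective:", model.getObjVal())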
2.703125
3
problemtools/tex2html.py
jsannemo/problemtools
0
12790616
<reponame>jsannemo/problemtools #! /usr/bin/env python3 # -*- coding: utf-8 -*- import re import os.path import string import argparse import logging import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured out any way of stopping the plasTeX # renderer from generating a .paux file if os.path.isfile('.paux'): os.remove('.paux')
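# Illustrative sketch of calling convert() (not part of the original module).
# All option values below are assumptions standing in for what the problemtools
# CLI would normally supply via argparse.
if __name__ == '__main__':
    opts = argparse.Namespace(
        destdir='${problem}_html', destfile='index.html', imgbasedir='',
        quiet=False, loglevel='warning', language=None, title='Problem Name',
        css=True, headers=True)
    convert('problems/hello/problem.tex', options=opts)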
2.15625
2
Player.py
sglyon/reversi.py
0
12790617
import time
import socket
import sys

from board import Board

INF = 1.0e100
CORNERS = [(0, 0), (0, 7), (7, 0), (7, 7)]
CENTERS = [(3, 3), (3, 4), (4, 3), (4, 4)]
DANGERS = [(0, 1), (0, 6), (1, 0), (1, 1), (1, 6), (1, 7), (6, 0), (6, 1),
           (6, 6), (6, 7), (7, 1), (7, 6)]
G_EDGES = [(0, 2), (0, 3), (0, 4), (0, 5),
           (2, 0), (3, 0), (4, 0), (5, 0),
           (2, 7), (3, 7), (4, 7), (5, 7),
           (7, 2), (7, 3), (7, 4), (7, 5)]
NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0, -1),
             (0, 1), (1, -1), (1, 0), (1, 1)]


class Player(object):

    def __init__(self, me, you):
        self.me, self.you = me, you
        self.round = 0

        # handling the board
        self.board = Board()
        self.centers_bits = sum(self.board.spaces[i] for i in CENTERS)
        self.corners_bits = sum(self.board.spaces[i] for i in CORNERS)
        self.mine = 0
        self.foe = 0

    def get_valid_moves(self, state, player=None):
        """
        state is: (p1_placed, p2_placed, whose_turn)
        """
        if player is None:
            player = state[2]

        if self.round < 4:
            centers_remaining_bits = self.centers_bits - state[0] - state[1]
            return self.board.bits_to_tuples(centers_remaining_bits)

        if player == 1:
            return self.board.legal_actions(state[0], state[1])
        else:
            return self.board.legal_actions(state[1], state[0])

    def play_game(self, hostname):
        self.load_tree()

        def init_client(hostname):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            server_address = (hostname, 3333 + self.me)
            # write the status message to stderr instead of printing a tuple
            print('starting up on %s port %s' % server_address, file=sys.stderr)
            sock.connect(server_address)
            for ind, thing in enumerate(sock.recv(1024).decode().split("\n")):
                print("when init got {} and {}".format(ind, thing))
            return sock

        def read_message(sock):
            message = sock.recv(1024).decode().split("\n")
            turn = int(message[0])
            if turn == -999:
                time.sleep(1)
                self.save_tree()
                sys.exit()

            self.round = int(message[1])
            self.t1 = float(message[2])
            self.t2 = float(message[3])
            print("turn", turn)
            print("current time:", time.time())
            print("round:", self.round)
            print("t1:", self.t1)
            print("t2:", self.t2)
            count = 4
            self.mine = 0
            self.foe = 0
            for i in range(8):
                for j in range(8):
                    color = int(message[count])
                    if color == self.me:
                        self.mine += self.board.spaces[(i, j)]
                    elif color == self.you:
                        self.foe += self.board.spaces[(i, j)]

                    count += 1

            # update board
            if self.me == 1:
                self.board = Board(self.mine, self.foe)
            else:
                self.board = Board(self.foe, self.mine)

            return turn

        # connect to the game server
        sock = init_client(hostname)

        while True:
            turn = read_message(sock)

            if turn == self.me:
                print("============")
                print("Round: ", self.round)
                # print("Valid moves: ", valid_moves)
                print("mine: ", self.mine)
                print("FOE: ", self.foe)
                print(self.board)

                my_move = self.move(self.pack_state(turn))
                print("My move: ", my_move)
3.21875
3
1865.py
gabzin/beecrowd
3
12790618
for _ in range(int(input())):
    nome, numero = input().split()
    print("Y" if nome == "Thor" else "N")
3.734375
4
aqa/customers/migrations/0002_auto_20201025_1305.py
ksemilla/aqa
0
12790619
<gh_stars>0 # Generated by Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='customers.Customer')), ], ), ]
1.953125
2
Monolith/UM.py
Nouranium/Monolith
0
12790620
<reponame>Nouranium/Monolith<filename>Monolith/UM.py ''' Created on 15 Aug 2015 Universal Machines @author: NoNotCar ''' import Object import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2("UM/Incinerator.png") doc="Burns fuel to destroy items (such as fish poo). IO: Input (2 recommended)" hasio="input" updatable=True fuels={"Woodpile":60,"WoodpileSp":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio="both" doc="Turns stuff into other stuff. This shouldn't be in the game. IO: Both" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name="Wood Chips" img=Img.imgret2("UM/WoodChip.png") value=20 class Flour(Entity.ResourceB): name="Flour" img=Img.imgret2("UM/Flour.png") value=10 class FrozenFish(Entity.ResourceB): name="Frozen Fish" img=Img.imgret2("UM/FroFish.png") value=40 class FrozenSpecialFish(Entity.ResourceB): name="Frozen Special Fish" img=Img.imgret2("UM/FroSpFish.png") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2("UM/Grinder%s.png" % str(n)) for n in range(5)+range(5)[::-1]] doc="Grinds items into 2 powder items. Consumes 500W while operating. IO: Both" recipes={"Woodpile":[WoodChips,240],"WoodpileSp":[WoodChips,240],"Wheat":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2("UM/Fridge.png") doc="Freezes items (a slow process). Can freeze up to 10 items at once. Consumes 500W when starting, and 100W to keep cool. IO: Both" temperature=20 updatable=True recipes={"Fish":[FrozenFish,3600],"Special Fish":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2("UM/logo.png") iscat=True doc="Universal Machines" def __init__(self): self.menu=[Buyers.ObjBuyer(Incinerator,1000),Buyers.ObjBuyer(Grinder,500),Buyers.ObjBuyer(Fridge,200)]
2.515625
3
froide/publicbody/migrations/0012_auto_20180227_1926.py
manonthemat/froide
0
12790621
<reponame>manonthemat/froide # -*- coding: utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email'), ), ]
1.539063
2
loops.py
neuralline/python-project
0
12790622
<gh_stars>0 companies = ["google", "apple", "microsoft"] for element in companies: print(element) """ """ items = [0, 12, 30] total = 0 for element in items: total = total + element print(element) print("total =", total) """ """ c = list(range(1, 5)) print(c)
3.71875
4
pylock/__init__.py
waveaccounting/pylock
6
12790623
<gh_stars>1-10
from importlib import import_module
import logging

from six.moves.urllib import parse

from .backends import LockTimeout  # noqa

DEFAULT_TIMEOUT = 60
DEFAULT_EXPIRES = 10
KEY_PREFIX = 'pylock:'
DEFAULT_BACKEND = {
    'class': 'pylock.backends.redis_lock.RedisLock',
    'connection': 'redis://'
}

logger = logging.getLogger('pylock')


class Lock(object):
    """
    Distributed locking.

    Usage::

        with Lock('my_lock'):
            print("Critical section")

    :param key: The key against which the lock will be held.
    :param expires: We consider any existing lock older than
        ``expires`` seconds to be invalid in order to
        detect crashed clients. This value must be higher
        than it takes the critical section to execute.
    :param timeout: If another client has already obtained the lock,
        sleep for a maximum of ``timeout`` seconds before
        giving up. A value of 0 means no wait (give up
        right away).
    """
    def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None):
        if expires is None:
            expires = DEFAULT_EXPIRES
        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        if backend_class_path is None:
            backend_class_path = DEFAULT_BACKEND['class']
        if backend_connection is None:
            backend_connection = DEFAULT_BACKEND['connection']
        # Load backend class
        backend_class = get_backend_class(backend_class_path)
        logger.info("Using {0} lock backend".format(backend_class.__name__))
        key = "{0}{1}".format(KEY_PREFIX, key)
        connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme)
        client = backend_class.get_client(**connection_info)
        self._lock = backend_class(key, expires, timeout, client)

    def __enter__(self):
        self._lock.acquire()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._lock.release()


class ImproperlyConfigured(Exception):
    pass


def get_backend_class(import_path):
    try:
        dot = import_path.rindex('.')
    except ValueError:
        raise ImproperlyConfigured("%s isn't a pylock backend module." % import_path)
    module, classname = import_path[:dot], import_path[dot + 1:]
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImproperlyConfigured('Error importing pylock backend module %s: "%s"' % (module, e))
    try:
        return getattr(mod, classname)
    except AttributeError:
        raise ImproperlyConfigured('Pylock backend module "%s" does not define a "%s" class.' % (module, classname))


def parse_url(url, url_scheme):
    """Parses a distributed lock backend URL."""
    # Register extra schemes in URLs.
    parse.uses_netloc.append(url_scheme)
    url = parse.urlparse(url)

    # Remove query strings.
    path = url.path[1:]
    path = path.split('?', 2)[0]

    # Update with environment configuration.
    connection_info = {
        'db': path,
        'user': url.username,
        'password': url.password,
        'host': url.hostname,
        'port': url.port
    }
    return connection_info
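# Illustrative usage sketch (not part of the original module). The Redis URL is
# an assumption; any backend class/connection pair supported by pylock works.
if __name__ == '__main__':
    with Lock('my-resource', expires=30, timeout=5,
              backend_connection='redis://localhost:6379/0'):
        print('critical section')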
2.4375
2
ogbg/mol/main.py
icmlsubmission-spec/spec-gnn
0
12790624
<reponame>icmlsubmission-spec/spec-gnn<filename>ogbg/mol/main.py import os import random import torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import numpy as np from tqdm import tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config import process_config, get_args from utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader, desc="Train iteration")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training loss. is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader, desc="Eval iteration")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {"y_true": y_true, "y_pred": y_pred} return evaluator.eval(input_dict) import time def main(): args = get_args() config = process_config(args) print(config) if config.get('seeds') is not None: for seed in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print("Bases total: {}".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx["train"], dataset) ### automatic evaluator. 
takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx["train"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx["valid"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx["test"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if "classification" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \ + str(config.commit_id[0:7]) + '_' \ + str(config.norm) \ + 'E' + str(config.epsilon) \ + 'P' + str(config.power) \ + 'I' + str(config.get('identity', 1)) + '_' \ + str(config.architecture.pooling) + '_' \ + str(config.architecture.layers) + '_' \ + str(config.architecture.hidden) + '_' \ + str(config.architecture.dropout) + '_' \ + str(config.hyperparams.learning_rate) + '_' \ + str(config.hyperparams.milestones) + '_' \ + str(config.hyperparams.decay_rate) + '_' \ + 'B' + str(config.hyperparams.batch_size) \ + 'S' + str(config.seed) \ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is not None: print("Loading model from {}...".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print("Model loaded.") print("Epoch {} evaluating...".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: 
warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print("Epoch {} training...".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_' \ + str(config.time_stamp) + '_' \ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename = filename_header + 'curr.tar' print("Saving model as {}...".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print("Model saved.") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if __name__ == "__main__": main()
2.1875
2
app/main.py
sto/flask-formvalidatorjs
1
12790625
# Aplicación de validación de formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: The above copyright # notice and this permission notice shall be included in all copies or # substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from flask import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=["POST"]) def display(): return jsonify(request.form)
1.835938
2
code/02_modeling/ui_utils/ui_annotation.py
Azure-Samples/MachineLearningSamples-AMLVisionPackage-ISICImageClassification
1
12790626
<reponame>Azure-Samples/MachineLearningSamples-AMLVisionPackage-ISICImageClassification<gh_stars>1-10 from ipywidgets import widgets, Layout, IntSlider import io from cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython, random #from IPython.display import display #from bqplot import pyplot as bqPyplot #sys.path.append("C:\\Users\\pabuehle\\Desktop\\PROJECTS\\pythonLibrary") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, "rb").read() return img_bytes def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in range(0, len(list_1D), n)] else: raise Exception('Unknown list split method') return list_2D # ------------------------------------------------ # Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object and define instance variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size = grid_size # Set images to be shown (in that order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name] = l # Initialize what images are on what image page # (page == grid of images on the right side of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() # Update / redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = "Image #: {}".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all UI elements def update_ui(self): self.bo_updating_ui = True # indicate code is in updating-UI state # Update image grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is ignored and not accessible later in code w_label.description = "Image " + str(img_index) w_button.description = "Zoom" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all UI elements def create_ui(self): # ------------ # Callbacks # ------------ # Callback for image label dropdown menu def dropdown_changed(obj): # Note that updating the dropdown label in code (e.g. in the update_ui() function) # also triggers this change event. Hence need to check if self.bo_updating_ui is False. 
if obj['type'] == 'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for "zoom" button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for "next images" or "previous images" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for "image page" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init self.bo_updating_ui = False # ------------ # UI - image grid # ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description="") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text="Image 0", description="Image 0") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description="Image id: ", value="") w_button.layout.width = "100px" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window # ------------ w_next_page_button = widgets.Button(description="Next images", value="1") w_next_page_button.value = "1" # should not be necessary but bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description="Previous images", value="-1", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = "-1" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text("") self.w_zoom_header.layout.width = "100px" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = 
'420px' # ------------ # UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual images self.update_ui() return (tabs_ui)
2.25
2
refence/script1.py
caoghui/python
0
12790627
<reponame>caoghui/python<gh_stars>0 #A first python script import sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x * 8) input('Press Enter to exit')
2.890625
3
lib/writer.py
qwertytam/going-the-extra-mile
0
12790628
import inspect

# Indent level for writer
_INDENT_LEVEL = 2
_INDENT = ' ' * _INDENT_LEVEL


class _Writer(object):
    '''Writer used to create source files with consistent formatting'''

    def __init__(self, path):
        '''
        Args:
            path (handle): Open file handle to write to
        '''
        self._path = path
        self._indent_level = 0
        self._start_of_line = True

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        '''
        Args:
            exception_type: Type of exception that triggered the exit
            exception_value: Value of exception that triggered the exit
            traceback: Traceback when exit was triggered
        '''
        # Clear the path if an uncaught exception occurred while writing:
        if exception_type:
            self._path.truncate(0)

    def indent(self):
        '''Indent the writer by one level

        To be used in a similar fashion to the write() function in this class.
        See documentation on the write() function for further explanation.
        '''
        self._indent_level += 1
        return self

    def dedent(self):
        '''Dedent the writer by one level

        To be used in a similar fashion to the write() function in this class.
        See documentation on the write() function for further explanation.
        '''
        if self._indent_level > 0:
            self._indent_level -= 1
        return self

    def write(self, content='', end_in_newline=True):
        '''
        Write content to the file

        open(path, 'w') needs to be called prior to calling this function,
        typically by
        ````with open(file, 'w') as f:
            self.write_fn(f)````
        where `self` is a higher level object and `write_fn(self, file)` would
        look something like
        ````def _write_html(self, file):
            with _Writer(file) as w:
                w.write('string to write')
                w.write(self.string_to_write)````

        Args:
            content (str): Content to write, as a string
                Content is cleaned using Python's `inspect.cleandoc()`
            end_in_newline (bool): Whether or not to write a newline at the end
                Default is True.
        '''
        lines = inspect.cleandoc(content).splitlines()

        for index, line in enumerate(lines):
            # Indent if the start of a line
            if self._start_of_line:
                self._path.write(_INDENT * self._indent_level)

            # Write the line
            self._path.write(line)

            # Write a new line if there's still more content
            if index < len(lines) - 1:
                self._path.write('\n')
                self._start_of_line = True

        # If the content should end in a newline, write it
        if end_in_newline:
            self._path.write('\n')
            self._start_of_line = True
        else:
            self._start_of_line = False

        return self
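# Illustrative usage sketch (not part of the original module): writing a small
# indented block to a hypothetical "example.txt".
if __name__ == '__main__':
    with open('example.txt', 'w') as f:
        with _Writer(f) as w:
            w.write('body {')
            w.indent()
            w.write('color: black;')
            w.dedent()
            w.write('}')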
4.3125
4
main.py
kmirzavaziri/ma-nqp
0
12790629
<reponame>kmirzavaziri/ma-nqp<filename>main.py
import math
import os
import random
import multiprocessing as mp
import numpy
import time

PRINT_SLICE_INFO = False
PRINT_ITERATION_NO = True
PRINT_ITERATION_BEST_ANSWER = True
PRINT_ITERATION_BEST_ANSWER_DETAILS = False
PRINT_ITERATION_ALL_ANSWERS = False
PRINT_TIME_INFO = False
PRINT_ALL_TIME_INFO = True
PARALLEL = False


class Chromosome:
    def __init__(self, data=None):
        global QUEENS
        if data is None:
            self.__data = list(range(QUEENS))
            random.shuffle(self.__data)
        else:
            self.__data = data
        self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS + 1)}
        self.__antidiagonals = {key: 0 for key in range(2 * QUEENS - 1)}
        self.cost = 0
        for i in range(QUEENS):
            self.__maindiagonals[i - self.__data[i]] += 1
            self.__antidiagonals[i + self.__data[i]] += 1
        diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values())
        for diagonal in diagonals:
            if diagonal > 0:
                self.cost += diagonal - 1

    def __str__(self):
        return self.__data.__str__() + ': ' + str(self.cost)

    def __lt__(self, other):
        return self.cost > other.cost

    def __mul__(self, other):
        global QUEENS
        (side1, side2) = random.sample(range(QUEENS + 1), 2)
        start = min(side1, side2)
        end = max(side1, side2)
        if PRINT_SLICE_INFO:
            print(start, end)
        first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end))
        second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end))
        return [first_child, second_child]

    def __invert__(self):
        return self.__swap(random.randint(0, MUTATION_DEGREE), False)

    def __pos__(self):
        return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True)

    def __swap(self, count, should_be_better):
        global QUEENS
        result = Chromosome(self.__data)
        for _ in range(count):
            (q1, q2) = random.sample(range(QUEENS), 2)
            if PRINT_SLICE_INFO:
                print(q1, q2)
            new_cost = result.cost
            new_maindiagonals = result.__maindiagonals.copy()
            new_antidiagonals = result.__antidiagonals.copy()
            new_maindiagonals[q1 - result.__data[q1]] -= 1
            if new_maindiagonals[q1 - result.__data[q1]] >= 1:
                new_cost -= 1
            new_maindiagonals[q2 - result.__data[q2]] -= 1
            if new_maindiagonals[q2 - result.__data[q2]] >= 1:
                new_cost -= 1
            new_antidiagonals[q1 + result.__data[q1]] -= 1
            if new_antidiagonals[q1 + result.__data[q1]] >= 1:
                new_cost -= 1
            new_antidiagonals[q2 + result.__data[q2]] -= 1
            if new_antidiagonals[q2 + result.__data[q2]] >= 1:
                new_cost -= 1
            new_maindiagonals[q1 - result.__data[q2]] += 1
            if new_maindiagonals[q1 - result.__data[q2]] > 1:
                new_cost += 1
            new_maindiagonals[q2 - result.__data[q1]] += 1
            if new_maindiagonals[q2 - result.__data[q1]] > 1:
                new_cost += 1
            new_antidiagonals[q1 + result.__data[q2]] += 1
            if new_antidiagonals[q1 + result.__data[q2]] > 1:
                new_cost += 1
            new_antidiagonals[q2 + result.__data[q1]] += 1
            if new_antidiagonals[q2 + result.__data[q1]] > 1:
                new_cost += 1
            if new_cost <= result.cost or not should_be_better:
                result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1]
                result.__maindiagonals = new_maindiagonals
                result.__antidiagonals = new_antidiagonals
                result.cost = new_cost
        return result

    @staticmethod
    def __crossover(mother_data: list, father_data: list, start: int, end: int) -> list:
        dimension = len(mother_data)
        data = [None] * dimension
        data[start:end] = mother_data[start:end]
        i = end
        for v in father_data[end:] + father_data[:end]:
            if v not in data:
                if i == start:
                    i = end
                if i == dimension:
                    i = 0
                data[i] = v
                i += 1
        return data

    def solved(self):
        return self.cost == 0


class Population:
    def __init__(self, countOrData):
        if type(countOrData) == int:
            self.__data = [Chromosome() for _ in range(countOrData)]
        elif type(countOrData) == list:
            self.__data = countOrData
        else:
            raise Exception()
        self.__data.sort()

    def iterate(self):
        t0 = time.time()
        children = self.__crossover()
        t1 = time.time()
        if PRINT_TIME_INFO:
            print(f'Crossover took {t1 - t0}')
        children.__mutate()
        t2 = time.time()
        if PRINT_TIME_INFO:
            print(f'Mutation took {t2 - t1}')
        self.__replacement(children)
        t3 = time.time()
        if PRINT_TIME_INFO:
            print(f'Replacement took {t3 - t2}')
        self.__local_search()
        t4 = time.time()
        if PRINT_TIME_INFO:
            print(f'Local Search took {t4 - t3}')

    def __choose(self):
        n = len(self.__data)
        roulette = sum([[i] * (i + 1) for i in range(n)], [])
        turning = random.randint(0, n)
        roulette = roulette[turning:] + roulette[:turning]
        pointers = range(0, len(roulette), math.ceil(len(roulette) / n))
        choices = []
        for pointer in pointers:
            choices.append(self.__data[roulette[pointer]])
        return choices

    def __crossover(self):
        global P_COUNT
        parents = self.__choose()
        random.shuffle(parents)
        if PARALLEL:
            def pair_chunk_calculator(i, pair_chunk, rd):
                rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk], [])

            pair_chunks = numpy.array_split(
                [[parents[i], parents[i + 1]] for i in range(0, len(parents) - 1, 2)], P_COUNT)
            manager = mp.Manager()
            rd = manager.dict()
            processes = [mp.Process(
                target=pair_chunk_calculator,
                args=(i, pair_chunks[i], rd)
            ) for i in range(P_COUNT)]
            for p in processes:
                p.start()
            for p in processes:
                p.join()
            return Population(sum(rd.values(), []))
        else:
            return Population(sum([parents[i] * parents[i + 1] for i in range(0, len(parents) - 1, 2)], []))

    def __mutate(self):
        self.__data = [~c for c in self.__data]

    def __replacement(self, children):
        n = len(children.__data)
        best_children_count = math.floor(REPLACEMENT[0] * n)
        other_children_count = math.floor(REPLACEMENT[1] * n)
        other_parents_count = math.floor(REPLACEMENT[2] * n)
        best_parents_count = n - best_children_count - other_children_count - other_parents_count
        self.__data = (
            children.__data[-best_children_count:]
            + random.sample(children.__data[:(n - best_children_count)], other_children_count)
            + random.sample(self.__data[:(n - best_parents_count)], other_parents_count)
            + self.__data[-best_parents_count:]
        )
        self.__data.sort()

    def __local_search(self):
        self.__data = [+c for c in self.__data]

    def answer(self) -> Chromosome:
        return self.__data[-1]

    def answers(self) -> list:
        return list(map(lambda c: c.cost, self.__data))


t_start = time.time()

P_COUNT = os.cpu_count()
QUEENS = 5000
N = 10
MUTATION_DEGREE = 1
LOCAL_SEARCH_DEGREE = [150, 200]
REPLACEMENT = [.7, .1, .1]
ESCAPE_THRESHOLD_PROPORTION = .3
ESCAPE_PROPORTION = .5

population = Population(N)
i = 0
while True:
    if PRINT_ITERATION_NO:
        print(f"Iteration: {i}")
    if PRINT_ITERATION_BEST_ANSWER:
        print(f"Best Answer: {population.answer().cost}")
    if PRINT_ITERATION_BEST_ANSWER_DETAILS:
        print(population.answer())
    if PRINT_ITERATION_ALL_ANSWERS:
        print(f"All Answers: {population.answers()}")
    population.iterate()
    if population.answer().solved():
        break
    i += 1

print(population.answer())

t_end = time.time()
if PRINT_ALL_TIME_INFO:
    print(f'The whole process took {t_end - t_start}')
2.859375
3
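The record above implements a memetic N-queens solver whose fitness is the number of excess queens per diagonal. A minimal sketch of that cost function in plain Python (the names below are mine, not from the repo), handy for sanity-checking small boards:

def diagonal_cost(board):
    # board[i] is the row of the queen in column i; a permutation rules out
    # row/column clashes, so only the two diagonal families can conflict.
    from collections import Counter
    main = Counter(i - r for i, r in enumerate(board))
    anti = Counter(i + r for i, r in enumerate(board))
    # each diagonal with c queens contributes c - 1 conflicts, as in Chromosome.__init__
    return sum(c - 1 for c in list(main.values()) + list(anti.values()))

assert diagonal_cost([1, 3, 0, 2]) == 0  # a known 4-queens solution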
models/ME_objectives.py
MrHuff/DIF-NLDL
0
12790630
import torch
from torch import nn
from gpytorch.kernels import LinearKernel, MaternKernel, RBFKernel, Kernel
from torch.nn.modules.loss import _Loss


class Log1PlusExp(torch.autograd.Function):
    """Implementation of x ↦ log(1 + exp(x))."""
    @staticmethod
    def forward(ctx, x):
        exp = x.exp()
        ctx.save_for_backward(x)
        y = exp.log1p()
        return x.where(torch.isinf(exp), y.half() if x.type() == 'torch.cuda.HalfTensor' else y)

    @staticmethod
    def backward(ctx, grad_output):
        x, = ctx.saved_tensors
        y = (-x).exp().half() if x.type() == 'torch.cuda.HalfTensor' else (-x).exp()
        return grad_output / (1 + y)


class stableBCEwithlogits(_Loss):
    def __init__(self, reduction='mean'):
        super(stableBCEwithlogits, self).__init__(reduction=reduction)
        self.f = Log1PlusExp.apply

    def forward(self, x, y):
        return torch.mean(self.f(x) - x * y)


class linear_benchmark(nn.Module):
    def __init__(self, d):
        super(linear_benchmark, self).__init__()
        self.register_buffer('w', torch.ones(d))
        self.objective = stableBCEwithlogits()

    def forward(self, data, c, debug_xi=None):
        X = data[~c, :]
        Y = data[c, :]
        target = torch.cat([torch.zeros(X.shape[0]), torch.ones(Y.shape[0])]).to(X.device)
        data = torch.cat([X, Y])
        pred = (data @ self.w).squeeze()
        return -self.objective(pred, target)


class MEstat(nn.Module):
    def __init__(self, J, ls=10, test_nx=1, test_ny=1, asymp_n=-1, kernel_type='rbf', linear_var=1e-3):
        super(MEstat, self).__init__()
        print(ls)
        self.ratio = J
        self.hotelling = False
        self.kernel_type = kernel_type
        if kernel_type == 'hotelling':  # Regularization fixes it...
            self.hotelling = True
            self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2)
        else:
            if kernel_type == 'rbf':
                self.kernel_X = RBFKernel()
                self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False)
            elif kernel_type == 'linear':
                self.kernel_X = LinearKernel()
                self.kernel_X._set_variance(linear_var)
            elif kernel_type == 'matern':
                self.kernel_X = MaternKernel(nu=2.5)
                self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False)
            self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5)
        self.kernel_base = Kernel()

    def get_median_ls(self, X):
        with torch.no_grad():
            d = self.kernel_base.covar_dist(X, X)
            return torch.sqrt(torch.median(d[d > 0]))

    @staticmethod
    def cov(m, rowvar=False):
        '''Estimate a covariance matrix given data.

        Covariance indicates the level to which two variables vary together.
        If we examine N-dimensional samples, `X = [x_1, x_2, ... x_N]^T`,
        then the covariance matrix element `C_{ij}` is the covariance of
        `x_i` and `x_j`. The element `C_{ii}` is the variance of `x_i`.

        Args:
            m: A 1-D or 2-D array containing multiple variables and observations.
                Each row of `m` represents a variable, and each column a single
                observation of all those variables.
            rowvar: If `rowvar` is True, then each row represents a
                variable, with observations in the columns. Otherwise, the
                relationship is transposed: each column represents a variable,
                while the rows contain observations.

        Returns:
            The covariance matrix of the variables.
        '''
        if m.dim() > 2:
            raise ValueError('m has more than 2 dimensions')
        if m.dim() < 2:
            m = m.view(1, -1)
        if not rowvar and m.size(0) != 1:
            m = m.t()
        # m = m.type(torch.double)  # uncomment this line if desired
        m_mean = torch.mean(m, dim=1, keepdim=True)
        m = m - m_mean
        return m.matmul(m.t()).squeeze(), m_mean.squeeze()

    def calculate_hotelling(self, X):
        cov_X, x_bar = self.cov(X)
        return cov_X, x_bar, 0, 0

    def get_sample_witness(self, X, Y):
        n_x = X.shape[0]
        n_y = Y.shape[0]
        idx = torch.randperm(n_x)
        idy = torch.randperm(n_y)
        J_x = round(n_x * self.ratio)
        J_y = round(n_y * self.ratio)
        T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach()
        X, Y = X[idx[J_x:], :], Y[idy[J_y:], :]
        return T_x, T_y, X, Y

    def get_umap_stuff(self, X, Y, T):
        kX = self.kernel_X(X, T).evaluate()
        kY = self.kernel_X(Y, T).evaluate()
        return kX, kY, torch.cat([kX, kY], dim=0)

    def forward_plain(self, X, Y, T, n_x, n_y):
        if not self.hotelling:
            cov_X, x_bar, k_X, kX = self.calculate_ME_hotelling(X, T)
            cov_Y, y_bar, k_Y, kY = self.calculate_ME_hotelling(Y, T)
        else:
            cov_X, x_bar, k_X, kX = self.calculate_hotelling(X)
            cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y)
        pooled = 1. / (n_x + n_y - 2.) * (cov_X + cov_Y)
        z = torch.unsqueeze(x_bar - y_bar, 1)
        inv_z, _ = torch.solve(z, pooled.float() + self.coeff * torch.eye(pooled.shape[0]).float().to(pooled.device))
        test_statistic = n_x * n_y / (n_x + n_y) * torch.sum(z * inv_z)
        return test_statistic

    def forward(self, data, c, debug_xi_hat=None):
        X = data[~c, :]
        Y = data[c, :]
        tmp_dev = X.device
        if not self.hotelling:
            T_x, T_y, X, Y = self.get_sample_witness(X, Y)
            n_x = X.shape[0]
            n_y = Y.shape[0]
            T = torch.cat([T_x, T_y], dim=0)
            if not self.kernel_type == 'linear':
                _tmp = torch.cat([X, Y], dim=0).detach()
                with torch.no_grad():
                    sig = self.get_median_ls(_tmp)
                    self.kernel_X.raw_lengthscale = nn.Parameter(
                        sig.unsqueeze(-1).to(tmp_dev), requires_grad=False)  # Use old setup?!??!?!?!
            else:
                _tmp = torch.tensor(0)
                sig = 0
            cov_X, x_bar, k_X, kX = self.calculate_ME_hotelling(X, T)
            cov_Y, y_bar, k_Y, kY = self.calculate_ME_hotelling(Y, T)
        else:
            _tmp = 0
            n_x = X.shape[0]
            n_y = Y.shape[0]
            cov_X, x_bar, k_X, kX = self.calculate_hotelling(X)
            cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y)
        pooled = 1. / (n_x + n_y - 2.) * cov_X + cov_Y * 1. / (n_x + n_y - 2.)
        z = torch.unsqueeze(x_bar - y_bar, 1)
        inv_z, _ = torch.solve(z.float(), pooled.float() + self.coeff * torch.eye(pooled.shape[0]).float().to(tmp_dev))
        test_statistic = n_x * n_y / (n_x + n_y) * torch.sum(z * inv_z)
        if test_statistic.data == 0 or test_statistic == float('inf') or test_statistic != test_statistic:
            # The lengthscale may be the culprit here...
            print(test_statistic)
            print(x_bar)
            print(y_bar)
            print(inv_z)
            print(cov_X)
            print(cov_Y)
            print(k_X)
            print(k_Y)
            print(kX)
            print(kY)
            print(_tmp.min(), _tmp.max())
            print(sig)
            print(n_x * n_y / (n_x + n_y))
            print(pooled)
        return test_statistic

    def calculate_ME_hotelling(self, X, T):
        kX = self.kernel_X(X, T).evaluate()
        x_bar = torch.mean(kX, dim=0)
        k_X = kX - x_bar
        cov_X = k_X.t() @ k_X
        return cov_X, x_bar, k_X, kX
2.640625
3
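A hedged usage sketch for the Log1PlusExp function above (PyTorch only; the import path mirrors this record's file layout and may need adjusting): the naive softplus overflows to inf for large inputs, while the custom autograd function falls back to x itself wherever exp(x) is infinite.

import torch
from models.ME_objectives import Log1PlusExp

x = torch.tensor([-100.0, 0.0, 100.0], requires_grad=True)
naive = (1 + x.exp()).log()    # tensor([0.0000, 0.6931, inf]) -- exp(100) overflows float32
stable = Log1PlusExp.apply(x)  # tensor([0.0000, 0.6931, 100.]) -- finite everywhere
stable.sum().backward()        # gradient is sigmoid(x), also finite everywhere
print(naive, stable, x.grad)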
src/test.py
Felix-Huang11/rembg-greenscreen
0
12790631
from rembg.multiprocessing import parallel_greenscreen

if __name__ == "__main__":
    parallel_greenscreen("/Users/zihao/Desktop/zero/video/group15B_Short.avi", 3, 1, "u2net_human_seg", frame_limit=300)
1.6875
2
Python/cube-can-sat-2016/soft/desktop/src/thermodynamic_parameters/src.py
Misha91908/Portfolio
0
12790632
<filename>Python/cube-can-sat-2016/soft/desktop/src/thermodynamic_parameters/src.py
import matplotlib.pyplot as plt


def reading_data():
    func_dat = []
    f = open("/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT", 'r')
    for line in f:
        if line != '\n':
            func_dat.append(line)
    return func_dat


def sort_data(func_dat):
    """Parse the raw log lines and split the readings into separate lists."""
    log_dat = []
    counter = 0
    data_temp_in = [0]   # final list of temperatures inside the apparatus
    data_temp_out = [0]  # final list of ambient (outside) temperatures
    data_pressure = [0]  # final list of pressure sensor readings
    basic_elements = {'.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ';'}
    for element in range(len(func_dat) - 1):
        for i in range(len(func_dat[element]) - 1):
            if func_dat[element][i] in basic_elements:
                counter += 1
        if (counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5):
            log_dat.append(func_dat[element])
        counter = 0
    for i in range(len(log_dat) - 1):  # extract the values and distribute them across the three lists
        element = log_dat[i].split(';')
        element[0] = float(element[1])
        element[1] = float(element[2])
        element[2] = (float(element[3]) + float(element[4])) / 2.0
        try:
            data_temp_in.append(element[0])
            data_temp_out.append(element[1])
            data_pressure.append(element[2])
        except IndexError:
            data_temp_in.append(data_temp_in[-1])
            data_temp_out.append(data_temp_out[-1])
            data_pressure.append(data_pressure[-1])
    return data_temp_in, data_temp_out, data_pressure


def simple_moving_average(data_temp_in, data_temp_out, data_pressure):
    # value smoothing (simple moving average)
    lengthList1 = len(data_temp_in)
    lengthList2 = len(data_temp_out)
    lengthList3 = len(data_pressure)
    for i in range(lengthList1 - 30):  # average the values
        iterator = 0
        for iterator_1 in range(30):
            iterator += 1
            data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator]
        data_temp_in[i] = data_temp_in[i] / 30
    for i in range(lengthList2 - 30):  # average the values
        iterator = 0
        for iterator_1 in range(30):
            iterator += 1
            data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator]
        data_temp_out[i] = data_temp_out[i] / 30
    for i in range(lengthList3 - 30):  # average the values
        iterator = 0
        for iterator_1 in range(30):
            iterator += 1
            data_pressure[i] = data_pressure[i] + data_pressure[i + iterator]
        data_pressure[i] = data_pressure[i] / 30
    for i in range(30):
        data_temp_in.pop()
        data_temp_out.pop()
        data_pressure.pop()


data = []
try:
    data = reading_data()
except BaseException:
    print('file access error')

data_temp_in, data_temp_out, data_pressure = sort_data(data)
simple_moving_average(data_temp_in, data_temp_out, data_pressure)

fig = plt.figure()  # plots of temperature inside/outside the apparatus and of the pressure
plt.subplot(311)
plt.plot(data_temp_in)
plt.title('Temperature inside')
plt.xticks(fontsize='10')
plt.yticks(fontsize='10')
plt.xlabel('Number of points')
plt.ylabel('temperature, celsius')
plt.grid()
plt.subplot(312)
plt.plot(data_temp_out)
plt.title('Temperature outside')
plt.xticks(fontsize='10')
plt.yticks(fontsize='10')
plt.xlabel('Number of points')
plt.ylabel('temperature, celsius')
plt.grid()
plt.subplot(313)
plt.plot(data_pressure)
plt.title('Pressure')
plt.xticks(fontsize='10')
plt.yticks(fontsize='10')
plt.xlabel('Number of points')
plt.ylabel('pressure, kPa')
plt.grid()
plt.tight_layout()
plt.savefig("../../img/termo.png", format='png')
2.953125
3
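The nested loops in simple_moving_average above average each sample with its next 30 neighbours (note they divide the 31-term sum by 30). A hedged numpy equivalent that divides by the true window length (numpy is assumed to be available alongside matplotlib):

import numpy as np

def moving_average(values, window=31):
    # forward-looking mean over `window` samples; mode='valid' drops the tail,
    # matching the 30 trailing pop() calls in the original
    kernel = np.ones(window) / window
    return np.convolve(values, kernel, mode='valid')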
webspider/spiders/sheJiYuanSpider.py
JohnnieFucker/WebSpider
0
12790633
# -*- coding: utf-8 -*-
import scrapy
from webspider.items import SheJiYuanItem
import sys
reload(sys)
sys.setdefaultencoding('utf-8')


class SheJiYuanSpider(scrapy.Spider):
    name = 'SheJiYuanSpider'
    allowed_domains = ['qieta.com']
    start_urls = []
    headers = {
        'Accept': 'text / html, application / xhtml + xml, application / xml;q = 0.9, image / webp, image / apng, * / *;q = 0.8',
        'Accept-Encoding': 'deflate, br',
        'Connection': 'keep-alive'}

    def start_requests(self):
        url_head = 'http://old.qieta.com/engineering/show-'
        for i in range(1, 40002):
            url = url_head + '%s.html' % i
            self.start_urls.append(url)
        for url in self.start_urls:
            print url
            yield scrapy.Request(url, callback=self.parse, headers=self.headers)

    def parse(self, response):
        item = SheJiYuanItem()
        content = response.xpath('//div[@class="content"]/div/text()').extract()
        name = response.xpath('//h1[@class="title"]/text()').extract()[0]
        name = name[:name.index('(')]
        item['name'] = name
        item['foundTime'] = content[1][content[1].index(':') + 1:]
        item['type'] = content[2][content[2].index(':') + 1:]
        item['area'] = content[3][content[3].index(':') + 1:]
        item['address'] = content[4][content[4].index(':') + 1:]
        item['contacts'] = content[6][content[6].index(':') + 1:]
        item['tel'] = content[7][content[7].index(':') + 1:]
        yield item
2.703125
3
tests/wordplay_tests.py
deostroll/sengine
0
12790634
<reponame>deostroll/sengine
from unittest import TestCase
from context import *
import pdb
import pprint


def write(l):
    w = open('debug.txt', 'a')
    w.write(str(l) + '\n')
    w.close()


def testWrap(tc, func):
    def execute(inp, expected, hasReturn=True):
        if hasReturn:
            actual = func(inp)
            tc.assertEqual(actual, expected, "fail: \n" + pprint.pformat(locals()))
        else:
            func(inp)
    return execute


class WordPlayTests(TestCase):
    def setUp(self):
        self.board = Board(15)
        self.game = Game(self.board)
        rack = Rack(7)
        self.player = Player('dumy', rack)
        self.game.setPlayer(self.player)

    def test_word_play(self):
        game = self.game
        player = self.player
        game.fillRack(player.rack, 'cxghiji')
        game.setOrientation('horizontal')
        game.setPosition((7, 5))
        result = game.playWord('chi')
        self.assertTrue(result['result'] == True, "should fail")

    def test_first_turn_negative(self):
        game = self.game
        player = self.player
        game.fillRack(player.rack, 'cxghiji')
        game.setOrientation('horizontal')
        game.setPosition((6, 5))
        game.playLetter('c')
        game.playLetter('h')
        game.playLetter('i')
        res = game.endTurn()
        self.assertTrue(res['result'] == False, "should not accept")

    def test_first_turn_positive(self):
        game = self.game
        player = self.player
        game.fillRack(player.rack, 'cxghiji')
        game.setOrientation('horizontal')
        game.setPosition((7, 5))
        game.playLetter('c')
        game.playLetter('h')
        game.playLetter('i')
        # pdb.set_trace()
        res = game.endTurn()
        self.assertTrue(res['result'], "should accept turn")

    def test_first_turn_score(self):
        game = self.game
        player = self.player
        rackLetters = 'sinaete'
        game.fillRack(player.rack, rackLetters)
        game.setOrientation('horizontal')
        game.setPosition((7, 4))
        tileDb = loadTiles()
        tile_scores = tileDb['letter_score']
        word = 'tense'
        game.playWord('tense')
        actualScore = game.getCurrentScore()['score']
        expectedScore = 0
        for ch in word:
            t = next(tile for tile in tileDb['tiles'] if tile.letter == ch)
            expectedScore = expectedScore + t.score
        expectedScore = expectedScore * 2
        self.assertEqual(actualScore, expectedScore)

    def test_first_turn_score_exception(self):
        game = self.game
        player = self.player
        rackLetters = 'sinaete'
        game.fillRack(player.rack, rackLetters)
        game.setOrientation('horizontal')
        game.setPosition((6, 4))
        tileDb = loadTiles()
        tile_scores = tileDb['letter_score']
        word = 'tense'
        res = game.playWord('tense')
        self.assertFalse(res['result'], "should be false")

    def test_compute_lqueue_score(self):
        game = self.game
        # lqueue is the queue of tiles and their positions on the board

        def createTile(l):
            return Tile(l, game.letter_scores[l])

        lqueue = [
            (createTile('t'), (7, 5)),
            (createTile('e'), (7, 6)),
            (createTile('n'), (7, 7)),
            (createTile('s'), (7, 8)),
            (createTile('e'), (7, 9)),
        ]
        score = game._computeQueue(lqueue)
        self.assertEqual(score, 10)
3.25
3
T2GEORES/model_conf.py
jejimenezm/T2GEORES
0
12790635
from datetime import datetime, timedelta
import numpy as np

input_data = {
    'incon_state': 'current',
    'EOS': 1,
    'source_txt': '../input/',
    'ref_date': datetime(1975, 1, 1, 0, 0, 0),
    'z_ref': 600,
    'db_path': '../input/model_month.db',
    'LAYERS': {1: ['A', 100], 2: ['B', 100], 3: ['C', 125], 4: ['D', 60], 5: ['E', 30], 6: ['F', 65],
               7: ['G', 40], 8: ['H', 65], 9: ['I', 30], 10: ['J', 100], 11: ['K', 50], 12: ['L', 250],
               13: ['M', 200], 14: ['N', 400], 15: ['O', 400], 16: ['P', 200], 17: ['Q', 200], 18: ['R', 100]},
    'TITLE': 'Test output TOUGH2',
    'TYPE_RUN': 'production',
    'PARAMETERS': {
        'NOITE': 1,
        'KDATA': 2,
        'MCYC': 100,
        'MCYPR': 30,
        'P': 100,
        'T': 350,
        'X': 0.1,
        'DELTEN': -1,
        'DELTEN_LIST': [10, 30, 50, 1000, 10000, 10000],
    },
    'TIMES': {'TIMES_N': np.arange(datetime(1985, 7, 1), datetime(2015, 7, 1), timedelta(days=120)).astype(datetime)},
    'SOLVR': {
        'MATSLV': 5,
        'ZPROCS': 'Z4',
        'OPROCS': 'O4',
        'RITMAX': 0.04,
        'CLOSUR': 1E-6,
    },
    'INCONS_PARAM': {
        'To': 30,
        'GRADTZ': 0.08,
        'DEPTH_TO_SURF': 100,
        'DELTAZ': 20,
    },
    'RPCAP': {
        'IRP': 3,
        'RP1': 0.4,
        'RP2': 0.03,
        'ICP': 1,
        'ICP1': 1.0E6,
        'ICP2': 0.0,
        'ICP3': 1.0,
    },
    'MULTI': {
        'NK': 1,
        'NEQ': 2,
        'NPH': 2,
        'NB': 6,
    },
    'IT2': {
        'T_DEV': 5,
        'P_DEV': 10,
        'h_DEV': 200,
    },
    'WELLS': ['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9',
              'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18',
              'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27',
              'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34',
              'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10',
              'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'],
    'MAKE_UP_WELLS': ['ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B',
                      'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B'],
    'NOT_PRODUCING_WELL': ['CH-D'],
}
# 'XAH-2R'

mesh_setup = {
    'mesh_creation': True,
    'Xmin': 404000,
    'Xmax': 424000,
    'Ymin': 302000,
    'Ymax': 322000,
    'x_from_boarder': 1000,
    'y_from_boarder': 1000,
    'x_space': 2000,
    'y_space': 2000,
    'x_gap_min': 411300,
    'x_gap_max': 418500,
    'y_gap_min': 304500,
    'y_gap_max': 311250,
    'x_gap_space': 250,
    'y_gap_space': 250,
    'radius_criteria': 150,
    'filename': '../input/well_feedzone_xyz.csv',
    'filepath': '',
    'toler': 0.1,
    'layer_to_plot': 1,
    'plot_names': False,
    'plot_centers': False,
    'plot_layer': False,
    'to_steinar': True,
    'to_GIS': False,
    'plot_all_GIS': False,
    'from_leapfrog': False,
    'line_file': '',
    'fault_distance': 50,
    'with_polygon': True,
    'polygon_shape': "../input/area/polygon.shp",
    "set_inac_from_poly": False,
    'set_inac_from_inner': True,
    'angle': 10,
    'rotate': True,
    'colors': {1: 'red', 2: 'white', 3: 'yellow', 4: 'blue', 5: 'green', 6: 'purple',
               7: '#ff69b4', 8: 'darkorange', 9: 'cyan', 10: 'magenta', 11: '#faebd7',
               12: '#2e8b57', 13: '#eeefff', 14: '#da70d6', 15: '#ff7f50', 16: '#cd853f',
               17: '#bc8f8f', 18: '#5f9ea0', 19: '#daa520'}}

geners = {
    'QA797': {'SL': 'GEN', 'NS': 10, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA763': {'SL': 'GEN', 'NS': 11, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA839': {'SL': 'GEN', 'NS': 12, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA762': {'SL': 'GEN', 'NS': 13, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA796': {'SL': 'GEN', 'NS': 14, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA795': {'SL': 'GEN', 'NS': 15, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'QA761': {'SL': 'GEN', 'NS': 16, 'TYPE': 'MASS', 'GX': 37, 'EX': 1.1E6},
    'EA833': {'SL': 'SRC', 'NS': 81, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA866': {'SL': 'SRC', 'NS': 82, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA897': {'SL': 'SRC', 'NS': 83, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA865': {'SL': 'SRC', 'NS': 84, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA896': {'SL': 'SRC', 'NS': 85, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA831': {'SL': 'SRC', 'NS': 86, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
    'EA864': {'SL': 'SRC', 'NS': 87, 'TYPE': 'DELV', 'GX': 5.000E-11, 'EX': 1.500E+06, 'HG': 1.000E+02},
}

# to_GIS does just one plot
# to_GIS and plot_all_GIS it plots everything
# try polygon true
# 'line_file':'../input/lines.csv',
# maybe is better to take out the function to_GIS from pyamesh and run it alone
# For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit
# the ahuachapan model has another mesh setup
2.09375
2
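A small sketch (plain Python, reusing only the input_data dict above) of turning the LAYERS thicknesses into per-layer top/bottom elevations; treating z_ref as the elevation of the top of layer 'A' is my assumption:

z_top = input_data['z_ref']
elevations = {}
for index in sorted(input_data['LAYERS']):
    name, thickness = input_data['LAYERS'][index]
    elevations[name] = (z_top, z_top - thickness)  # (top, bottom) of the layer
    z_top -= thickness

print(elevations['A'])  # (600, 500)
print(elevations['R'])  # bottom of the model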
helper.py
Wings30306/the-writers-club
0
12790636
import os

from flask import Flask, app, flash, session
from flask_pymongo import PyMongo
from datetime import date, datetime

app = Flask(__name__)

app.config["MONGO_DBNAME"] = os.getenv('MONGO_DBNAME')
app.config["MONGO_URI"] = os.getenv('MONGO_URI')
app.config["SECRET_KEY"] = os.getenv('SECRET_KEY')

mongo = PyMongo(app)

"""Collections"""
stories_collection = mongo.db.stories
users_collection = mongo.db.users
fake_collection = None

"""Helper functions"""


def list_by_type():
    list_by_type = {}
    ratings = []
    genres = []
    fandoms = []
    authors = []
    if session.get('is_adult') == True:
        selection = stories_collection.find()
    else:
        selection = stories_collection.find(
            {"rating": {"$nin": ["R/Adult/NSFW", "Adult/NSFW"]}})
    for story in selection:
        rating = story['rating']
        genres_in_story = story.get('genres')
        if genres_in_story != []:
            for genre in genres_in_story:
                genre
        else:
            genre = "Genre not added"  # guard added to mirror the fandom branch; the original left `genre` unset here
        fandoms_in_story = story.get('fandoms')
        if fandoms_in_story != []:
            for fandom in fandoms_in_story:
                fandom
        else:
            fandom = "Fandom not added"
        author = story['author']
        if rating not in ratings:
            ratings.append(rating)
        if genre not in genres:
            genres.append(genre)
        if fandom not in fandoms:
            fandoms.append(fandom)
        if author not in authors:
            authors.append(author)
    list_by_type.update({"ratings": ratings,
                         "genres": genres,
                         "fandoms": fandoms,
                         "authors": authors})
    return list_by_type


def story_count():
    story_count = []
    ratings_list = list_by_type()["ratings"]
    genres_list = list_by_type()["genres"]
    fandoms_list = list_by_type()["fandoms"]
    authors_list = list_by_type()["authors"]
    for rating in ratings_list:
        count = stories_collection.count_documents({"rating": rating})
        count_rating = {"rating": rating, "total": count}
        story_count.append(count_rating)
    for genre in genres_list:
        count = stories_collection.count_documents({"genres": genre})
        count_genre = {"genre": genre, "total": count}
        story_count.append(count_genre)
    for fandom in fandoms_list:
        count = stories_collection.count_documents({"fandoms": fandom})
        count_fandom = {"fandom": fandom, "total": count}
        story_count.append(count_fandom)
    for author in authors_list:
        count = stories_collection.count_documents({"author": author})
        count_author = {"author": author, "total": count}
        story_count.append(count_author)
    return story_count


def report(item, reason_given, this_story, reported_by):
    stories_collection.find_one_and_update(
        {"url": this_story},
        {'$push': {"reports": {"item_reported": item,
                               "reported_by": reported_by,
                               "reason_given": reason_given}}},
        upsert=True)
    return flash("Report sent to admins.")


def calculate_age(born):
    today = date.today()
    bday = datetime.strptime(born, '%Y-%m-%d')
    age = today.year - bday.year - ((today.month, today.day) < (bday.month, bday.day))
    return age
2.671875
3
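Of the helpers above, calculate_age is the only one that runs without a MongoDB connection; a quick usage sketch:

# e.g. run on 2021-06-01: the birthday has not passed yet, so this prints 20
print(calculate_age('2000-07-15'))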
tests/test_saga/test_executions/test_commit.py
Clariteia/minos_microservice_saga
4
12790637
import unittest
from unittest.mock import (
    AsyncMock,
    call,
)
from uuid import (
    uuid4,
)

from minos.saga import (
    ConditionalSagaStepExecution,
    LocalSagaStep,
    LocalSagaStepExecution,
    RemoteSagaStepExecution,
    Saga,
    SagaContext,
    SagaExecution,
    TransactionCommitter,
)
from tests.utils import (
    MinosTestCase,
)


class TestTransactionCommitter(MinosTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.execution_uuid = uuid4()

        # noinspection PyTypeChecker
        definition = LocalSagaStep(on_execute=LocalSagaStep)
        self.executed_steps = [
            RemoteSagaStepExecution(definition, {"foo"}),
            LocalSagaStepExecution(definition, {"bar"}),
            ConditionalSagaStepExecution(
                definition,
                {"bar"},
                inner=SagaExecution(
                    Saga(steps=[definition], committed=True),
                    self.execution_uuid,
                    SagaContext(),
                    steps=[
                        RemoteSagaStepExecution(definition, {"foo"}),
                        RemoteSagaStepExecution(definition, {"foobar"}),
                    ],
                ),
            ),
            ConditionalSagaStepExecution(definition),
        ]

        self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps)

    def test_transactions(self):
        expected = [
            (self.execution_uuid, "bar"),
            (self.execution_uuid, "foo"),
            (self.execution_uuid, "foobar"),
        ]
        self.assertEqual(expected, self.committer.transactions)

    async def test_commit_true(self):
        get_mock = AsyncMock()
        get_mock.return_value.data.ok = True
        self.broker.get_one = get_mock

        send_mock = AsyncMock()
        self.broker_publisher.send = send_mock

        await self.committer.commit()

        self.assertEqual(
            [
                call(data=self.execution_uuid, topic="ReserveBarTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="ReserveFooTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="ReserveFoobarTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="CommitBarTransaction"),
                call(data=self.execution_uuid, topic="CommitFooTransaction"),
                call(data=self.execution_uuid, topic="CommitFoobarTransaction"),
            ],
            send_mock.call_args_list,
        )

    async def test_commit_false(self):
        get_mock = AsyncMock()
        get_mock.return_value.data.ok = False
        self.broker.get_one = get_mock

        send_mock = AsyncMock()
        self.broker_publisher.send = send_mock

        with self.assertRaises(ValueError):
            await self.committer.commit()

        self.assertEqual(
            [
                call(data=self.execution_uuid, topic="ReserveBarTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="ReserveFooTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="ReserveFoobarTransaction", reply_topic="TheReplyTopic"),
                call(data=self.execution_uuid, topic="RejectBarTransaction"),
                call(data=self.execution_uuid, topic="RejectFooTransaction"),
                call(data=self.execution_uuid, topic="RejectFoobarTransaction"),
            ],
            send_mock.call_args_list,
        )

    async def test_reject(self):
        get_mock = AsyncMock()
        self.broker.get_one = get_mock

        send_mock = AsyncMock()
        self.broker_publisher.send = send_mock

        await self.committer.reject()

        self.assertEqual(
            [
                call(data=self.execution_uuid, topic="RejectBarTransaction"),
                call(data=self.execution_uuid, topic="RejectFooTransaction"),
                call(data=self.execution_uuid, topic="RejectFoobarTransaction"),
            ],
            send_mock.call_args_list,
        )


if __name__ == "__main__":
    unittest.main()
2.234375
2
examples/ForPatching/MediumOverCapacitiesLargeRunner.py
supermihi/scgen
1
12790638
<filename>examples/ForPatching/MediumOverCapacitiesLargeRunner.py
from examples.exampleRunner import runExample

runExample("ForPatching/MediumOverCapacitiesLarge.json", withExcel=False)
1.328125
1
import.py
calzoneman/MarkovBot
3
12790639
<gh_stars>1-10
#!/usr/bin/python3

import argparse
import pymk
import sys
import time


def import_file(session, ns, f, batch_size=1000):
    links = []
    i = 0
    start = time.perf_counter()

    for link in pymk.tokenize(f, link_length=ns.link_length):
        links.append(link)
        i += 1

        if len(links) > batch_size:
            session.create_links(links[:batch_size])
            links = links[batch_size:]
            print('\r%d links imported in %.2fs (total: %d links)' % (
                batch_size,
                (time.perf_counter() - start),
                i
            ), end='')
            start = time.perf_counter()

    if len(links) > 0:
        session.create_links(links)


def main():
    parser = argparse.ArgumentParser(
        description='Import text files into a MarkovBot database'
    )
    parser.add_argument(
        '-d', '--db',
        required=True,
        type=str,
        help='Filename for the SQLite3 database to write to'
    )
    parser.add_argument(
        '-b', '--batch-size',
        default=1000,
        type=int,
        help='Batch size to use for inserts'
    )
    parser.add_argument(
        'input',
        type=argparse.FileType('r', encoding='utf-8', errors='replace'),
        help='Input text file, or "-" to read from STDIN'
    )

    args = parser.parse_args(sys.argv[1:])
    db = pymk.SQLiteDB(args.db)

    with db.session() as session:
        ns = session.get_namespace()
        with args.input:
            import_file(session, ns, args.input, args.batch_size)


if __name__ == '__main__':
    main()
2.71875
3
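A hedged sketch of driving the same import programmatically rather than via the CLI above (it uses only names that appear in this record; 'corpus.db' and 'corpus.txt' are placeholders):

import pymk

db = pymk.SQLiteDB('corpus.db')
with db.session() as session:
    ns = session.get_namespace()
    with open('corpus.txt', encoding='utf-8', errors='replace') as f:
        import_file(session, ns, f, batch_size=500)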
mee6_py_api/exceptions.py
hyperevo/mee6_python_api
14
12790640
#
# HTTP
#

class HTTPRequestError(Exception):
    ''' Base error for any exceptions caused by communication with remote server '''
    pass


class BadRequestError(HTTPRequestError):
    ''' Error 400 '''
    pass


class UnauthorizedError(HTTPRequestError):
    ''' Error 401 '''
    pass


class TooManyRequestsError(HTTPRequestError):
    ''' Error 429 '''
    pass


#
# Local api
#

class BaseMee6PyAPIError(Exception):
    ''' Base error for any exceptions caused by this api '''
    pass
2.21875
2
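A hedged sketch of how a caller might map response status codes onto the exception hierarchy above; the dispatch helper below is an illustration, not part of the module:

# hypothetical dispatch helper, not part of mee6_py_api
_STATUS_TO_ERROR = {
    400: BadRequestError,
    401: UnauthorizedError,
    429: TooManyRequestsError,
}

def raise_for_status(status, message=''):
    error_cls = _STATUS_TO_ERROR.get(status)
    if error_cls is not None:
        raise error_cls(message)
    if status >= 400:
        raise HTTPRequestError(message)  # any other HTTP failure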
do_py/common/managed_list.py
timdavis3991/do-py
0
12790641
""" :date_created: 2020-06-28 """ from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): """ Use this when you need a restriction for a list of DataObject's. """ _restriction = R(list, type(None)) @property def schema_value(self): """ :rtype: list[dict] """ return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): """ :param obj_cls: The DO to check each value in the list against. :type obj_cls: DataObject :param nullable: Valid values are a list of Do's or a NoneType. :type nullable: bool """ super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self): if self.data is not None: items = [] for item in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): """ :param obj_cls: DataObject class reference to wrap each object in list. :type nullable: bool :type key: function :type reverse: bool """ self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): """ Sort the data list after ManagedList does its work. """ super(OrderedManagedList, self).manage() self.data = sorted(self.data, key=self.key, reverse=self.reverse)
2.6875
3
tests/test_bfs.py
Akards/Parallel-Python
1
12790642
#!/usr/bin/env python3

import unittest
import networkx as nx
from Medusa.graphs import bfs


class TestBFS(unittest.TestCase):

    def test_disconnected_graph(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        self.assertEqual(list(G.nodes), node_list)
        bfs.breadth_first_search(G, 'A', 1)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], -1)
        self.assertEqual(G.nodes['C']['distance'], -1)
        self.assertEqual(G.nodes['D']['distance'], -1)
        self.assertEqual(G.nodes['E']['distance'], -1)
        self.assertEqual(G.nodes['F']['distance'], -1)

    def test_sequential(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 1)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_2(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 2)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_3(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 3)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_4(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 4)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_5(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 5)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_6(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 6)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)

    def test_parallel_7(self):
        G = nx.Graph()
        node_list = ['A', 'B', 'C', 'D', 'E', 'F']
        G.add_nodes_from(node_list)
        edge_list = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
        G.add_edges_from(edge_list)
        bfs.breadth_first_search(G, 'A', 7)
        self.assertEqual(G.nodes['A']['distance'], 0)
        self.assertEqual(G.nodes['B']['distance'], 1)
        self.assertEqual(G.nodes['C']['distance'], 1)
        self.assertEqual(G.nodes['D']['distance'], 2)
        self.assertEqual(G.nodes['E']['distance'], 2)
        self.assertEqual(G.nodes['F']['distance'], 3)
3.078125
3
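The tests above pin down the contract of Medusa.graphs.bfs: after the call every node carries a 'distance' attribute, -1 when unreachable, and the worker count must not change the result. A single-threaded reference implementation of that contract (networkx plus stdlib only; my sketch, not the project's parallel code):

import networkx as nx
from collections import deque

def reference_bfs(G, source):
    # -1 marks "not yet reached", matching the disconnected-graph test above
    nx.set_node_attributes(G, -1, 'distance')
    G.nodes[source]['distance'] = 0
    queue = deque([source])
    while queue:
        u = queue.popleft()
        for v in G.neighbors(u):
            if G.nodes[v]['distance'] == -1:
                G.nodes[v]['distance'] = G.nodes[u]['distance'] + 1
                queue.append(v)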
lib/misc.py
lawrencesim/templatize-python
0
12790643
<filename>lib/misc.py
import collections, collections.abc

_types = {
    "UNDEFINED": -1,
    "NULL": 0,
    "NONE": 0,
    "VALUE": 1,
    "STRING": 1,
    "NUMBER": 1,
    "ARRAY": 2,
    "LIST": 2,
    "OBJECT": 3,
    "DICTIONARY": 3,
    "FUNCTION": 4
}
_NT_types = collections.namedtuple("_NT_TYPES", list(_types.keys()))

OVERFLOW = 99
TYPES = _NT_types(*list(_types.values()))

del _types, _NT_types


def is_array(test):
    return isinstance(test, collections.abc.Sequence) and not isinstance(test, str)


def type_of(value):
    if value is None:
        return TYPES.NONE
    if is_array(value):
        return TYPES.ARRAY
    if isinstance(value, collections.abc.Mapping):
        return TYPES.DICTIONARY
    if callable(value):
        return TYPES.FUNCTION
    return TYPES.VALUE


def evalf(func, context, root, handle_exception=None):
    if not context:
        context = {}
    try:
        val = func
        i = 0
        while callable(val):
            i += 1
            if i >= OVERFLOW:
                break
            val = val(context, root)
        return val
    except Exception as e:
        if not handle_exception:
            raise e
        return handle_exception(e)


def format_value(value, format_op, escape_html=False):
    if value is None:
        return ""
    if format_op:
        if format_op in ("raw", "html"):
            value = str(value)
            escape_html = False
        elif format_op == "encode":
            value = str(value)
            escape_html = True
        elif format_op in ("allcaps", "caps", "upper"):
            value = str(value).upper()
        elif format_op in ("lower",):
            value = str(value).lower()
        elif format_op == "capitalize":
            value = str(value)
            new_value = ""
            for i, c in enumerate(value):
                if not i or (not c.isspace() and value[i-1].isspace()):
                    new_value += c.upper()
                else:
                    new_value += c
            value = new_value
        else:
            if format_op[0] == "$":
                format_op = "${0:" + format_op[1:] + "}"
            else:
                format_op = "{0:" + format_op + "}"
            value = format_op.format(value)
    else:
        value = str(value)
    if escape_html:
        value = value.replace("&", "&amp;") \
                     .replace("<", "&lt;") \
                     .replace(">", "&gt;") \
                     .replace("\"", "&quot;") \
                     .replace("'", "&#039;")
    return value
2.796875
3
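format_value above covers named transforms plus str.format-style specs, with a leading '$' as currency sugar. A few illustrative calls, assuming the module is importable as lib.misc:

from lib.misc import format_value

print(format_value(1234.5, '$,.2f'))                # $1,234.50
print(format_value('hello world', 'capitalize'))    # Hello World
print(format_value('a<b', None, escape_html=True))  # a&lt;b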
pybg/tenor.py
bondgeek/pybg
1
12790644
'''
Tenor class

@author: <NAME>
@copyright: BG Research LLC, 2011
@modified: July 2012 to replace SWIG Quantlib bindings with pyQL Cython code.
'''
from datetime import date

from pybg.enums import TimeUnits
from pybg.quantlib.time.api import *
from pybg.ql import pydate_from_qldate, qldate_from_pydate


class Tenor(object):
    _tenorUnits = {'D': TimeUnits.Days,
                   'W': TimeUnits.Weeks,
                   'M': TimeUnits.Months,
                   'Y': TimeUnits.Years}
    _tenorLength = {'D': 365,
                    'W': 52,
                    'M': 12,
                    'Y': 1}  # useful for sorting

    def __init__(self, txt):
        firstNum = True
        firstCh = True
        numTxt = ""
        unit = "Y"
        for i in str(txt).replace(' ', ''):
            if i.isalnum():
                if i.isdigit():
                    numTxt = numTxt + i
                    if firstNum:
                        firstNum = False
                elif i.isalpha():
                    if firstCh and (i.upper() in self._tenorUnits):
                        unit = i.upper()
                        firstCh = False
            else:
                pass
        if firstNum:
            numTxt = "0"
        self.length = int(numTxt)
        self.unit = unit
        self.timeunit = self._tenorUnits.get(self.unit, Days)

    @classmethod
    def fromdates(cls, settle, maturity, daycount=ActualActual()):
        '''
        Returns the tenor associated with settlement and maturity.
        '''
        settle = qldate_from_pydate(settle)
        maturity = qldate_from_pydate(maturity)
        years_ = daycount.year_fraction(settle, maturity)
        if years_ >= 1.0:
            t = "".join((str(int(round(years_))), "Y"))
        else:
            t = "".join((str(int(round(years_ * 12.))), "M"))
        return cls(t)

    def __str__(self):
        return str(self.length) + self.unit

    def __repr__(self):
        return "<Tenor:" + self.__str__() + ">"

    def numberOfPeriods(self, frequency=Semiannual):
        '''Returns the number of integer periods in the tenor
        based on the given frequency.
        '''
        return int(self.term * int(frequency))

    def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True):
        date_ = qldate_from_pydate(date_)
        length_ = self.length if not reverse else -self.length
        date_ = calendar.advance(date_, length_, self.timeunit, convention=convention)
        return date_ if not aspy else pydate_from_qldate(date_)

    def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True):
        '''
        tenor('3m').schedule(settleDate, maturityDate) or
        tenor('3m').schedule(settleDate, '10Y')

        gives a schedule of dates from settleDate to maturity with a
        short front stub.
        '''
        settle_ = qldate_from_pydate(settle_)
        mty_ = qldate_from_pydate(maturity_)
        sched = []

        if type(maturity_) == str and not mty_:
            maturity_ = Tenor(maturity_).advance(settle_,
                                                 convention=convention,
                                                 calendar=calendar)
        else:
            maturity_ = mty_

        dt = maturity_
        while dt.serial > settle_.serial:
            sched.append(calendar.adjust(dt, convention))
            dt = self.advance(dt, reverse=True)
        else:
            sched.append(settle_)
        sched.sort(key=lambda dt: dt.serial)

        if aspy:
            sched = [pydate_from_qldate(dt) for dt in sched]
        return sched

    @property
    def term(self):
        '''
        Length of tenor in years.
        '''
        return float(self.length) / float(self._tenorLength.get(self.unit, 1.0))

    @property
    def QLPeriod(self):
        return Period(self.length, self.timeunit)

    @property
    def tuple(self):
        return (self.length, self.timeunit)
2.359375
2
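A hedged usage sketch for the Tenor class above (it assumes pybg and its Cython QuantLib bindings are installed and built, and that Semiannual carries the conventional QuantLib value of 2):

from datetime import date
from pybg.tenor import Tenor

t = Tenor('10Y')
print(t.term)               # 10.0 years
print(t.numberOfPeriods())  # 20 semiannual periods, under the assumption above
print(Tenor('3M').advance(date(2012, 7, 2)))  # roughly 2012-10-02 on the TARGET calendar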
test_txamqpr/test_txamqp_client.py
aliowka/txamqpr
0
12790645
<gh_stars>0
# Copyright 2015 <NAME> <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import random

from twisted.internet.task import deferLater, LoopingCall, Clock
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred
from twisted.trial import unittest

from txamqpr import txAMQPReconnectingFactory


class MyTestCase(unittest.TestCase):

    def setUp(self):
        self.fetched_counter = 0
        self.published_counter = 0
        self.total_messages_to_send = 1000
        self.random_name = "test-txamqpr-client-%s" % random.randint(0, sys.maxint)
        rabbitmq_conf = {
            "prefetch": 10,
            "exchange_conf": {
                "exchange": self.random_name,
                "type": "fanout",
                "durable": False,
                "auto_delete": True},
            "queue_declare_conf": {
                "queue": self.random_name,
                "durable": False,
                "exclusive": False,
                "arguments": {"x-expires": 180000}},
            "queue_binding_conf": {
                "exchange": self.random_name,
                "queue": self.random_name,
                "routing_key": self.random_name}}
        self.tx = txAMQPReconnectingFactory(**rabbitmq_conf)

    def get_message(self, no_ack=True):
        def on_message(msg):
            print msg
            if msg.method.name != "get-empty":
                self.fetched_counter += 1
                self.assertEqual(msg.content.body, "Test message")
            else:
                if hasattr(self, "disconnector"):
                    self.disconnector.stop()
                self.message_getter.stop()
                if self.show_stoper:
                    reactor.callLater(5, self.show_stoper.callback, None)
                    self.show_stoper = None
            print "GET", self.fetched_counter
            return msg

        def on_error(*args):
            print "Basic get failed:", args

        if no_ack:
            ack_callback = lambda msg: msg
        else:
            ack_callback = lambda msg: self.tx.basic_ack(msg)

        d = self.tx.basic_get(self.random_name, no_ack)
        d.addCallback(on_message).addCallback(ack_callback)
        d.addErrback(on_error)
        return d

    def publish_message(self):
        self.tx.basic_publish("Test message", None)
        print "PUT", self.published_counter
        if self.published_counter >= self.total_messages_to_send:
            self.publisher.stop()
        self.published_counter += 1

    @inlineCallbacks
    def test_pub_and_sub(self):
        yield self.tx.deferred
        self.show_stoper = Deferred()
        self.publisher = LoopingCall(self.publish_message)
        self.message_getter = LoopingCall(self.get_message)
        self.publisher.start(0.01)
        self.message_getter.start(0.01, False)
        yield self.show_stoper

    @inlineCallbacks
    def test_pub_and_sub_while_disconnect(self):
        yield self.tx.deferred
        self.show_stoper = Deferred()
        self.publisher = LoopingCall(self.publish_message)
        self.message_getter = LoopingCall(self.get_message)
        self.disconnector = LoopingCall(self.tx._disconnect)
        self.disconnector.start(5)
        self.publisher.start(0.01)
        self.message_getter.start(0.01, False)
        yield self.show_stoper

    @inlineCallbacks
    def test_pub_and_sub_and_ack(self):
        yield self.tx.deferred
        self.show_stoper = Deferred()
        self.publisher = LoopingCall(self.publish_message)
        self.message_getter = LoopingCall(self.get_message, no_ack=False)
        self.publisher.start(0.01)
        self.message_getter.start(0.01)
        yield self.show_stoper

    @inlineCallbacks
    def test_pub_and_sub_and_ack_with_disconnect(self):
        yield self.tx.deferred
        self.show_stoper = Deferred()
        self.publisher = LoopingCall(self.publish_message)
        self.message_getter = LoopingCall(self.get_message, no_ack=False)
        self.disconnector = LoopingCall(self.tx._disconnect)
        self.disconnector.start(5)
        self.publisher.start(0.01)
        self.message_getter.start(0.01, False)
        yield self.show_stoper

    def tearDown(self):
        self.tx.stopTrying()
        self.tx.p.transport.loseConnection()
1.953125
2
default_resolver.py
dlcs/river-annotations
0
12790646
<reponame>dlcs/river-annotations<filename>default_resolver.py
import hashlib

import settings


def get_anno_details(arguments):
    details = {
        'width': arguments.get('width'),
        'height': arguments.get('height'),
        'canvasURI': arguments.get('canvas'),
        'imageURI': arguments.get('image'),
        'annotationBaseURI': settings.ANNOTATION_BASE + "/anno/" + hashlib.md5(
            arguments.get('canvas')).hexdigest() + "/{{line_number}}"
    }
    return details
2.09375
2
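A usage sketch for get_anno_details above (a settings module defining ANNOTATION_BASE is assumed; the URIs are placeholders):

details = get_anno_details({
    'width': 2000,
    'height': 3000,
    'canvas': 'https://example.org/iiif/canvas/c1',
    'image': 'https://example.org/iiif/image/i1',
})
print(details['annotationBaseURI'])  # <base>/anno/<md5 of canvas URI>/{{line_number}}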
src/spn/tests/test_EM.py
nicoladimauro/SPFlow
0
12790647
import unittest

from spn.algorithms.EM import EM_optimization
from spn.algorithms.Inference import log_likelihood
from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn
from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn
from spn.structure.Base import Context
from spn.structure.StatisticalTypes import MetaType
import numpy as np
from spn.structure.leaves.parametric.Parametric import Gaussian
import tensorflow as tf


class TestEM(unittest.TestCase):
    def test_optimization(self):
        np.random.seed(17)
        data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist()
        data = np.array(data).reshape((-1, 10))
        data = data.astype(np.float32)

        ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1],
                             parametric_types=[Gaussian] * data.shape[1])

        spn = learn_parametric(data, ds_context)

        spn.weights = [0.8, 0.2]
        py_ll = log_likelihood(spn, data)

        print(spn.weights)
        EM_optimization(spn, data)
        print(spn.weights)

        py_ll_opt = log_likelihood(spn, data)


if __name__ == '__main__':
    unittest.main()
2.390625
2
src/Vehicle_detection.py
AmudhanManisekaran/AI-Cop
0
12790648
<filename>src/Vehicle_detection.py
import tensorflow as tf

from utils import backbone
from api import object_counting_api

if tf.__version__ < '1.4.0':
    raise ImportError('Please upgrade your tensorflow installation to v1.4.* or later!')

input_video = "./input_footage/trim.mp4"
# input_video = "rtsp://admin:admin@[email protected]/cam/realmonitor?channel=1&subtype=1"

detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17')

fps = 80
width = 1550
height = 1028
is_color_recognition_enabled = 0
roi = 430
deviation = 10

object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index,
                                                      is_color_recognition_enabled, fps, width, height,
                                                      roi, deviation)  # counting all the objects
2.03125
2
developing/misc/pyc_remove.py
Pyro4Bot-RoboLab/PYRobot
0
12790649
#!/usr/bin/env python3

import subprocess

try:
    subprocess.call(["pyclean", ".."])
except Exception:
    print("error")
else:
    print("*.pyc removed")
1.695313
2
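An equivalent pure-stdlib sketch that avoids shelling out to an external pyclean binary:

import pathlib

# delete compiled bytecode files (*.pyc / *.pyo) under the parent directory
for pyc in pathlib.Path('..').rglob('*.py[co]'):
    pyc.unlink()
print('*.pyc removed')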
mobilealerts/gateway.py
PlusPlus-ua/python-mobilealerts
0
12790650
"""MobileAlerts internet gataway.""" from typing import Any, Awaitable, Callable, Dict, List, Optional import asyncio import logging import socket import struct import time from ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are broadcasts BROADCAST_ADDR = "255.255.255.255" #: UDP port used by the gateway for comunnications PORT = 8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any available gateway in the local network FIND_GATEWAY = 2 #: Find a single available gateway in the local network GET_CONFIG = 3 #: Request the configuration of the gateway SET_CONFIG = 4 #: Set a new configuration. Gateway takes a few seconds to do the update REBOOT = 5 #: A reboot takes about 10s for the gateway to be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark preserved original proxy settings class Gateway: """Controls MobileAlerts internet gataway.""" def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version = "1.50" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name: Any = None self._server: Any = None self._use_proxy: Any = None self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized = False async def init( self, config: Optional[bytes] = None, ) -> None: if config is None: config = await self.get_config() if config is not None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise Exception("Gateway is not initialized") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: """Prepares UDP socket to comunicate with the gateway.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind(("", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: """Prepares command UDP packet to send.""" packet = struct.pack(">H6sH", command, gateway_id, 10) return packet async def send_command( self, command: int, wait_for_result: bool = False, timeout: int = 2 ) -> Optional[bytes]: """Sends command and optional data to the gateway.""" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else: return None finally: sock.close() async def get_config(self, timeout: int = 2) -> Optional[bytes]: """Obtains 
configuration from the gateway."""
        return await self.send_command(FIND_GATEWAY, True, timeout)

    @staticmethod
    def check_config(config: bytes) -> bool:
        """Checks that a config packet is present, long enough and has the declared length."""
        return (
            config is not None
            and (len(config) >= 186)
            and (len(config) == int.from_bytes(config[8:10], "big"))
        )

    def parse_config(self, config: bytes) -> bool:
        """Parses configuration obtained from the gateway."""
        result = self.check_config(config) and (
            (self._id is None) or (self._id == config[2:8])
        )
        if result:
            orig_data = bytearray()
            self._id = config[2:8]
            self._dhcp_ip = IPv4Address(config[11:15])
            self._use_dhcp = config[15] != 0
            self._fixed_ip = IPv4Address(config[16:20])
            self._fixed_netmask = IPv4Address(config[20:24])
            self._fixed_gateway = IPv4Address(config[24:28])
            self._name = config[28 : config.find(0, 28, 49)].decode("utf-8")
            str_end_pos = config.find(0, 49, 114)
            # The original proxy settings are stashed after the marker bytes
            # in the unused tail of the server and proxy string fields.
            if (
                config[str_end_pos + 1] == ORIG_PROXY_BYTE1
                and config[str_end_pos + 2] == ORIG_PROXY_BYTE2
            ):
                orig_data.extend(config[str_end_pos + 3 : 114])
            self._server = config[49:str_end_pos].decode("utf-8")
            self._use_proxy = config[114] != 0
            str_end_pos = config.find(0, 115, 180)
            self._proxy = config[115:str_end_pos].decode("utf-8")
            if (
                config[str_end_pos + 1] == ORIG_PROXY_BYTE1
                and config[str_end_pos + 2] == ORIG_PROXY_BYTE2
            ):
                orig_data.extend(config[str_end_pos + 3 : 180])
            self._proxy_port = int.from_bytes(config[180:182], "big")
            self._fixed_dns = IPv4Address(config[182:186])
            if len(orig_data) > 3:
                self._orig_use_proxy = orig_data[0]
                self._orig_proxy_port = int.from_bytes(orig_data[1:3], "big")
                str_end_pos = orig_data.find(0, 3)
                self._orig_proxy = orig_data[3:str_end_pos].decode("utf-8")
            self._last_seen = time.time()
            self._initialized = True
        return result

    async def update_config(self, timeout: int = 2) -> bool:
        """Requests the configuration from the gateway and parses it."""
        config = await self.get_config(timeout)
        if config is not None:
            return self.parse_config(config)
        else:
            return False

    def set_config(self) -> None:
        """Sends the current configuration to the gateway."""
        self._check_init()
        command = SET_CONFIG
        # Serialize the preserved original proxy settings so they can be
        # smuggled into the unused tails of the string fields below.
        if self._orig_use_proxy is not None:
            orig_name_bytes = bytes(self._orig_proxy, "utf-8")
            orig_data_size = 3 + len(orig_name_bytes)
        else:
            orig_data_size = 0
        orig_data = bytearray(orig_data_size)
        if orig_data_size > 0:
            orig_data[0] = self._orig_use_proxy
            orig_data[1:3] = self._orig_proxy_port.to_bytes(2, "big")
            orig_data[3:orig_data_size] = orig_name_bytes
        orig_data_pos = 0
        packet_size = 181
        packet = bytearray(packet_size)
        packet[0:2] = command.to_bytes(2, "big")
        packet[2:8] = self._id
        packet[8:10] = packet_size.to_bytes(2, "big")
        packet[10] = self._use_dhcp
        packet[11:15] = self._fixed_ip.packed
        packet[15:19] = self._fixed_netmask.packed
        packet[19:23] = self._fixed_gateway.packed
        # Name field: bytes 23..43, zero-padded.
        str_bytes = bytes(self._name, "utf-8")
        packet[23 : 23 + len(str_bytes)] = str_bytes
        str_bytes = bytes(21 - len(str_bytes))
        packet[44 - len(str_bytes) : 44] = str_bytes
        # Server field: bytes 44..108; the unused tail may carry the first
        # chunk of the original proxy settings behind the marker bytes.
        str_bytes = bytes(self._server, "utf-8")
        packet[44 : 44 + len(str_bytes)] = str_bytes
        str_bytes = bytearray(65 - len(str_bytes))
        if orig_data_pos < orig_data_size:
            str_bytes[1] = ORIG_PROXY_BYTE1
            str_bytes[2] = ORIG_PROXY_BYTE2
            orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3)
            str_bytes[3 : 3 + orig_part_size] = orig_data[
                orig_data_pos : orig_data_pos + orig_part_size
            ]
            orig_data_pos += orig_part_size
        packet[109 - len(str_bytes) : 109] = str_bytes
        packet[109] = self._use_proxy
        # Proxy field: bytes 110..174; its tail may carry the remaining chunk.
        str_bytes = bytes(str(self._proxy), "utf-8")
        packet[110 : 110 + len(str_bytes)] = str_bytes
        str_bytes = bytearray(65 - len(str_bytes))
        if orig_data_pos < orig_data_size:
            str_bytes[1] = ORIG_PROXY_BYTE1
            str_bytes[2] = ORIG_PROXY_BYTE2
            orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3)
            str_bytes[3 : 3 + orig_part_size] = orig_data[
                orig_data_pos : orig_data_pos + orig_part_size
            ]
        packet[175 - len(str_bytes) : 175] = str_bytes
        packet[175:177] = self._proxy_port.to_bytes(2, "big")
        packet[177:181] = self._fixed_dns.packed
        sock = Gateway.prepare_socket(1, self._local_ip_address)
        try:
            sock.sendto(packet, (BROADCAST_ADDR, PORT))
        finally:
            sock.close()

    def reset_config(self) -> None:
        """Resets the gateway configuration to default values."""
        self.name = "MOBILEALERTS-Gateway"
        self.use_dhcp = True
        self.fixed_ip = "192.168.1.222"
        self.fixed_netmask = "255.255.255.0"
        self.fixed_gateway = "192.168.1.254"
        self.fixed_dns = "192.168.1.253"
        self.server = "www.data199.com"
        self.use_proxy = False
        self.proxy = "192.168.1.1"
        self.proxy_port = 8080
        self.set_config()

    async def reboot(self, update_config: bool, timeout: int = 30) -> None:
        """Reboots the gateway and optionally updates the configuration."""
        config = await self.send_command(REBOOT, update_config, timeout)
        if update_config and config is not None:
            self.parse_config(config)

    @staticmethod
    async def discover(
        local_ip_address: Optional[str] = None,
        timeout: int = 2,
    ) -> List["Gateway"]:
        """Broadcasts a discovery packet and returns gateway objects created from the responses."""
        result = []
        discovered = []
        loop = asyncio.get_event_loop()
        sock = Gateway.prepare_socket(timeout, local_ip_address)
        packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6))
        try:
            sock.sendto(packet, (BROADCAST_ADDR, PORT))
            while True:
                try:
                    config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout)
                except (socket.timeout, asyncio.TimeoutError):
                    break
                if Gateway.check_config(config):
                    gateway_id = config[2:8]
                    # Gateways may answer more than once; skip duplicates.
                    if gateway_id in discovered:
                        continue
                    discovered.append(gateway_id)
                    gateway = Gateway(gateway_id.hex().upper(), local_ip_address)
                    await gateway.init(config)
                    result.append(gateway)
        finally:
            sock.close()
        return result

    def set_handler(
        self,
        handler: Optional[SensorHandler],
    ) -> None:
        """Sets the callback invoked for every parsed sensor update."""
        self._handler = handler

    def attach_to_proxy(
        self,
        proxy: str,
        proxy_port: int,
        handler: SensorHandler,
    ) -> None:
        """Attaches the gateway to the proxy to read measurements.

        Existing proxy settings will be preserved.
        """
        if self._orig_use_proxy is None:
            self._orig_use_proxy = self._use_proxy
            self._orig_proxy = self._proxy
            self._orig_proxy_port = self._proxy_port
        self._attached = True
        self._use_proxy = True
        self._proxy = IPv4Address(proxy)
        self._proxy_port = proxy_port
        self.set_handler(handler)
        self.set_config()
        # await self.get_config()

    def detach_from_proxy(self) -> None:
        """Detaches the gateway from the proxy and restores the original settings."""
        if self._attached:
            self._use_proxy = self._orig_use_proxy
            self._proxy = self._orig_proxy
            self._proxy_port = self._orig_proxy_port
            self._attached = False
            self._orig_use_proxy = None
            self._orig_proxy = None
            self._orig_proxy_port = None
            self.set_handler(None)
            self.set_config()

    def handle_bootup_update(self, package: bytes) -> None:
        """Handles the gateway's bootup update packet."""
        if (len(package) == 15) and (package[5:11] == self._id):
            _LOGGER.debug(
                "Gateway bootup timestamp %s",
                time.ctime(int.from_bytes(package[1:5], "big")),
            )
            self._version = (
                str(int.from_bytes(package[11:13], "big"))
                + "."
                + str(int.from_bytes(package[13:15], "big"))
            )
            self._last_seen = time.time()

    def add_sensor(self, sensor: Sensor) -> None:
        """Adds a sensor object."""
        self._sensors[sensor.sensor_id] = sensor

    def create_sensor(self, sensor_id: str) -> Sensor:
        """Creates a new sensor object for the given ID."""
        result = Sensor(self, sensor_id)
        self.add_sensor(result)
        return result

    def get_sensor(self, sensor_id: str) -> Sensor:
        """Returns the sensor object for the given ID, creating it if it does not exist."""
        result = self._sensors.get(sensor_id, None)
        if not result:
            result = self.create_sensor(sensor_id)
        return result

    async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None:
        """Handles an update packet for one sensor."""
        _LOGGER.debug(
            "Update package %s, checksum %s",
            package.hex().upper(),
            hex(package_checksum),
        )
        # The checksum is the low 7 bits of the byte sum of the package.
        checksum = 0
        for b in package:
            checksum += b
        checksum &= 0x7F
        if checksum == package_checksum:
            self._last_seen = time.time()
            sensor_id = package[6:12].hex().upper()
            sensor = self.get_sensor(sensor_id)
            sensor.parse_packet(package)
            if self._handler:
                await self._handler(sensor)

    async def handle_sensors_update(self, packages: bytes) -> None:
        """Handles an update packet carrying data for several sensors."""
        pos = 0
        packages_len = len(packages)
        # Each record is 64 bytes: a 63-byte package followed by a 1-byte checksum.
        while pos + 64 <= packages_len:
            await self.handle_sensor_update(
                packages[pos : pos + 63], packages[pos + 63]
            )
            pos += 64

    async def handle_update(self, code: str, packages: bytes) -> None:
        """Handles update packets."""
        if code == "00":
            self.handle_bootup_update(packages)
        elif code == "C0":
            await self.handle_sensors_update(packages)
        else:
            _LOGGER.error(
                "Unknown update code %s, data %s",
                code,
                packages.hex().upper(),
            )

    async def resend_data_to_cloud(
        self,
        url: URL,
        headers: CIMultiDictProxy[str],
        content: bytes,
    ) -> None:
        """Resends the gateway's PUT request to the cloud server."""
        if self._send_data_to_cloud:
            try:
                async with aiohttp.ClientSession() as session:
                    async with session.put(
                        str(url), headers=headers, data=content
                    ) as response:
                        response_content = await response.content.read()
                        _LOGGER.debug(
                            "Cloud response status: %s content: %s",
                            response.status,
                            response_content.hex().upper(),
                        )
            except Exception as e:
                _LOGGER.error("Error resending request to cloud: %r", e)

    @property
    def gateway_id(self) -> str:
        return self._id.hex().upper()

    @property
    def serial(self) -> str:
        return "80" + self._id[3:6].hex().upper()

    @property
    def version(self) -> str:
        return self._version

    @property
    def last_seen(self) -> Optional[float]:
        return self._last_seen

    @property
    def attached(self) -> bool:
        return self._attached

    @property
    def send_data_to_cloud(self) -> bool:
        return self._send_data_to_cloud

    @send_data_to_cloud.setter
    def send_data_to_cloud(self, value: bool) -> None:
        self._send_data_to_cloud = value

    @property
    def dhcp_ip(self) -> str:
        return str(self._dhcp_ip)

    @property
    def use_dhcp(self) -> bool:
        return bool(self._use_dhcp)

    @use_dhcp.setter
    def use_dhcp(self, value: bool) -> None:
        self._use_dhcp = value

    @property
    def fixed_ip(self) -> str:
        return str(self._fixed_ip)

    @fixed_ip.setter
    def fixed_ip(self, value: str) -> None:
        self._fixed_ip = IPv4Address(value)

    @property
    def fixed_netmask(self) -> str:
        return str(self._fixed_netmask)

    @fixed_netmask.setter
    def fixed_netmask(self, value: str) -> None:
        self._fixed_netmask = IPv4Address(value)

    @property
    def fixed_gateway(self) -> str:
        return str(self._fixed_gateway)

    @fixed_gateway.setter
    def fixed_gateway(self, value: str) -> None:
        self._fixed_gateway = IPv4Address(value)

    @property
    def name(self) -> str:
        return str(self._name)

    @name.setter
    def name(self, value: str) -> None:
        if len(bytes(value, "utf-8")) > 20:
            raise ValueError("Name is too long")
        self._name = value

    @property
    def server(self) -> str:
        return str(self._server)

    @server.setter
    def server(self, value: str) -> None:
        if len(bytes(value, "utf-8")) > 64:
            raise ValueError("Server address is too long")
        self._server = value

    @property
    def use_proxy(self) -> bool:
        return bool(self._use_proxy)

    @use_proxy.setter
    def use_proxy(self, value: bool) -> None:
        self._use_proxy = value

    @property
    def proxy(self) -> str:
        return str(self._proxy)

    @proxy.setter
    def proxy(self, value: str) -> None:
        if len(bytes(value, "utf-8")) > 64:
            raise ValueError("Proxy server address is too long")
        self._proxy = value

    @property
    def proxy_port(self) -> int:
        return int(self._proxy_port)

    @proxy_port.setter
    def proxy_port(self, value: int) -> None:
        if value < 0 or value >= 64 * 1024:
            raise ValueError("Invalid proxy port number")
        self._proxy_port = value

    @property
    def fixed_dns(self) -> str:
        return str(self._fixed_dns)

    @fixed_dns.setter
    def fixed_dns(self, value: str) -> None:
        self._fixed_dns = IPv4Address(value)

    @property
    def orig_use_proxy(self) -> bool:
        return bool(self._orig_use_proxy)

    @property
    def orig_proxy(self) -> str:
        return str(self._orig_proxy)

    @property
    def orig_proxy_port(self) -> int:
        return int(self._orig_proxy_port)

    def __repr__(self) -> str:
        """Return a formal representation of the gateway."""
        return (
            "%s.%s(%s(%s), "
            "gateway_id=%s, "
            "version=%r, "
            "last_seen=%r, "
            "attached=%r, "
            "send_data_to_cloud=%r, "
            "dhcp_ip=%r, "
            "use_dhcp=%r, "
            "fixed_ip=%r, "
            "fixed_netmask=%r, "
            "fixed_gateway=%r, "
            "fixed_dns=%r, "
            "server=%r, "
            "use_proxy=%r, "
            "proxy=%r, "
            "proxy_port=%r, "
            "orig_use_proxy=%r, "
            "orig_proxy=%r, "
            "orig_proxy_port=%r"
            ")"
        ) % (
            self.__class__.__module__,
            self.__class__.__qualname__,
            self.name,
            self.serial,
            self.gateway_id,
            self.version,
            time.ctime(self.last_seen) if self.last_seen is not None else "never",
            self.attached,
            self.send_data_to_cloud,
            self.dhcp_ip,
            self.use_dhcp,
            self.fixed_ip,
            self.fixed_netmask,
            self.fixed_gateway,
            self.fixed_dns,
            self.server,
            self.use_proxy,
            self.proxy,
            self.proxy_port,
            self.orig_use_proxy,
            self.orig_proxy,
            self.orig_proxy_port,
        )

    def __str__(self) -> str:
        """Return a readable representation of the gateway."""
        return (
            "%s V%s, SerialNo: %s (id: %s)\n"
            "Use DHCP: %s\n"
            "DHCP IP: %s\n"
            "Fixed IP: %s\n"
            "Fixed Netmask: %s\n"
            "Fixed Gateway: %s\n"
            "Fixed DNS: %s\n"
            "Cloud Server: %s\n"
            "Use Proxy: %s\n"
            "Proxy Server: %s\n"
            "Proxy Port: %s\n"
            "Send data to cloud: %s\n"
            "Last Contact: %s"
        ) % (
            self.name,
            self.version,
            self.serial,
            self.gateway_id,
            "Yes" if self.use_dhcp else "No",
            self.dhcp_ip,
            self.fixed_ip,
            self.fixed_netmask,
            self.fixed_gateway,
            self.fixed_dns,
            self.server,
            "Yes" if self.use_proxy else "No",
            self.proxy,
            self.proxy_port,
            "Yes" if self.send_data_to_cloud else "No",
            time.ctime(self.last_seen) if self.last_seen is not None else "never",
        )
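
# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of driving the Gateway API above: discover gateways on
# the local network, print their configuration, and attach the first one to
# a proxy so sensor updates arrive via `print_sensor`. The proxy address
# 192.168.1.10:8003 and the handler name are assumptions for illustration;
# a real setup needs a proxy listener running at that endpoint.
if __name__ == "__main__":

    async def print_sensor(sensor: Sensor) -> None:
        # Called by the gateway for every parsed sensor update.
        print(sensor)

    async def demo() -> None:
        gateways = await Gateway.discover()
        for gateway in gateways:
            print(gateway)
        if gateways:
            # Hypothetical proxy endpoint; replace with a real listener.
            gateways[0].attach_to_proxy("192.168.1.10", 8003, print_sensor)

    asyncio.run(demo())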
2.28125
2