Dataset schema (one record per source file; ⌀ marks columns that contain nulls):

| column | type |
|---|---|
| blob_id | string (40 chars) |
| directory_id | string (40 chars) |
| path | string (3 to 616 chars) |
| content_id | string (40 chars) |
| detected_licenses | sequence (0 to 112 items) |
| license_type | string (2 classes) |
| repo_name | string (5 to 115 chars) |
| snapshot_id | string (40 chars) |
| revision_id | string (40 chars) |
| branch_name | string (777 classes) |
| visit_date | timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38) |
| revision_date | timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00) |
| committer_date | timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06) |
| github_id | int64 (4.92k to 681M, nullable ⌀) |
| star_events_count | int64 (0 to 209k) |
| fork_events_count | int64 (0 to 110k) |
| gha_license_id | string (22 classes) |
| gha_event_created_at | timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable ⌀) |
| gha_created_at | timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable ⌀) |
| gha_language | string (149 classes) |
| src_encoding | string (26 classes) |
| language | string (1 class) |
| is_vendor | bool (2 classes) |
| is_generated | bool (2 classes) |
| length_bytes | int64 (3 to 10.2M) |
| extension | string (188 classes) |
| content | string (3 to 10.2M chars) |
| authors | sequence (1 item) |
| author_id | string (1 to 132 chars) |
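For reference, records in this shape can be read with the Hugging Face datasets library. A minimal loading sketch, assuming a streaming-capable dataset (the repository id "org/code-dataset" is a placeholder, since this dump does not name the dataset):

from datasets import load_dataset

# Placeholder repository id -- substitute the real dataset path.
ds = load_dataset("org/code-dataset", split="train", streaming=True)
row = next(iter(ds))  # one record per source file, with the columns above
print(row["repo_name"], row["path"], row["length_bytes"])

The records themselves follow, pipe-separated, in the column order above.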
744b2b5f9edcfd6d59f3a65ebfda69a83917795e | 8c4ef53ec6c7df2eeeb633a53d1d931558596366 | /propertyestimator/properties/solvation.py | 846f77dd90fa87534dec104a50d994e4dbc33f4f | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MSchauperl/propertyestimator | ff7bf2d3b6bc441141258483ec991f8806b09469 | 9a67cb61498024c511f9bbe55536ac8e1a3c93be | refs/heads/master | 2020-09-08T07:04:39.660322 | 2019-11-08T21:15:23 | 2019-11-08T21:15:23 | 221,055,340 | 0 | 0 | NOASSERTION | 2019-11-14T21:47:11 | 2019-11-11T19:34:28 | null | UTF-8 | Python | false | false | 8,120 | py | """
A collection of physical property definitions relating to
solvation free energies.
"""
from propertyestimator import unit
from propertyestimator.properties import PhysicalProperty
from propertyestimator.properties.plugins import register_estimable_property
from propertyestimator.protocols import coordinates, forcefield, miscellaneous, yank, simulation, groups
from propertyestimator.substances import Substance
from propertyestimator.thermodynamics import Ensemble
from propertyestimator.workflow import WorkflowOptions
from propertyestimator.workflow.schemas import WorkflowSchema
from propertyestimator.workflow.utils import ProtocolPath
@register_estimable_property()
class SolvationFreeEnergy(PhysicalProperty):
"""A class representation of a solvation free energy property."""
@staticmethod
def get_default_workflow_schema(calculation_layer, options=None):
if calculation_layer == 'SimulationLayer':
# Currently reweighting is not supported.
return SolvationFreeEnergy.get_default_simulation_workflow_schema(options)
return None
@staticmethod
def get_default_simulation_workflow_schema(options=None):
"""Returns the default workflow to use when estimating this property
from direct simulations.
Parameters
----------
options: WorkflowOptions
The default options to use when setting up the estimation workflow.
Returns
-------
WorkflowSchema
The schema to follow when estimating this property.
"""
# Setup the fully solvated systems.
build_full_coordinates = coordinates.BuildCoordinatesPackmol('build_solvated_coordinates')
build_full_coordinates.substance = ProtocolPath('substance', 'global')
build_full_coordinates.max_molecules = 2000
        assign_full_parameters = forcefield.BuildSmirnoffSystem('assign_solvated_parameters')
assign_full_parameters.force_field_path = ProtocolPath('force_field_path', 'global')
assign_full_parameters.substance = ProtocolPath('substance', 'global')
assign_full_parameters.coordinate_file_path = ProtocolPath('coordinate_file_path',
build_full_coordinates.id)
# Perform a quick minimisation of the full system to give
# YANK a better starting point for its minimisation.
energy_minimisation = simulation.RunEnergyMinimisation('energy_minimisation')
energy_minimisation.system_path = ProtocolPath('system_path', assign_full_parameters.id)
energy_minimisation.input_coordinate_file = ProtocolPath('coordinate_file_path',
build_full_coordinates.id)
equilibration_simulation = simulation.RunOpenMMSimulation('equilibration_simulation')
equilibration_simulation.ensemble = Ensemble.NPT
equilibration_simulation.steps_per_iteration = 100000
equilibration_simulation.output_frequency = 10000
equilibration_simulation.timestep = 2.0 * unit.femtosecond
equilibration_simulation.thermodynamic_state = ProtocolPath('thermodynamic_state', 'global')
equilibration_simulation.system_path = ProtocolPath('system_path', assign_full_parameters.id)
equilibration_simulation.input_coordinate_file = ProtocolPath('output_coordinate_file',
energy_minimisation.id)
# Create a substance which only contains the solute (e.g. for the
# vacuum phase simulations).
filter_solvent = miscellaneous.FilterSubstanceByRole('filter_solvent')
filter_solvent.input_substance = ProtocolPath('substance', 'global')
filter_solvent.component_role = Substance.ComponentRole.Solvent
filter_solute = miscellaneous.FilterSubstanceByRole('filter_solute')
filter_solute.input_substance = ProtocolPath('substance', 'global')
filter_solute.component_role = Substance.ComponentRole.Solute
# Setup the solute in vacuum system.
build_vacuum_coordinates = coordinates.BuildCoordinatesPackmol('build_vacuum_coordinates')
build_vacuum_coordinates.substance = ProtocolPath('filtered_substance', filter_solute.id)
build_vacuum_coordinates.max_molecules = 1
        assign_vacuum_parameters = forcefield.BuildSmirnoffSystem('assign_parameters')
assign_vacuum_parameters.force_field_path = ProtocolPath('force_field_path', 'global')
assign_vacuum_parameters.substance = ProtocolPath('filtered_substance', filter_solute.id)
assign_vacuum_parameters.coordinate_file_path = ProtocolPath('coordinate_file_path',
build_vacuum_coordinates.id)
# Set up the protocol to run yank.
run_yank = yank.SolvationYankProtocol('run_solvation_yank')
run_yank.solute = ProtocolPath('filtered_substance', filter_solute.id)
run_yank.solvent_1 = ProtocolPath('filtered_substance', filter_solvent.id)
run_yank.solvent_2 = Substance()
run_yank.thermodynamic_state = ProtocolPath('thermodynamic_state', 'global')
run_yank.steps_per_iteration = 500
run_yank.checkpoint_interval = 50
run_yank.solvent_1_coordinates = ProtocolPath('output_coordinate_file', equilibration_simulation.id)
run_yank.solvent_1_system = ProtocolPath('system_path', assign_full_parameters.id)
run_yank.solvent_2_coordinates = ProtocolPath('coordinate_file_path', build_vacuum_coordinates.id)
run_yank.solvent_2_system = ProtocolPath('system_path', assign_vacuum_parameters.id)
        # Set up the group which will run yank until the free energy has been
        # determined to within a given uncertainty.
        conditional_group = groups.ConditionalGroup('conditional_group')
conditional_group.max_iterations = 20
if options.convergence_mode != WorkflowOptions.ConvergenceMode.NoChecks:
condition = groups.ConditionalGroup.Condition()
condition.condition_type = groups.ConditionalGroup.ConditionType.LessThan
condition.right_hand_value = ProtocolPath('target_uncertainty', 'global')
condition.left_hand_value = ProtocolPath('estimated_free_energy.uncertainty',
conditional_group.id,
run_yank.id)
conditional_group.add_condition(condition)
# Define the total number of iterations that yank should run for.
total_iterations = miscellaneous.MultiplyValue('total_iterations')
total_iterations.value = 2000
total_iterations.multiplier = ProtocolPath('current_iteration', conditional_group.id)
        # Make sure the simulation gets extended after each iteration.
run_yank.number_of_iterations = ProtocolPath('result',
total_iterations.id)
conditional_group.add_protocols(total_iterations, run_yank)
# Define the full workflow schema.
schema = WorkflowSchema(property_type=SolvationFreeEnergy.__name__)
schema.id = '{}{}'.format(SolvationFreeEnergy.__name__, 'Schema')
schema.protocols = {
build_full_coordinates.id: build_full_coordinates.schema,
assign_full_parameters.id: assign_full_parameters.schema,
energy_minimisation.id: energy_minimisation.schema,
equilibration_simulation.id: equilibration_simulation.schema,
filter_solvent.id: filter_solvent.schema,
filter_solute.id: filter_solute.schema,
build_vacuum_coordinates.id: build_vacuum_coordinates.schema,
assign_vacuum_parameters.id: assign_vacuum_parameters.schema,
conditional_group.id: conditional_group.schema
}
schema.final_value_source = ProtocolPath('estimated_free_energy', conditional_group.id, run_yank.id)
return schema
| [
"[email protected]"
] | |
b10bd3e6fce28ba55ca234a9dcb7dd608cd4763a | 0de115b69243361e7926d0a5400c1fb475a642f5 | /4.5.4 CodingExercise2.py | 7769a572921fc132cf0a40d0db1879e526643fc9 | [] | no_license | Bill-Fujimoto/Intro-to-Python-Course | f475f1c578e33ac37a796038fdaa6ad247876c55 | afe365b0233c4fadb78b2818164ab5726ecd92bb | refs/heads/master | 2020-04-12T21:19:08.688112 | 2018-12-21T21:50:09 | 2018-12-21T21:50:09 | 162,759,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,977 | py | #Recall last exercise that you wrote a function, word_lengths,
#which took in a string and returned a dictionary where each
#word of the string was mapped to an integer value of how
#long it was.
#
#This time, write a new function called length_words so that
#the returned dictionary maps an integer, the length of a
#word, to a list of words from the sentence with that length.
#If a word occurs more than once, add it more than once. The
#words in the list should appear in the same order in which
#they appeared in the sentence.
#
#For example:
#
# length_words("I ate a bowl of cereal out of a dog bowl today.")
# -> {3: ['ate', 'dog', 'out'], 1: ['a', 'a', 'i'],
# 5: ['today'], 2: ['of', 'of'], 4: ['bowl'], 6: ['cereal']}
#
#As before, you should remove any punctuation and make the
#string lowercase.
#
#Hint: To create a new list as the value for a dictionary key,
#use empty brackets: lengths[wordLength] = []. Then, you would
#be able to call lengths[wordLength].append(word). Note that
#if you try to append to the list before creating it for that
#key, you'll receive a KeyError.
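#To illustrate the hint (added example, not part of the original prompt):
#
# lengths = {}
# lengths[3] = []              #create the list for 3-letter words first
# lengths[3].append("ate")     #lengths is now {3: ['ate']}
# lengths[3].append("dog")     #lengths is now {3: ['ate', 'dog']}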
#Write your function here!
def length_words(string):
    to_replace = ".,'!?"
    for mark in to_replace:
        string = string.replace(mark, "")
    string = string.lower()
    word_list = string.split()
    len_words = {}
    for word in word_list:
        if len(word) not in len_words:
            len_words[len(word)] = []
        len_words[len(word)].append(word)
    return len_words
#Below are some lines of code that will test your function.
#You can change the value of the variable(s) to test your
#function with different inputs.
#
#If your function works correctly, this will originally
#print:
#{1: ['i', 'a', 'a'], 2: ['of', 'of'], 3: ['ate', 'out', 'dog'], 4: ['bowl', 'bowl'], 5: ['today'], 6: ['cereal']}
#
#The keys may appear in a different order, but within each
#list the words should appear in the order shown above.
print(length_words("I ate a bowl of cereal out of a dog bowl today."))
| [
"@vfr1200f1#"
] | @vfr1200f1# |
3b91d9f42ee1ecda8632567b35ac5caa51d497c7 | 35053a371d85c2d45a4f52239d8a70b38194ef48 | /Count of Matches in Tournament.py | 96c8b115113e1096f964d3dcc4f40e3f4b7f16a1 | [] | no_license | Kuehar/LeetCode | 51d169c81a2e572ea854399fc78e1130220388f9 | 4555c20455f181f9dd7b3aba2a8779dea795edfb | refs/heads/master | 2023-04-16T10:13:03.584541 | 2023-04-06T11:47:21 | 2023-04-06T11:47:21 | 243,361,421 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | class Solution:
def numberOfMatches(self, n: int) -> int:
        return n - 1
# O(1) solution: every match eliminates exactly one team, and all teams but
# the champion must be eliminated, so the answer is always n - 1.
# Runtime: 28 ms, faster than 82.44% of Python3 online submissions for Count of Matches in Tournament.
# Memory Usage: 14.3 MB, less than 40.04% of Python3 online submissions for Count of Matches in Tournament.
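# Sanity check (added illustration, not part of the original submission):
# simulate the rounds directly and compare against the closed-form answer.
def _simulate_matches(n: int) -> int:
    matches = 0
    while n > 1:
        matches += n // 2       # each pair of teams plays one match
        n = n // 2 + n % 2      # winners advance, plus the odd team out
    return matches

assert all(_simulate_matches(k) == k - 1 for k in range(1, 200))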
| [
"[email protected]"
] | |
c7a6bbfb9e4f4606a0720e7f9c0efa56e7d90f30 | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/DataQuality/DataQualityConfigurations/python/TCTDisplay.py | 6fa11e45427f043ea1f2b19da409200372d1fc14 | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,330 | py | # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
from DataQualityUtils.DQWebDisplayConfig import DQWebDisplayConfig
dqconfig = DQWebDisplayConfig()
dqconfig.config = "TCT"
dqconfig.hcfg = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Collisions/collisions_run.1.41.hcfg"
dqconfig.hcfg_min10 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Collisions/collisions_minutes10.1.9.hcfg"
dqconfig.hcfg_min30 = "/afs/cern.ch/user/a/atlasdqm/dqmdisk/tier0/han_config/Collisions/collisions_minutes30.1.5.hcfg"
dqconfig.hanResultsDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/han_results"
dqconfig.htmlDir = "/afs/cern.ch/atlas/offline/external/FullChainTest/tier0/dqm/www"
dqconfig.htmlWeb = "http://atlas-project-fullchaintest.web.cern.ch/atlas-project-FullChainTest/tier0/dqm/www"
dqconfig.runlist = "runlist_TCT.xml"
dqconfig.indexFile = "results_TCT.html"
dqconfig.lockFile = "DQWebDisplay_TCT.lock"
dqconfig.dbConnection = "sqlite://;schema=MyCOOL_histo.db;dbname=OFLP200"
dqconfig.dqmfOfl = "/GLOBAL/DETSTATUS/DQMFOFL"
dqconfig.dbConnectionHisto = "sqlite://;schema=MyCOOL_histo.db;dbname=OFLP200"
dqconfig.dqmfOflHisto = "/GLOBAL/DETSTATUS/DQMFOFLH"
dqconfig.dbTagName = "DetStatusDQMFOFL-TCT"
| [
"[email protected]"
] | |
c20a34f0a583217bc2954583f5023db885908a21 | 6dd08ec6b4f6351de8450a3d7e592fd6b4994119 | /cbase/server/cbase-1.8.1/testrunner/lib/cli_interface.py | e6a6f9806a3859205b951f3f754ca879f82d6278 | [
"Apache-2.0"
] | permissive | zhgwenming/appstack | d015e96b911fe318f9fba1bdeeea9d888d57dfba | 8fe6c1dfc2f5ed4a36c335e86ae28c17b3769276 | refs/heads/master | 2021-01-23T13:30:19.507537 | 2015-11-09T06:48:35 | 2015-11-09T06:48:35 | 7,576,644 | 1 | 2 | null | 2016-01-05T09:16:22 | 2013-01-12T15:13:21 | C | UTF-8 | Python | false | false | 6,194 | py | #!/usr/bin/env python
#
# Copyright 2010 Membase, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# PYTHONPATH needs to be set up to point to mc_bin_client
import os
import subprocess
DEF_USERNAME = "Administrator"
DEF_PASSWORD = "password"
DEF_KIND = "json"
DEF_MOXI_PORT = 11211
DEF_HTTP_PORT = 8091
DEF_RAMSIZE = 256
DEF_REPLICA = 1
CLI_EXE_LOC = "../membase-cli/membase"
SSH_EXE_LOC = "/opt/membase/bin/cli/membase"
class CLIInterface(object):
def __init__(self, server, http_port=DEF_HTTP_PORT, username=DEF_USERNAME, password=DEF_PASSWORD, kind=DEF_KIND, debug=False, ssh=False, sshkey=None):
self.server = server
self.http_port = http_port
self.username = username
self.password = password
self.kind = kind
self.debug = debug
self.ssh = ssh
self.sshkey = sshkey
if (debug):
self.acting_server_args = "-c %s:%d -u %s -p %s -o %s -d" % (self.server, self.http_port, self.username, self.password, self.kind)
else:
self.acting_server_args = "-c %s:%d -u %s -p %s -o %s" % (self.server, self.http_port, self.username, self.password, self.kind)
def server_list(self):
cmd = " server-list " + self.acting_server_args
return self.execute_command(cmd)
def server_info(self):
cmd = " server-info " + self.acting_server_args
return self.execute_command(cmd)
def server_add(self, server_to_add, rebalance=False):
if (rebalance):
cmd = " rebalance " + self.acting_server_args + " --server-add=%s:%d --server-add-username=%s --server-add-password=%s"\
% (server_to_add, self.http_port, self.username, self.password)
else:
cmd = " server-add " + self.acting_server_args + " --server-add=%s:%d --server-add-username=%s --server-add-password=%s"\
% (server_to_add, self.http_port, self.username, self.password)
return self.execute_command(cmd)
def server_readd(self, server_to_readd):
cmd = " server-readd " + self.acting_server_args + " --server-add=%s:%d --server-add-username=%s --server-add-password=%s"\
% (server_to_readd, self.http_port, self.username, self.password)
return self.execute_command(cmd)
def rebalance(self):
cmd = " rebalance " + self.acting_server_args
return self.execute_command(cmd)
def rebalance_stop(self):
cmd = " reblance-stop " + self.acting_server_args
return self.execute_command(cmd)
def rebalance_status(self):
cmd = " rebalance-status " + self.acting_server_args
return self.execute_command(cmd)
def failover(self, server_to_failover):
cmd = " failover " + self.acting_server_args + " --server-failover %s" % (server_to_failover)
return self.execute_command(cmd)
def cluster_init(self, c_username=DEF_USERNAME, c_password=DEF_PASSWORD, c_port=DEF_HTTP_PORT, c_ramsize=DEF_RAMSIZE):
cmd = " cluster-init " + self.acting_server_args\
+ " --cluster-init-username=%s --cluster-init-password=%s --cluster-init-port=%d --cluster-init-ramsize=%d"\
% (c_username, c_password, c_port, c_ramsize)
return self.execute_command(cmd)
def node_init(self, path):
cmd = " node-init " + self.acting_server_args + " --node-init-data-path=%s" % (path)
return self.execute_command(cmd)
def bucket_list(self):
cmd = " bucket-list " + self.acting_server_args
return self.execute_command(cmd)
def bucket_create(self, bucket_name, bucket_type, bucket_port, bucket_password="", bucket_ramsize=DEF_RAMSIZE, replica_count=DEF_REPLICA):
cmd = " bucket-create " + self.acting_server_args\
+ " --bucket=%s --bucket-type=%s --bucket-port=%d --bucket-password=%s --bucket-ramsize=%d --bucket-replica=%d"\
% (bucket_name, bucket_type, bucket_port, bucket_password, bucket_ramsize, replica_count)
return self.execute_command(cmd)
def bucket_edit(self, bucket_name, bucket_type, bucket_port, bucket_password, bucket_ramsize, replica_count):
cmd = " bucket-edit " + self.acting_server_args\
+ " --bucket=%s --bucket-type=%s --bucket-port=%d --bucket-password=%s --bucket-ramsize=%d --bucket-replica=%d"\
% (bucket_name, bucket_type, bucket_port, bucket_password, bucket_ramsize, replica_count)
return self.execute_command(cmd)
def bucket_delete(self, bucket_name):
cmd = " bucket-delete " + self.acting_server_args + " --bucket=%s" % (bucket_name)
return self.execute_command(cmd)
def bucket_flush(self):
return "I don't work yet :-("
def execute_command(self, cmd):
if (self.ssh):
return self.execute_ssh(SSH_EXE_LOC + cmd)
else:
return self.execute_local(CLI_EXE_LOC + cmd)
def execute_local(self, cmd):
rtn = ""
process = subprocess.Popen(cmd ,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
stdoutdata,stderrdata=process.communicate()
rtn += stdoutdata
return rtn
def execute_ssh(self, cmd):
rtn=""
if (self.sshkey == None):
process = subprocess.Popen("ssh root@%s \"%s\"" % (self.server,cmd),shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
else:
process = subprocess.Popen("ssh -i %s root@%s \"%s\"" % (self.sshkey, self.server, cmd),shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
stdoutdata,stderrdata=process.communicate()
rtn += stdoutdata
return rtn
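# Example usage (added sketch; the host address and credentials below are
# placeholders, not values from this repository):
#
#   cli = CLIInterface("192.168.0.10", username=DEF_USERNAME, password=DEF_PASSWORD)
#   print(cli.server_list())
#   cli.bucket_create("default", "membase", DEF_MOXI_PORT)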
| [
"[email protected]"
] | |
875a564377d75822b6c87a33792ad8d32b40b7b6 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/datacatalog/outputs.py | 26d9e4bddb4ce2d56c83f67f19a73cd325ca56ef | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,362 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from ._enums import *
__all__ = [
'PrincipalsResponse',
]
@pulumi.output_type
class PrincipalsResponse(dict):
"""
User principals.
"""
def __init__(__self__, *,
object_id: Optional[str] = None,
upn: Optional[str] = None):
"""
User principals.
:param str object_id: Object Id for the user
:param str upn: UPN of the user.
"""
if object_id is not None:
pulumi.set(__self__, "object_id", object_id)
if upn is not None:
pulumi.set(__self__, "upn", upn)
@property
@pulumi.getter(name="objectId")
def object_id(self) -> Optional[str]:
"""
Object Id for the user
"""
return pulumi.get(self, "object_id")
@property
@pulumi.getter
def upn(self) -> Optional[str]:
"""
UPN of the user.
"""
return pulumi.get(self, "upn")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
494c1e3a8da4af904b0d96a5540e85b475400cc2 | 0e4860fecfdd34a3255003cc8c8df086c14083dd | /python/practise/带你学Django资料及源码/课堂与博客代码/peace_blog/blog/admin.py | 9c1fb6228842fe4ec5d8931dc4a0aad2aa044aa9 | [] | no_license | anzhihe/learning | 503ab9a58f280227011da5eaa4b14b46c678e6f3 | 66f7f801e1395207778484e1543ea26309d4b354 | refs/heads/master | 2023-08-08T11:42:11.983677 | 2023-07-29T09:19:47 | 2023-07-29T09:19:47 | 188,768,643 | 1,443 | 617 | null | 2023-08-24T02:10:34 | 2019-05-27T04:04:10 | Python | UTF-8 | Python | false | false | 289 | py | from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(Banner)
admin.site.register(Category)
admin.site.register(Tag)
admin.site.register(Article)
admin.site.register(FriendLink)
admin.site.register(Comment)
admin.site.register(BlogUser)
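# Equivalent decorator-based registration (added note; this project keeps the
# plain register() calls above):
#
# @admin.register(Article)
# class ArticleAdmin(admin.ModelAdmin):
#     list_display = ("id",)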
| [
"[email protected]"
] | |
b3b23e56815e22c59025e95c60b6cbda2ae81e07 | 9fbe90eab4cb25022e7c93776da3a5733656a09a | /examples/chat/status.py | 9f517a087999e1a586d64cffee8075515a5e83ea | [
"MIT"
] | permissive | Nathanator/networkzero | 453e218d6e0b8080158cb968f4acc5e0cb0fb65c | e6bf437f424660c32cf1ef81f83d9eee925f44e7 | refs/heads/master | 2021-01-15T13:14:53.101742 | 2016-04-07T20:32:28 | 2016-04-07T20:32:28 | 55,724,894 | 0 | 0 | null | 2016-04-07T20:12:18 | 2016-04-07T20:12:17 | null | UTF-8 | Python | false | false | 467 | py | import networkzero as nw0
updates = nw0.discover("chat-updates")
while True:
action, message = nw0.wait_for_notification(updates)
print(action, message)
if action == "JOIN":
print("%s has joined" % message)
elif action == "LEAVE":
print("%s has left" % message)
elif action == "SPEAK":
[person, words] = message
print("%s says: %s" % (person, words))
else:
print("!! Unexpected message: %s" % message)
| [
"[email protected]"
] | |
ef82571b3a9d413818632a92cb1e3edb2d75dab3 | 385a63d3c9e6f5815979165001f78ec3d7b90cd2 | /DrivingTDM_SetupMatlabOOP/headerAndFunctionsMotor/ximc/python-profiles/STANDA/8MT195X-540-4.py | 391e7db3d811458155873424999b6ceb86b43093 | [
"BSD-2-Clause"
] | permissive | Rasedujjaman/matlabOOP | 5abb6ec94998fda5e9214ed94cf67a42bf243d4f | e1f025ab9b00a3646719df23852079736d2b5701 | refs/heads/main | 2023-07-23T21:40:53.905045 | 2021-08-31T16:12:39 | 2021-08-31T16:12:39 | 378,249,559 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 22,654 | py | def set_profile_8MT195X_540_4(lib, id):
worst_result = Result.Ok
result = Result.Ok
feedback_settings = feedback_settings_t()
feedback_settings.IPS = 4000
class FeedbackType_:
FEEDBACK_ENCODER_MEDIATED = 6
FEEDBACK_NONE = 5
FEEDBACK_EMF = 4
FEEDBACK_ENCODER = 1
feedback_settings.FeedbackType = FeedbackType_.FEEDBACK_EMF
class FeedbackFlags_:
FEEDBACK_ENC_TYPE_BITS = 192
FEEDBACK_ENC_TYPE_DIFFERENTIAL = 128
FEEDBACK_ENC_TYPE_SINGLE_ENDED = 64
FEEDBACK_ENC_REVERSE = 1
FEEDBACK_ENC_TYPE_AUTO = 0
feedback_settings.FeedbackFlags = FeedbackFlags_.FEEDBACK_ENC_TYPE_SINGLE_ENDED | FeedbackFlags_.FEEDBACK_ENC_TYPE_AUTO
feedback_settings.CountsPerTurn = 4000
result = lib.set_feedback_settings(id, byref(feedback_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
home_settings = home_settings_t()
home_settings.FastHome = 500
home_settings.uFastHome = 0
home_settings.SlowHome = 500
home_settings.uSlowHome = 0
home_settings.HomeDelta = 500
home_settings.uHomeDelta = 0
class HomeFlags_:
HOME_USE_FAST = 256
HOME_STOP_SECOND_BITS = 192
HOME_STOP_SECOND_LIM = 192
HOME_STOP_SECOND_SYN = 128
HOME_STOP_SECOND_REV = 64
HOME_STOP_FIRST_BITS = 48
HOME_STOP_FIRST_LIM = 48
HOME_STOP_FIRST_SYN = 32
HOME_STOP_FIRST_REV = 16
HOME_HALF_MV = 8
HOME_MV_SEC_EN = 4
HOME_DIR_SECOND = 2
HOME_DIR_FIRST = 1
home_settings.HomeFlags = HomeFlags_.HOME_USE_FAST | HomeFlags_.HOME_STOP_SECOND_REV | HomeFlags_.HOME_STOP_FIRST_BITS | HomeFlags_.HOME_DIR_SECOND
result = lib.set_home_settings(id, byref(home_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
move_settings = move_settings_t()
move_settings.Speed = 1000
move_settings.uSpeed = 0
move_settings.Accel = 2000
move_settings.Decel = 4000
move_settings.AntiplaySpeed = 1000
move_settings.uAntiplaySpeed = 0
class MoveFlags_:
RPM_DIV_1000 = 1
result = lib.set_move_settings(id, byref(move_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
engine_settings = engine_settings_t()
engine_settings.NomVoltage = 1
engine_settings.NomCurrent = 2100
engine_settings.NomSpeed = 2000
engine_settings.uNomSpeed = 0
class EngineFlags_:
ENGINE_LIMIT_RPM = 128
ENGINE_LIMIT_CURR = 64
ENGINE_LIMIT_VOLT = 32
ENGINE_ACCEL_ON = 16
ENGINE_ANTIPLAY = 8
ENGINE_MAX_SPEED = 4
ENGINE_CURRENT_AS_RMS = 2
ENGINE_REVERSE = 1
engine_settings.EngineFlags = EngineFlags_.ENGINE_LIMIT_RPM | EngineFlags_.ENGINE_ACCEL_ON | EngineFlags_.ENGINE_REVERSE
engine_settings.Antiplay = 575
class MicrostepMode_:
MICROSTEP_MODE_FRAC_256 = 9
MICROSTEP_MODE_FRAC_128 = 8
MICROSTEP_MODE_FRAC_64 = 7
MICROSTEP_MODE_FRAC_32 = 6
MICROSTEP_MODE_FRAC_16 = 5
MICROSTEP_MODE_FRAC_8 = 4
MICROSTEP_MODE_FRAC_4 = 3
MICROSTEP_MODE_FRAC_2 = 2
MICROSTEP_MODE_FULL = 1
engine_settings.MicrostepMode = MicrostepMode_.MICROSTEP_MODE_FRAC_256
engine_settings.StepsPerRev = 200
result = lib.set_engine_settings(id, byref(engine_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
entype_settings = entype_settings_t()
class EngineType_:
ENGINE_TYPE_BRUSHLESS = 5
ENGINE_TYPE_TEST = 4
ENGINE_TYPE_STEP = 3
ENGINE_TYPE_2DC = 2
ENGINE_TYPE_DC = 1
ENGINE_TYPE_NONE = 0
entype_settings.EngineType = EngineType_.ENGINE_TYPE_STEP | EngineType_.ENGINE_TYPE_NONE
class DriverType_:
DRIVER_TYPE_EXTERNAL = 3
DRIVER_TYPE_INTEGRATE = 2
DRIVER_TYPE_DISCRETE_FET = 1
entype_settings.DriverType = DriverType_.DRIVER_TYPE_INTEGRATE
result = lib.set_entype_settings(id, byref(entype_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
power_settings = power_settings_t()
power_settings.HoldCurrent = 50
power_settings.CurrReductDelay = 1000
power_settings.PowerOffDelay = 60
power_settings.CurrentSetTime = 300
class PowerFlags_:
POWER_SMOOTH_CURRENT = 4
POWER_OFF_ENABLED = 2
POWER_REDUCT_ENABLED = 1
power_settings.PowerFlags = PowerFlags_.POWER_SMOOTH_CURRENT | PowerFlags_.POWER_OFF_ENABLED | PowerFlags_.POWER_REDUCT_ENABLED
result = lib.set_power_settings(id, byref(power_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
secure_settings = secure_settings_t()
secure_settings.LowUpwrOff = 800
secure_settings.CriticalIpwr = 4000
secure_settings.CriticalUpwr = 5500
secure_settings.CriticalT = 800
secure_settings.CriticalIusb = 450
secure_settings.CriticalUusb = 520
secure_settings.MinimumUusb = 420
class Flags_:
ALARM_ENGINE_RESPONSE = 128
ALARM_WINDING_MISMATCH = 64
USB_BREAK_RECONNECT = 32
ALARM_FLAGS_STICKING = 16
ALARM_ON_BORDERS_SWAP_MISSET = 8
H_BRIDGE_ALERT = 4
LOW_UPWR_PROTECTION = 2
ALARM_ON_DRIVER_OVERHEATING = 1
secure_settings.Flags = Flags_.ALARM_ENGINE_RESPONSE | Flags_.ALARM_FLAGS_STICKING | Flags_.ALARM_ON_BORDERS_SWAP_MISSET | Flags_.H_BRIDGE_ALERT | Flags_.ALARM_ON_DRIVER_OVERHEATING
result = lib.set_secure_settings(id, byref(secure_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
edges_settings = edges_settings_t()
class BorderFlags_:
BORDERS_SWAP_MISSET_DETECTION = 8
BORDER_STOP_RIGHT = 4
BORDER_STOP_LEFT = 2
BORDER_IS_ENCODER = 1
edges_settings.BorderFlags = BorderFlags_.BORDER_STOP_RIGHT | BorderFlags_.BORDER_STOP_LEFT
class EnderFlags_:
ENDER_SW2_ACTIVE_LOW = 4
ENDER_SW1_ACTIVE_LOW = 2
ENDER_SWAP = 1
edges_settings.EnderFlags = EnderFlags_.ENDER_SWAP
edges_settings.LeftBorder = 175
edges_settings.uLeftBorder = 0
edges_settings.RightBorder = 25825
edges_settings.uRightBorder = 0
result = lib.set_edges_settings(id, byref(edges_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
pid_settings = pid_settings_t()
pid_settings.KpU = 0
pid_settings.KiU = 0
pid_settings.KdU = 0
pid_settings.Kpf = 0.003599999938160181
pid_settings.Kif = 0.03799999877810478
pid_settings.Kdf = 2.8000000384054147e-05
result = lib.set_pid_settings(id, byref(pid_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
sync_in_settings = sync_in_settings_t()
class SyncInFlags_:
SYNCIN_GOTOPOSITION = 4
SYNCIN_INVERT = 2
SYNCIN_ENABLED = 1
sync_in_settings.ClutterTime = 4
sync_in_settings.Position = 0
sync_in_settings.uPosition = 0
sync_in_settings.Speed = 0
sync_in_settings.uSpeed = 0
result = lib.set_sync_in_settings(id, byref(sync_in_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
sync_out_settings = sync_out_settings_t()
class SyncOutFlags_:
SYNCOUT_ONPERIOD = 64
SYNCOUT_ONSTOP = 32
SYNCOUT_ONSTART = 16
SYNCOUT_IN_STEPS = 8
SYNCOUT_INVERT = 4
SYNCOUT_STATE = 2
SYNCOUT_ENABLED = 1
sync_out_settings.SyncOutFlags = SyncOutFlags_.SYNCOUT_ONSTOP | SyncOutFlags_.SYNCOUT_ONSTART
sync_out_settings.SyncOutPulseSteps = 100
sync_out_settings.SyncOutPeriod = 2000
sync_out_settings.Accuracy = 0
sync_out_settings.uAccuracy = 0
result = lib.set_sync_out_settings(id, byref(sync_out_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
extio_settings = extio_settings_t()
class EXTIOSetupFlags_:
EXTIO_SETUP_INVERT = 2
EXTIO_SETUP_OUTPUT = 1
extio_settings.EXTIOSetupFlags = EXTIOSetupFlags_.EXTIO_SETUP_OUTPUT
class EXTIOModeFlags_:
EXTIO_SETUP_MODE_OUT_BITS = 240
EXTIO_SETUP_MODE_OUT_MOTOR_ON = 64
EXTIO_SETUP_MODE_OUT_ALARM = 48
EXTIO_SETUP_MODE_OUT_MOVING = 32
EXTIO_SETUP_MODE_OUT_ON = 16
EXTIO_SETUP_MODE_IN_BITS = 15
EXTIO_SETUP_MODE_IN_ALARM = 5
EXTIO_SETUP_MODE_IN_HOME = 4
EXTIO_SETUP_MODE_IN_MOVR = 3
EXTIO_SETUP_MODE_IN_PWOF = 2
EXTIO_SETUP_MODE_IN_STOP = 1
EXTIO_SETUP_MODE_IN_NOP = 0
EXTIO_SETUP_MODE_OUT_OFF = 0
extio_settings.EXTIOModeFlags = EXTIOModeFlags_.EXTIO_SETUP_MODE_IN_STOP | EXTIOModeFlags_.EXTIO_SETUP_MODE_IN_NOP | EXTIOModeFlags_.EXTIO_SETUP_MODE_OUT_OFF
result = lib.set_extio_settings(id, byref(extio_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
brake_settings = brake_settings_t()
brake_settings.t1 = 300
brake_settings.t2 = 500
brake_settings.t3 = 300
brake_settings.t4 = 400
class BrakeFlags_:
BRAKE_ENG_PWROFF = 2
BRAKE_ENABLED = 1
brake_settings.BrakeFlags = BrakeFlags_.BRAKE_ENG_PWROFF
result = lib.set_brake_settings(id, byref(brake_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
control_settings = control_settings_t()
control_settings.MaxSpeed[0] = 100
control_settings.MaxSpeed[1] = 1000
control_settings.MaxSpeed[2] = 0
control_settings.MaxSpeed[3] = 0
control_settings.MaxSpeed[4] = 0
control_settings.MaxSpeed[5] = 0
control_settings.MaxSpeed[6] = 0
control_settings.MaxSpeed[7] = 0
control_settings.MaxSpeed[8] = 0
control_settings.MaxSpeed[9] = 0
control_settings.uMaxSpeed[0] = 0
control_settings.uMaxSpeed[1] = 0
control_settings.uMaxSpeed[2] = 0
control_settings.uMaxSpeed[3] = 0
control_settings.uMaxSpeed[4] = 0
control_settings.uMaxSpeed[5] = 0
control_settings.uMaxSpeed[6] = 0
control_settings.uMaxSpeed[7] = 0
control_settings.uMaxSpeed[8] = 0
control_settings.uMaxSpeed[9] = 0
control_settings.Timeout[0] = 1000
control_settings.Timeout[1] = 1000
control_settings.Timeout[2] = 1000
control_settings.Timeout[3] = 1000
control_settings.Timeout[4] = 1000
control_settings.Timeout[5] = 1000
control_settings.Timeout[6] = 1000
control_settings.Timeout[7] = 1000
control_settings.Timeout[8] = 1000
control_settings.MaxClickTime = 300
class Flags_:
CONTROL_BTN_RIGHT_PUSHED_OPEN = 8
CONTROL_BTN_LEFT_PUSHED_OPEN = 4
CONTROL_MODE_BITS = 3
CONTROL_MODE_LR = 2
CONTROL_MODE_JOY = 1
CONTROL_MODE_OFF = 0
control_settings.Flags = Flags_.CONTROL_MODE_LR | Flags_.CONTROL_MODE_OFF
control_settings.DeltaPosition = 1
control_settings.uDeltaPosition = 0
result = lib.set_control_settings(id, byref(control_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
joystick_settings = joystick_settings_t()
joystick_settings.JoyLowEnd = 0
joystick_settings.JoyCenter = 5000
joystick_settings.JoyHighEnd = 10000
joystick_settings.ExpFactor = 100
joystick_settings.DeadZone = 50
class JoyFlags_:
JOY_REVERSE = 1
result = lib.set_joystick_settings(id, byref(joystick_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
ctp_settings = ctp_settings_t()
ctp_settings.CTPMinError = 3
class CTPFlags_:
CTP_ERROR_CORRECTION = 16
REV_SENS_INV = 8
CTP_ALARM_ON_ERROR = 4
CTP_BASE = 2
CTP_ENABLED = 1
ctp_settings.CTPFlags = CTPFlags_.CTP_ERROR_CORRECTION
result = lib.set_ctp_settings(id, byref(ctp_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
uart_settings = uart_settings_t()
uart_settings.Speed = 115200
class UARTSetupFlags_:
UART_STOP_BIT = 8
UART_PARITY_BIT_USE = 4
UART_PARITY_BITS = 3
UART_PARITY_BIT_MARK = 3
UART_PARITY_BIT_SPACE = 2
UART_PARITY_BIT_ODD = 1
UART_PARITY_BIT_EVEN = 0
uart_settings.UARTSetupFlags = UARTSetupFlags_.UART_PARITY_BIT_EVEN
result = lib.set_uart_settings(id, byref(uart_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
controller_name = controller_name_t()
controller_name.ControllerName = bytes([0, 113, 252, 118, 36, 0, 72, 0, 3, 0, 0, 0, 104, 101, 103, 0])
class CtrlFlags_:
EEPROM_PRECEDENCE = 1
result = lib.set_controller_name(id, byref(controller_name))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
emf_settings = emf_settings_t()
emf_settings.L = 0.013000000268220901
emf_settings.R = 2.5999999046325684
emf_settings.Km = 0.015599999576807022
class BackEMFFlags_:
BACK_EMF_KM_AUTO = 4
BACK_EMF_RESISTANCE_AUTO = 2
BACK_EMF_INDUCTANCE_AUTO = 1
result = lib.set_emf_settings(id, byref(emf_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
engine_advansed_setup = engine_advansed_setup_t()
engine_advansed_setup.stepcloseloop_Kw = 50
engine_advansed_setup.stepcloseloop_Kp_low = 1000
engine_advansed_setup.stepcloseloop_Kp_high = 33
result = lib.set_engine_advansed_setup(id, byref(engine_advansed_setup))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
extended_settings = extended_settings_t()
extended_settings.Param1 = 0
result = lib.set_extended_settings(id, byref(extended_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_name = stage_name_t()
stage_name.PositionerName = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_stage_name(id, byref(stage_name))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_information = stage_information_t()
stage_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
stage_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_stage_information(id, byref(stage_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_settings = stage_settings_t()
stage_settings.LeadScrewPitch = 0
stage_settings.Units = bytes([0, 0, 0, 0, 0, 0, 0, 0])
stage_settings.MaxSpeed = 0
stage_settings.TravelRange = 0
stage_settings.SupplyVoltageMin = 0
stage_settings.SupplyVoltageMax = 0
stage_settings.MaxCurrentConsumption = 0
stage_settings.HorizontalLoadCapacity = 0
stage_settings.VerticalLoadCapacity = 0
result = lib.set_stage_settings(id, byref(stage_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
motor_information = motor_information_t()
motor_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
motor_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_motor_information(id, byref(motor_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
motor_settings = motor_settings_t()
class MotorType_:
MOTOR_TYPE_BLDC = 3
MOTOR_TYPE_DC = 2
MOTOR_TYPE_STEP = 1
MOTOR_TYPE_UNKNOWN = 0
motor_settings.MotorType = MotorType_.MOTOR_TYPE_UNKNOWN
motor_settings.ReservedField = 0
motor_settings.Poles = 0
motor_settings.Phases = 0
motor_settings.NominalVoltage = 0
motor_settings.NominalCurrent = 0
motor_settings.NominalSpeed = 0
motor_settings.NominalTorque = 0
motor_settings.NominalPower = 0
motor_settings.WindingResistance = 0
motor_settings.WindingInductance = 0
motor_settings.RotorInertia = 0
motor_settings.StallTorque = 0
motor_settings.DetentTorque = 0
motor_settings.TorqueConstant = 0
motor_settings.SpeedConstant = 0
motor_settings.SpeedTorqueGradient = 0
motor_settings.MechanicalTimeConstant = 0
motor_settings.MaxSpeed = 0
motor_settings.MaxCurrent = 0
motor_settings.MaxCurrentTime = 0
motor_settings.NoLoadCurrent = 0
motor_settings.NoLoadSpeed = 0
result = lib.set_motor_settings(id, byref(motor_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
encoder_information = encoder_information_t()
encoder_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
encoder_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_encoder_information(id, byref(encoder_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
encoder_settings = encoder_settings_t()
encoder_settings.MaxOperatingFrequency = 0
encoder_settings.SupplyVoltageMin = 0
encoder_settings.SupplyVoltageMax = 0
encoder_settings.MaxCurrentConsumption = 0
encoder_settings.PPR = 0
class EncoderSettings_:
ENCSET_REVOLUTIONSENSOR_ACTIVE_HIGH = 256
ENCSET_REVOLUTIONSENSOR_PRESENT = 64
ENCSET_INDEXCHANNEL_PRESENT = 16
ENCSET_PUSHPULL_OUTPUT = 4
ENCSET_DIFFERENTIAL_OUTPUT = 1
result = lib.set_encoder_settings(id, byref(encoder_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
hallsensor_information = hallsensor_information_t()
hallsensor_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
hallsensor_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_hallsensor_information(id, byref(hallsensor_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
hallsensor_settings = hallsensor_settings_t()
hallsensor_settings.MaxOperatingFrequency = 0
hallsensor_settings.SupplyVoltageMin = 0
hallsensor_settings.SupplyVoltageMax = 0
hallsensor_settings.MaxCurrentConsumption = 0
hallsensor_settings.PPR = 0
result = lib.set_hallsensor_settings(id, byref(hallsensor_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
gear_information = gear_information_t()
gear_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
gear_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_gear_information(id, byref(gear_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
gear_settings = gear_settings_t()
gear_settings.ReductionIn = 0
gear_settings.ReductionOut = 0
gear_settings.RatedInputTorque = 0
gear_settings.RatedInputSpeed = 0
gear_settings.MaxOutputBacklash = 0
gear_settings.InputInertia = 0
gear_settings.Efficiency = 0
result = lib.set_gear_settings(id, byref(gear_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
accessories_settings = accessories_settings_t()
accessories_settings.MagneticBrakeInfo = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
accessories_settings.MBRatedVoltage = 0
accessories_settings.MBRatedCurrent = 0
accessories_settings.MBTorque = 0
class MBSettings_:
MB_POWERED_HOLD = 2
MB_AVAILABLE = 1
accessories_settings.TemperatureSensorInfo = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
accessories_settings.TSMin = 0
accessories_settings.TSMax = 0
accessories_settings.TSGrad = 0
class TSSettings_:
TS_AVAILABLE = 8
TS_TYPE_BITS = 7
TS_TYPE_SEMICONDUCTOR = 2
TS_TYPE_THERMOCOUPLE = 1
TS_TYPE_UNKNOWN = 0
accessories_settings.TSSettings = TSSettings_.TS_TYPE_UNKNOWN
class LimitSwitchesSettings_:
LS_SHORTED = 16
LS_SW2_ACTIVE_LOW = 8
LS_SW1_ACTIVE_LOW = 4
LS_ON_SW2_AVAILABLE = 2
LS_ON_SW1_AVAILABLE = 1
result = lib.set_accessories_settings(id, byref(accessories_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
return worst_result
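# Hypothetical invocation (added sketch; the device URI is a placeholder and
# the usual libximc loading boilerplate is omitted):
#
#   device_id = lib.open_device(b"xi-com:\\\\.\\COM3")
#   worst_result = set_profile_8MT195X_540_4(lib, device_id)
#   lib.close_device(byref(cast(device_id, POINTER(c_int))))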
| [
"[email protected]"
] | |
1aeaca94f2d4d9feb9733db3c8cad22d7ff94e80 | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /examples/conditional_format.py | 868eec6890126a075a32371064be80ab9628e826 | [
"BSD-2-Clause"
] | permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,956 | py | ###############################################################################
#
# Example of how to add conditional formatting to an XlsxWriter file.
#
# Conditional formatting allows you to apply a format to a cell or a
# range of cells based on certain criteria.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright 2013-2021, John McNamara, [email protected]
#
import xlsxwriter
workbook = xlsxwriter.Workbook('conditional_format.xlsx')
worksheet1 = workbook.add_worksheet()
worksheet2 = workbook.add_worksheet()
worksheet3 = workbook.add_worksheet()
worksheet4 = workbook.add_worksheet()
worksheet5 = workbook.add_worksheet()
worksheet6 = workbook.add_worksheet()
worksheet7 = workbook.add_worksheet()
worksheet8 = workbook.add_worksheet()
worksheet9 = workbook.add_worksheet()
# Add a format. Light red fill with dark red text.
format1 = workbook.add_format({'bg_color': '#FFC7CE',
'font_color': '#9C0006'})
# Add a format. Green fill with dark green text.
format2 = workbook.add_format({'bg_color': '#C6EFCE',
'font_color': '#006100'})
# Some sample data to run the conditional formatting against.
data = [
[34, 72, 38, 30, 75, 48, 75, 66, 84, 86],
[6, 24, 1, 84, 54, 62, 60, 3, 26, 59],
[28, 79, 97, 13, 85, 93, 93, 22, 5, 14],
[27, 71, 40, 17, 18, 79, 90, 93, 29, 47],
[88, 25, 33, 23, 67, 1, 59, 79, 47, 36],
[24, 100, 20, 88, 29, 33, 38, 54, 54, 88],
[6, 57, 88, 28, 10, 26, 37, 7, 41, 48],
[52, 78, 1, 96, 26, 45, 47, 33, 96, 36],
[60, 54, 81, 66, 81, 90, 80, 93, 12, 55],
[70, 5, 46, 14, 71, 19, 66, 36, 41, 21],
]
###############################################################################
#
# Example 1.
#
caption = ('Cells with values >= 50 are in light red. '
'Values < 50 are in light green.')
# Write the data.
worksheet1.write('A1', caption)
for row, row_data in enumerate(data):
worksheet1.write_row(row + 2, 1, row_data)
# Write a conditional format over a range.
worksheet1.conditional_format('B3:K12', {'type': 'cell',
'criteria': '>=',
'value': 50,
'format': format1})
# Write another conditional format over the same range.
worksheet1.conditional_format('B3:K12', {'type': 'cell',
'criteria': '<',
'value': 50,
'format': format2})
###############################################################################
#
# Example 2.
#
caption = ('Values between 30 and 70 are in light red. '
'Values outside that range are in light green.')
worksheet2.write('A1', caption)
for row, row_data in enumerate(data):
worksheet2.write_row(row + 2, 1, row_data)
worksheet2.conditional_format('B3:K12', {'type': 'cell',
'criteria': 'between',
'minimum': 30,
'maximum': 70,
'format': format1})
worksheet2.conditional_format('B3:K12', {'type': 'cell',
'criteria': 'not between',
'minimum': 30,
'maximum': 70,
'format': format2})
###############################################################################
#
# Example 3.
#
caption = ('Duplicate values are in light red. '
'Unique values are in light green.')
worksheet3.write('A1', caption)
for row, row_data in enumerate(data):
worksheet3.write_row(row + 2, 1, row_data)
worksheet3.conditional_format('B3:K12', {'type': 'duplicate',
'format': format1})
worksheet3.conditional_format('B3:K12', {'type': 'unique',
'format': format2})
###############################################################################
#
# Example 4.
#
caption = ('Above average values are in light red. '
'Below average values are in light green.')
worksheet4.write('A1', caption)
for row, row_data in enumerate(data):
worksheet4.write_row(row + 2, 1, row_data)
worksheet4.conditional_format('B3:K12', {'type': 'average',
'criteria': 'above',
'format': format1})
worksheet4.conditional_format('B3:K12', {'type': 'average',
'criteria': 'below',
'format': format2})
###############################################################################
#
# Example 5.
#
caption = ('Top 10 values are in light red. '
'Bottom 10 values are in light green.')
worksheet5.write('A1', caption)
for row, row_data in enumerate(data):
worksheet5.write_row(row + 2, 1, row_data)
worksheet5.conditional_format('B3:K12', {'type': 'top',
'value': '10',
'format': format1})
worksheet5.conditional_format('B3:K12', {'type': 'bottom',
'value': '10',
'format': format2})
###############################################################################
#
# Example 6.
#
caption = ('Cells with values >= 50 are in light red. '
'Values < 50 are in light green. Non-contiguous ranges.')
# Write the data.
worksheet6.write('A1', caption)
for row, row_data in enumerate(data):
worksheet6.write_row(row + 2, 1, row_data)
# Write a conditional format over a range.
worksheet6.conditional_format('B3:K6', {'type': 'cell',
'criteria': '>=',
'value': 50,
'format': format1,
'multi_range': 'B3:K6 B9:K12'})
# Write another conditional format over the same range.
worksheet6.conditional_format('B3:K6', {'type': 'cell',
'criteria': '<',
'value': 50,
'format': format2,
'multi_range': 'B3:K6 B9:K12'})
###############################################################################
#
# Example 7.
#
caption = 'Examples of color scales with default and user colors.'
data = range(1, 13)
worksheet7.write('A1', caption)
worksheet7.write('B2', "2 Color Scale")
worksheet7.write('D2', "2 Color Scale + user colors")
worksheet7.write('G2', "3 Color Scale")
worksheet7.write('I2', "3 Color Scale + user colors")
for row, row_data in enumerate(data):
worksheet7.write(row + 2, 1, row_data)
worksheet7.write(row + 2, 3, row_data)
worksheet7.write(row + 2, 6, row_data)
worksheet7.write(row + 2, 8, row_data)
worksheet7.conditional_format('B3:B14', {'type': '2_color_scale'})
worksheet7.conditional_format('D3:D14', {'type': '2_color_scale',
'min_color': "#FF0000",
'max_color': "#00FF00"})
worksheet7.conditional_format('G3:G14', {'type': '3_color_scale'})
worksheet7.conditional_format('I3:I14', {'type': '3_color_scale',
'min_color': "#C5D9F1",
'mid_color': "#8DB4E3",
'max_color': "#538ED5"})
###############################################################################
#
# Example 8.
#
caption = 'Examples of data bars.'
worksheet8.write('A1', caption)
worksheet8.write('B2', "Default data bars")
worksheet8.write('D2', "Bars only")
worksheet8.write('F2', "With user color")
worksheet8.write('H2', "Solid bars")
worksheet8.write('J2', "Right to left")
worksheet8.write('L2', "Excel 2010 style")
worksheet8.write('N2', "Negative same as positive")
data = range(1, 13)
for row, row_data in enumerate(data):
worksheet8.write(row + 2, 1, row_data)
worksheet8.write(row + 2, 3, row_data)
worksheet8.write(row + 2, 5, row_data)
worksheet8.write(row + 2, 7, row_data)
worksheet8.write(row + 2, 9, row_data)
data = [-1, -2, -3, -2, -1, 0, 1, 2, 3, 2, 1, 0]
for row, row_data in enumerate(data):
worksheet8.write(row + 2, 11, row_data)
worksheet8.write(row + 2, 13, row_data)
worksheet8.conditional_format('B3:B14', {'type': 'data_bar'})
worksheet8.conditional_format('D3:D14', {'type': 'data_bar',
'bar_only': True})
worksheet8.conditional_format('F3:F14', {'type': 'data_bar',
'bar_color': '#63C384'})
worksheet8.conditional_format('H3:H14', {'type': 'data_bar',
'bar_solid': True})
worksheet8.conditional_format('J3:J14', {'type': 'data_bar',
'bar_direction': 'right'})
worksheet8.conditional_format('L3:L14', {'type': 'data_bar',
'data_bar_2010': True})
worksheet8.conditional_format('N3:N14', {'type': 'data_bar',
'bar_negative_color_same': True,
'bar_negative_border_color_same': True})
###############################################################################
#
# Example 9.
#
caption = 'Examples of conditional formats with icon sets.'
data = [
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3, 4],
[1, 2, 3, 4, 5],
[1, 2, 3, 4, 5],
]
worksheet9.write('A1', caption)
for row, row_data in enumerate(data):
worksheet9.write_row(row + 2, 1, row_data)
worksheet9.conditional_format('B3:D3', {'type': 'icon_set',
'icon_style': '3_traffic_lights'})
worksheet9.conditional_format('B4:D4', {'type': 'icon_set',
'icon_style': '3_traffic_lights',
'reverse_icons': True})
worksheet9.conditional_format('B5:D5', {'type': 'icon_set',
'icon_style': '3_traffic_lights',
'icons_only': True})
worksheet9.conditional_format('B6:D6', {'type': 'icon_set',
'icon_style': '3_arrows'})
worksheet9.conditional_format('B7:E7', {'type': 'icon_set',
'icon_style': '4_arrows'})
worksheet9.conditional_format('B8:F8', {'type': 'icon_set',
'icon_style': '5_arrows'})
worksheet9.conditional_format('B9:F9', {'type': 'icon_set',
'icon_style': '5_ratings'})
workbook.close()
| [
"[email protected]"
] | |
cb2811ebb7323dde07db3204b7cbb018b4aa24df | b5aef1178c9153ca0c4dd9823e5fa2a2bc64649f | /sqlalchemy_to_ormar/maps.py | 1a9e860b78fc123c5831dcea9f9bd6c03d9d63d5 | [
"MIT"
] | permissive | collerek/sqlalchemy-to-ormar | 970a56c69ff03b7e32b11e4b1ebcb00c3b8d903c | 07c1595297221b31db86b3d34b3aad54fa3967da | refs/heads/main | 2023-04-23T10:41:04.426391 | 2021-05-16T14:10:38 | 2021-05-16T14:10:38 | 355,256,537 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,602 | py | from typing import Dict, Set, Type
import ormar
from ormar import Model
FIELD_MAP = {
"integer": ormar.Integer,
"tinyint": ormar.Integer,
"smallint": ormar.Integer,
"bigint": ormar.Integer,
"small_integer": ormar.Integer,
"big_integer": ormar.BigInteger,
"string": ormar.String,
"char": ormar.String,
"varchar": ormar.String,
"text": ormar.Text,
"mediumtext": ormar.Text,
"longtext": ormar.Text,
"float": ormar.Float,
"decimal": ormar.Decimal,
"date": ormar.Date,
"datetime": ormar.DateTime,
"timestamp": ormar.DateTime,
"time": ormar.Time,
"boolean": ormar.Boolean,
"bit": ormar.Boolean,
}
TYPE_SPECIFIC_PARAMETERS: Dict[str, Dict] = {
"string": {"max_length": {"key": "length", "default": 255}},
"varchar": {"max_length": {"key": "length", "default": 255}},
"char": {"max_length": {"key": "length", "default": 255}},
"decimal": {
"max_digits": {"key": "precision", "default": 18},
"decimal_places": {"key": "scale", "default": 6},
},
}
COMMON_PARAMETERS: Dict[str, Dict] = dict(
name={"key": "name", "default": None},
primary_key={"key": "primary_key", "default": False},
autoincrement={"key": "autoincrement", "default": False},
index={"key": "index", "default": False},
unique={"key": "unique", "default": False},
nullable={"key": "nullable", "default": None},
default={"key": "default", "default": None},
server_default={"key": "server_default", "default": None},
)
PARSED_MODELS: Dict[Type, Type[Model]] = dict()
CURRENTLY_PROCESSED: Set = set()
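# Hypothetical helper (added sketch, not part of this module): one way the maps
# above can be combined to turn a SQLAlchemy Column into ormar field arguments.
# The attribute names read here follow SQLAlchemy's Column/TypeEngine API.
def build_field_kwargs(type_name: str, column) -> Dict:
    kwargs = {
        ormar_kwarg: getattr(column, spec["key"], spec["default"])
        for ormar_kwarg, spec in COMMON_PARAMETERS.items()
    }
    for ormar_kwarg, spec in TYPE_SPECIFIC_PARAMETERS.get(type_name, {}).items():
        kwargs[ormar_kwarg] = getattr(column.type, spec["key"], spec["default"])
    return kwargs  # e.g. FIELD_MAP[type_name](**kwargs) yields the ormar field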
| [
"[email protected]"
] | |
d35605db5bdf283207a2c171638328c4c8b53252 | 4e30d990963870478ed248567e432795f519e1cc | /tests/api/v3_1_1/test_nbar_app.py | 13a1bcd9798917799871178339c1315dd3a03d61 | [
"MIT"
] | permissive | CiscoISE/ciscoisesdk | 84074a57bf1042a735e3fc6eb7876555150d2b51 | f468c54998ec1ad85435ea28988922f0573bfee8 | refs/heads/main | 2023-09-04T23:56:32.232035 | 2023-08-25T17:31:49 | 2023-08-25T17:31:49 | 365,359,531 | 48 | 9 | MIT | 2023-08-25T17:31:51 | 2021-05-07T21:43:52 | Python | UTF-8 | Python | false | false | 9,399 | py | # -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI nbar_app API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.1.1', reason='version does not match')
def is_valid_get_nbar_apps(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
assert hasattr(obj, 'status_code')
json_schema_validate('jsd_1e8a476ad8455fdebad0d8973c810495_v3_1_1').validate(obj.response)
return True
def get_nbar_apps(api):
endpoint_result = api.nbar_app.get_nbar_apps(
filter='value1,value2',
filter_type='string',
page=0,
size=0,
sort='string',
sort_by='string'
)
return endpoint_result
@pytest.mark.nbar_app
def test_get_nbar_apps(api, validator):
try:
assert is_valid_get_nbar_apps(
validator,
get_nbar_apps(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_nbar_apps_default(api):
endpoint_result = api.nbar_app.get_nbar_apps(
filter=None,
filter_type=None,
page=None,
size=None,
sort=None,
sort_by=None
)
return endpoint_result
@pytest.mark.nbar_app
def test_get_nbar_apps_default(api, validator):
try:
assert is_valid_get_nbar_apps(
validator,
get_nbar_apps_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_create_nbar_app(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
assert hasattr(obj, 'status_code')
json_schema_validate('jsd_ccc30178afce5e51a65e96cd95ca1773_v3_1_1').validate(obj.response)
return True
def create_nbar_app(api):
endpoint_result = api.nbar_app.create_nbar_app(
active_validation=False,
description='string',
id='string',
name='string',
network_identities=[{'ports': 'string', 'protocol': 'string'}],
payload=None
)
return endpoint_result
@pytest.mark.nbar_app
def test_create_nbar_app(api, validator):
try:
assert is_valid_create_nbar_app(
validator,
create_nbar_app(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def create_nbar_app_default(api):
endpoint_result = api.nbar_app.create_nbar_app(
active_validation=False,
description=None,
id=None,
name=None,
network_identities=None,
payload=None
)
return endpoint_result
@pytest.mark.nbar_app
def test_create_nbar_app_default(api, validator):
try:
assert is_valid_create_nbar_app(
validator,
create_nbar_app_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_get_nbar_app_by_id(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
assert hasattr(obj, 'status_code')
json_schema_validate('jsd_61e99726f3745554a07ee102f74fe3bd_v3_1_1').validate(obj.response)
return True
def get_nbar_app_by_id(api):
endpoint_result = api.nbar_app.get_nbar_app_by_id(
id='string'
)
return endpoint_result
@pytest.mark.nbar_app
def test_get_nbar_app_by_id(api, validator):
try:
assert is_valid_get_nbar_app_by_id(
validator,
get_nbar_app_by_id(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_nbar_app_by_id_default(api):
endpoint_result = api.nbar_app.get_nbar_app_by_id(
id='string'
)
return endpoint_result
@pytest.mark.nbar_app
def test_get_nbar_app_by_id_default(api, validator):
try:
assert is_valid_get_nbar_app_by_id(
validator,
get_nbar_app_by_id_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_update_nbar_app_by_id(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
assert hasattr(obj, 'status_code')
json_schema_validate('jsd_b55622f1671359919573b261ba16ea71_v3_1_1').validate(obj.response)
return True
def update_nbar_app_by_id(api):
endpoint_result = api.nbar_app.update_nbar_app_by_id(
active_validation=False,
description='string',
id='string',
name='string',
network_identities=[{'ports': 'string', 'protocol': 'string'}],
payload=None
)
return endpoint_result
@pytest.mark.nbar_app
def test_update_nbar_app_by_id(api, validator):
try:
assert is_valid_update_nbar_app_by_id(
validator,
update_nbar_app_by_id(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def update_nbar_app_by_id_default(api):
endpoint_result = api.nbar_app.update_nbar_app_by_id(
active_validation=False,
id='string',
description=None,
name=None,
network_identities=None,
payload=None
)
return endpoint_result
@pytest.mark.nbar_app
def test_update_nbar_app_by_id_default(api, validator):
try:
assert is_valid_update_nbar_app_by_id(
validator,
update_nbar_app_by_id_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_delete_nbar_app_by_id(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
assert hasattr(obj, 'status_code')
json_schema_validate('jsd_44d289d5685350f5b00f130db0a45142_v3_1_1').validate(obj.response)
return True
def delete_nbar_app_by_id(api):
endpoint_result = api.nbar_app.delete_nbar_app_by_id(
id='string'
)
return endpoint_result
@pytest.mark.nbar_app
def test_delete_nbar_app_by_id(api, validator):
try:
assert is_valid_delete_nbar_app_by_id(
validator,
delete_nbar_app_by_id(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def delete_nbar_app_by_id_default(api):
endpoint_result = api.nbar_app.delete_nbar_app_by_id(
id='string'
)
return endpoint_result
@pytest.mark.nbar_app
def test_delete_nbar_app_by_id_default(api, validator):
try:
assert is_valid_delete_nbar_app_by_id(
validator,
delete_nbar_app_by_id_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
| [
"[email protected]"
] | |
435f09a949e10d5926b47462513ec6a935159a57 | ba4f68fb01aa32970dadea67cc8d039b4c0f6d9e | /python/facebook_abcs/graphs/bfs_short_reach.py | d7e090dc241a595327009effbf8e195b8a27e16d | [] | no_license | campbellmarianna/Code-Challenges | 12a7808563e36b1a2964f10ae64618c0be41b6c0 | 12e21c51665d81cf1ea94c2005f4f9d3584b66ec | refs/heads/master | 2021-08-03T23:23:58.297437 | 2020-05-15T07:13:46 | 2020-05-15T07:13:46 | 168,234,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,585 | py | '''
Prompt:
Consider an undirected graph where each edge is the same weight. Each of the nodes is labeled consecutively.
You will be given a number of queries. For each query, you will be given a list of edges describing an undirected graph. After you create a representation of the graph, you must determine and report the shortest distance to each of the other nodes from a given starting position using the breadth-first search algorithm (BFS). Distances are to be reported in node number order, ascending. If a node is unreachable, print -1 for that node. Each of the edges weighs 6 units of distance.
For example, given a graph with 4 nodes and 2 edges, a visual representation is:
The start node for the example is node 1. Outputs are calculated for distances to nodes 2 through 4: 6 6 -1. Each edge is 6 units, and the unreachable node has the required return distance of -1.
Function Description
Complete the bfs function in the editor below. It must return an array of integers representing distances from the start node to each other node in node ascending order. If a node is unreachable, its distance is -1.
bfs has the following parameter(s):
n: the integer number of nodes
m: the integer number of edges
edges: a 2D array of start and end nodes for edges
s: the node to start traversals from
Input Format
The first line contains an integer q, the number of queries. Each of the following q sets of lines has the following format:
The first line contains two space-separated integers n and m, the number of nodes and edges in the graph.
Each line of the m subsequent lines contains two space-separated integers, u and v, describing an edge connecting node u to node v.
The last line contains a single integer, s, denoting the index of the starting node.
Constraints
Output Format
For each of the q queries, print a single line of space-separated integers denoting the shortest distances to each of the other n-1 nodes from starting position s. These distances should be listed sequentially by node number, but should not include node s. If some node is unreachable from s, print -1 as the distance to that node.
Sample Input
2 # the number of queries
4 2 # n: number of nodes m: number of edges in the graph
1 2 # u and v: describing an edge connecting node u to node v
1 3
1
3 1
2 3
2 # s: denoting the index of the starting node.
Sample Output
6 6 -1
-1 6
'''
# Very helpful: Breadth First Search loops through a sorted array and adds to a queue
# https://www.youtube.com/watch?v=-uR7BSfNJko
# Getting user input Iteration #1
# N = int(input())
# print(N)
# for _ in range(N):
# parts = input().strip().split(' ')
# print(parts)
# Getting user input Iteration #2 (leftover experiment; would need `import fileinput` to run)
# for line in fileinput.input():
#     parts = line.strip().split(' ')
#     print(parts)
# Along with Breadth First Search Algorithm by lorisrossi https://www.hackerrank.com/challenges/bfsshortreach/forum
def bfs(n, m, edges, s):
from collections import deque
# Build graph
graph = {}
for num in range(1, n+1):
graph[num] = set()
for l, r in edges:
graph[l].add(r)
graph[r].add(l)
reached = {}
# Explore graph once
frontier = deque([(s, 0)])
seen = {s}
while frontier:
curr_node, curr_cost = frontier.popleft()
for nbour in graph[curr_node]:
if nbour not in seen:
seen.add(nbour)
reached[nbour] = curr_cost+6
frontier.append((nbour, curr_cost+6))
result = []
for node in range(1, n+1):
if s != node:
result.append(reached.get(node, -1))
return result
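# Minimal check using the sample from the prompt above (4 nodes, 2 edges
# (1, 2) and (1, 3), start node 1); expected output: [6, 6, -1].
if __name__ == '__main__':
    print(bfs(4, 2, [(1, 2), (1, 3)], 1))  # [6, 6, -1]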
| [
"[email protected]"
] | |
fa97ee9fd2838b1142288a25b7c3b07d01df9382 | 80f622252281e6288d24b101dda0d4ee3634faed | /Titanic/model/model.py | 92f1eea0ae9e1af59615e0f34f8ec795553013ab | [] | no_license | jalondono/HandsOn-MachineLearning | c7cd7ce967180b84dffc2953d9ad5894c2bfc46e | eb3a3f2d6e490a827aa8b50cfb6e606cb3e85c5d | refs/heads/master | 2023-01-03T01:10:32.836434 | 2020-10-29T15:47:27 | 2020-10-29T15:47:27 | 300,308,942 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,158 | py | import pandas as pd
import numpy as np
import tensorflow.keras as K
import mlflow.tensorflow
import sys
import logging
import zipfile
# mlflow server --backend-store-uri mlruns/ --default-artifact-root mlruns/ --host 0.0.0.0 --port 5000
def getting_data(zipfolder, filename, cols):
"""
Get the data from a zip file
    :param zipfolder: path to the zip archive
    :param filename: csv file to read after extraction
    :param cols: columns to load
    :return: train dataset
"""
with zipfile.ZipFile(zipfolder, 'r') as zip_ref:
zip_ref.extractall()
data = pd.read_csv(filename, usecols=cols)
print('data set shape: ', data.shape, '\n')
print(data.head())
return data
def process_args(argv):
    """
    Convert the command-line arguments into the needed formats
    :param argv: parameters (data_path, debug, network, alpha, l1_ratio)
    :return: converted parameters
    """
    data_path = argv[1] if len(argv) > 1 else '../data'
    debug = argv[2].lower() if len(argv) > 2 else 'false'
    model_type = argv[3] if len(argv) > 3 else '[256,128]'
    splited_network = [int(x) for x in model_type[1:-1].split(',')]
    alpha = float(argv[4]) if len(argv) > 4 else 0.5
    l1_ratio = float(argv[5]) if len(argv) > 5 else 0
    return data_path, debug, splited_network, alpha, l1_ratio
def create_model(network):
model = K.models.Sequential()
model.add(K.layers.Dense(units=256, input_dim=6,
kernel_initializer='ones',
kernel_regularizer=K.regularizers.l1(l1_ratio),
))
for units in network[1:]:
model.add(K.layers.Dense(units=units,
kernel_initializer='ones',
kernel_regularizer=K.regularizers.l1(l1_ratio),
))
model.add(K.layers.Dense(units=1, activation='sigmoid'))
opt = K.optimizers.Adam(learning_rate=alpha)
model.compile(optimizer=opt, loss='binary_crossentropy',
metrics=['accuracy'], )
print(model.summary())
return model
def train_model(model, X_train, Y_train, batch_size=128,
                epoch=80, val_split=0.1):
    """
    Perform the training of the model
    :param model: model previously compiled
    :return: history
    """
    history = model.fit(x=X_train,
                        y=Y_train,
                        batch_size=batch_size,
                        epochs=epoch,
                        validation_split=val_split)
    return history
if __name__ == '__main__':
logging.basicConfig(level=logging.WARN)
logger = logging.getLogger(__name__)
# mlflow
mlflow.tensorflow.autolog()
# Utils cols from data
train_cols = ['Survived', 'Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare']
test_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare']
X_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare']
Y_cols = ['Survived']
# Get value arguments
data_path, debug, network, alpha, l1_ratio = process_args(sys.argv)
# train Data
filename = 'train.csv'
data = getting_data(data_path, filename, train_cols)
data['Sex_b'] = pd.factorize(data.Sex)[0]
data = data.drop(['Sex'], axis=1)
data = data.rename(columns={"Sex_b": "Sex"})
# testing data
filename = 'test.csv'
test = getting_data(data_path, filename, test_cols)
test['Sex_b'] = pd.factorize(test.Sex)[0]
test = test.drop(['Sex'], axis=1)
test = test.rename(columns={"Sex_b": "Sex"})
# filling train na values with mean
column_means = data.mean()
data = data.fillna(column_means)
# filling test na values with mean
column_means = test.mean()
test = test.fillna(column_means)
input_data = np.array(data[X_cols])
label_date = np.array(data[Y_cols])
test_input_data = np.array(test[X_cols])
X_train = input_data
Y_train = label_date
# definition of the model
model = create_model(network)
# training model
history = train_model(model, X_train, Y_train)
    # predicting (model.predict returns per-sample survival probabilities,
    # not a (loss, accuracy) pair, so report the predictions themselves)
    predictions = model.predict(test_input_data, batch_size=32, verbose=1)
    print("First predictions:", predictions[:5].ravel())
| [
"[email protected]"
] | |
08a65bb7db851c3827f50ea795ce9e58ad45c818 | 7eebbfaee45fdc57c4fc6ba32c87c35be1e62b14 | /airbyte-integrations/connectors/source-facebook-pages/source_facebook_pages/streams.py | 717fb1c76800fc295cff19b40b475069c0e2914a | [
"MIT",
"Elastic-2.0"
] | permissive | Velocity-Engineering/airbyte | b6e1fcead5b9fd7c74d50b9f27118654604dc8e0 | 802a8184cdd11c1eb905a54ed07c8732b0c0b807 | refs/heads/master | 2023-07-31T15:16:27.644737 | 2021-09-28T08:43:51 | 2021-09-28T08:43:51 | 370,730,633 | 0 | 1 | MIT | 2021-06-08T05:58:44 | 2021-05-25T14:55:43 | Java | UTF-8 | Python | false | false | 4,651 | py | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from abc import ABC
from typing import Any, Iterable, Mapping, MutableMapping, Optional
import requests
from airbyte_cdk.sources.streams.http import HttpStream
from source_facebook_pages.metrics import PAGE_FIELDS, PAGE_METRICS, POST_FIELDS, POST_METRICS
class FacebookPagesStream(HttpStream, ABC):
url_base = "https://graph.facebook.com/v11.0/"
primary_key = "id"
data_field = "data"
def __init__(
self,
access_token: str = None,
page_id: str = None,
**kwargs,
):
super().__init__(**kwargs)
self._access_token = access_token
self._page_id = page_id
@property
def path_param(self):
return self.name[:-1]
def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
data = response.json()
if not data.get("data") or not data.get("paging"):
return {}
return {
"limit": 100,
"after": data.get("paging", {}).get("cursors", {}).get("after"),
}
def request_params(
self,
stream_state: Mapping[str, Any],
stream_slice: Mapping[str, any] = None,
next_page_token: Mapping[str, Any] = None,
) -> MutableMapping[str, Any]:
next_page_token = next_page_token or {}
params = {"access_token": self._access_token, **next_page_token}
return params
    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
        if not self.data_field:
            yield response.json()
            return
        records = response.json().get(self.data_field, [])
        for record in records:
            yield record
class Page(FacebookPagesStream):
"""
API docs: https://developers.facebook.com/docs/graph-api/reference/page/,
"""
data_field = ""
def path(self, **kwargs) -> str:
return self._page_id
def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
return None
def request_params(self, **kwargs) -> MutableMapping[str, Any]:
params = super().request_params(**kwargs)
# we have to define which fields will return from Facebook API
# because FB API doesn't provide opportunity to get fields dynamically without delays
# so in PAGE_FIELDS we define fields that user can get from API
params["fields"] = PAGE_FIELDS
return params
class Post(FacebookPagesStream):
"""
https://developers.facebook.com/docs/graph-api/reference/v11.0/page/feed,
"""
def path(self, **kwargs) -> str:
return f"{self._page_id}/posts"
def request_params(self, **kwargs) -> MutableMapping[str, Any]:
params = super().request_params(**kwargs)
params["fields"] = POST_FIELDS
return params
class PageInsights(FacebookPagesStream):
"""
API docs: https://developers.facebook.com/docs/graph-api/reference/page/insights/,
"""
def path(self, **kwargs) -> str:
return f"{self._page_id}/insights"
def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
return None
def request_params(
self,
stream_state: Mapping[str, Any],
stream_slice: Mapping[str, any] = None,
next_page_token: Mapping[str, Any] = None,
) -> MutableMapping[str, Any]:
params = super().request_params(stream_state, stream_slice, next_page_token)
params["metric"] = ",".join(PAGE_METRICS)
return params
class PostInsights(FacebookPagesStream):
"""
API docs: https://developers.facebook.com/docs/graph-api/reference/post/insights/,
"""
def path(self, **kwargs) -> str:
return f"{self._page_id}/posts"
def request_params(
self,
stream_state: Mapping[str, Any],
stream_slice: Mapping[str, any] = None,
next_page_token: Mapping[str, Any] = None,
) -> MutableMapping[str, Any]:
params = super().request_params(stream_state, stream_slice, next_page_token)
params["fields"] = f'insights.metric({",".join(POST_METRICS)})'
return params
def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
# unique case so we override this method
records = response.json().get(self.data_field) or []
for insights in records:
if insights.get("insights"):
data = insights.get("insights").get("data")
for insight in data:
yield insight
else:
yield insights
| [
"[email protected]"
] | |
8a7ff0ad022e61991efae1db238130da5169b004 | 7259dbcc9e32502945d362caa43d4ad380cd04ea | /OIT_SpiderCode/OYT_zujuan_Param/OYT_Scrapy_Param/spiders/new_zujuan_English_middle_spiderparam.py | 27bc29f69ebc3bbe7b018e3cdfcf6fd90583eb7c | [
"MIT"
] | permissive | Doraying1230/Python-Study | daa143c133262f4305624d180b38205afe241163 | 8dccfa2108002d18251053147ccf36551d90c22b | refs/heads/master | 2020-03-29T13:46:13.061373 | 2018-07-26T15:19:32 | 2018-07-26T15:19:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,915 | py | #coding:utf-8
import scrapy
from ..common.BaseObject import BaseObject
from scrapy.spider import CrawlSpider
from scrapy.selector import Selector
from scrapy.http import Request,FormRequest
from scrapy.selector import Selector
from scrapy.http.cookies import CookieJar
from fake_useragent import UserAgent
import time
import re
import os
class ZuQuanLoadData(BaseObject,CrawlSpider):
name = 'zujuan_english_middle_param'
custom_settings = {
'DOWNLOAD_DELAY': 3, 'CONCURRENT_REQUESTS_PER_IP': 5,
'ITEM_PIPELINES': {'OIT_ScrapyData.pipelines.OitScrapydataPipeline': None, }
}
    def __init__(self, *args, **kwargs):
        super(ZuQuanLoadData, self).__init__(*args, **kwargs)
        ua = UserAgent()
        user_agent = ua.random
self.file_name='zujuan_english_middle_param'
self.cookieValue = {'xd': '75519cb9f2bf90d001c0560f5c40520062a60ada9cb38350078f83e04ee38a31a%3A2%3A%7Bi%3A0%3Bs%3A2%3A%22xd%22%3Bi%3A1%3Bi%3A2%3B%7D',
'isdialog': 'bad3c21672f08107d1d921526d191f58bd47d79e7dbb432bd32624a836b42e85a%3A2%3A%7Bi%3A0%3Bs%3A8%3A%22isdialog%22%3Bi%3A1%3Bs%3A4%3A%22show%22%3B%7D',
'_csrf': '34c90a094ad3b3ab53cb75751fcab02bf693c164a6f5dfa244a6aec61e2f187ca%3A2%3A%7Bi%3A0%3Bs%3A5%3A%22_csrf%22%3Bi%3A1%3Bs%3A32%3A%22YlTOGIyOfskw0gy-voJy0vbGw4VVswCs%22%3B%7D',
'device': '310bdaba05b30bb632f66fde9bf3e2b91ebc4d607c250c2e1a1d9e0dfb900f01a%3A2%3A%7Bi%3A0%3Bs%3A6%3A%22device%22%3Bi%3A1%3BN%3B%7D',
'PHPSESSID': 'utuj4csehjg3q9inhnuhptugk6',
'_sync_login_identity': '771bfb9f524cb8005c68374bdf39c9f22c36d71cf21d91082b96e7bd7a21e9eea%3A2%3A%7Bi%3A0%3Bs%3A20%3A%22_sync_login_identity%22%3Bi%3A1%3Bs%3A50%3A%22%5B1285801%2C%22YwmDuM6ftsN7jeMH7VDdT4OI-SvOisii%22%2C86400%5D%22%3B%7D',
'chid': '14e5d5f939c71d411898b3ee4671b5e06472c56cd9cffb59cc071e18732212f1a%3A2%3A%7Bi%3A0%3Bs%3A4%3A%22chid%22%3Bi%3A1%3Bs%3A1%3A%224%22%3B%7D',
'_identity': '95b973f53ecb67fdb27fe40c5660df1bbdb9c168cac8d1999dc6d0772a9ea122a%3A2%3A%7Bi%3A0%3Bs%3A9%3A%22_identity%22%3Bi%3A1%3Bs%3A50%3A%22%5B1285801%2C%22fa26ed63eeec36f3e1682f05b68cd887%22%2C86400%5D%22%3B%7D',
'Hm_lvt_6de0a5b2c05e49d1c850edca0c13051f': '1515666025',
'Hm_lpvt_6de0a5b2c05e49d1c850edca0c13051f': '1515666640'}
self.hearders = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Connection': 'keep - alive',
# 'Referer': 'http://www.zujuan.com/question /index?chid = 3 & xd = 1',
'User-Agent': user_agent#'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36'
}
print(self.hearders)
self.domain = 'http://www.zujuan.com'
def start_requests(self):
start_url = 'http://www.zujuan.com/question/index?chid=4&xd=2'
return [Request(url=start_url,cookies=self.cookieValue,headers=self.hearders,callback=self.parse_version)]
def parse_version(self,response):
result = response.body.decode()
resu = Selector(text=result)
versionTexts = resu.xpath('//div[@class="type-items"][1]/div/div/div/a/text()').extract()
versionUrls = resu.xpath('//div[@class="type-items"][1]/div/div/div/a/@href').extract()
version = dict(zip(versionTexts, versionUrls))
print(version)#{'人教版': '/question?bookversion=11740&chid=3&xd=1', '青岛版六三制': '/question?bookversion=23087&chid=3&xd=1', '北师大版': '/question?bookversion=23313&chid=3&xd=1', '苏教版': '/question?bookversion=25571&chid=3&xd=1', '西师大版': '/question?bookversion=47500&chid=3&xd=1', '青岛版五四制': '/question?bookversion=70885&chid=3&xd=1', '浙教版': '/question?bookversion=106060&chid=3&xd=1'}
for text in version :
if ('牛津' in text):
manURL =self.domain+version[text]#http://www.zujuan.com/question?bookversion=25571&chid=3&xd=1
deliver_param = {'version':'牛津译林版'}
deliver_param['course'] = '英语'
return [Request(url=manURL, meta=deliver_param,cookies=self.cookieValue, headers=self.hearders,callback=self.parse_categories)]
elif('沪教' in text):
manURL = self.domain + version[text] # http://www.zujuan.com/question?bookversion=25571&chid=3&xd=1
deliver_param = {'version': '沪教版'}
deliver_param['course'] = '英语'
return [Request(url=manURL,meta=deliver_param, cookies=self.cookieValue, headers=self.hearders,
callback=self.parse_categories)]
else:
pass
def parse_categories(self,response):
print(123,response.meta)
result = response.body.decode()
resu = Selector(text=result)
categoriesTexts = resu.xpath('//div[@class="type-items"][2]/div/div/div/a/text()').extract()
categoriesUrls = resu.xpath('//div[@class="type-items"][2]/div/div/div/a/@href').extract()
#http://www.zujuan.com/question?categories=25576&bookversion=25571&nianji=25576&chid=3&xd=1
categories = dict(zip(categoriesTexts, categoriesUrls))
print(123,categories)
categories_list = []
# print(categories)# {'一年级上册': '/question?categories=25572&bookversion=25571&nianji=25572&chid=3&xd=1', '一年级下册': '/question?categories=25573&bookversion=25571&nianji=25573&chid=3&xd=1', '二年级上册': '/question?categories=25574&bookversion=25571&nianji=25574&chid=3&xd=1', '二年级下册': '/question?categories=25575&bookversion=25571&nianji=25575&chid=3&xd=1', '三年级上册': '/question?categories=25576&bookversion=25571&nianji=25576&chid=3&xd=1', '三年级下册': '/question?categories=25577&bookversion=25571&nianji=25577&chid=3&xd=1', '四年级上册': '/question?categories=25578&bookversion=25571&nianji=25578&chid=3&xd=1', '四年级下册': '/question?categories=25579&bookversion=25571&nianji=25579&chid=3&xd=1', '五年级上册': '/question?categories=25580&bookversion=25571&nianji=25580&chid=3&xd=1', '五年级下册': '/question?categories=25581&bookversion=25571&nianji=25581&chid=3&xd=1', '六年级上册': '/question?categories=25582&bookversion=25571&nianji=25582&chid=3&xd=1', '六年级下册': '/question?categories=25592&bookversion=25571&nianji=25592&chid=3&xd=1'}
for text in categories:
categories_list.append(text)
comment = 0
while comment < len(categories_list):
text = categories_list[comment]
nianjiContentUrl = self.domain + categories[text]
print(12,nianjiContentUrl)
nianjiContentUrl =self.domain+categories[text]
comment += 1
response.meta['nianji'] = text
yield Request(url=nianjiContentUrl,meta=response.meta,cookies=self.cookieValue, headers=self.hearders,callback=self.parse_categories_content)
def parse_categories_content(self,response):
print(123,response.meta)
result = response.body.decode()
resu = Selector(text=result)
sectionsText = resu.xpath('//div[@id="J_Tree"]/div/a/text()').extract()
sectionsUrl = resu.xpath('//div[@id="J_Tree"]/div/a/@href').extract()
sections = dict(zip(sectionsText,sectionsUrl))
print(sections)
self.make_file()
sections_Text = []
sections_number = []
for text in sections:
sections_Text.append(text)
categoriesNumber = sections[text]
print(type(categoriesNumber),categoriesNumber)
ret = re.findall(r'categories=(\d*)&',categoriesNumber)
sections_number.append(ret[0])
print(123, ret)
need_sections_dict = dict(zip(sections_Text, sections_number))
nianji = response.meta ['nianji']
response.meta[nianji] = need_sections_dict
need_sections_str = str(response.meta)
with open('d:\\xiti10001\\zujuan\\{0}\\{1}\\categories_english_{0}.txt'.format(time.strftime('%Y%m%d',time.localtime(time.time())),self.file_name),'a') as f:
f.write(need_sections_str)
f.write('\n')
# categoriesNumber_s = categoriesNumber.find('=')
# print(categoriesNumber_s)
# categoriesNumber_e = categoriesNumber.find('&')
# print(categoriesNumber_e)
# categoriesNumbers = categoriesNumber[categoriesNumber_s,categoriesNumber_e]
    def make_file(self):
        path = 'd:\\xiti10001\\zujuan\\{0}\\{1}'.format(
            time.strftime('%Y%m%d', time.localtime(time.time())), self.file_name)
        if not os.path.exists(path):
            os.makedirs(path)
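# Run sketch (assumes this spider is registered inside a configured Scrapy
# project; the name matches the `name` attribute above):
#   scrapy crawl zujuan_english_middle_param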
| [
"[email protected]"
] | |
ff48c9f51db42b5415104dcad82dcc5e7180f1a0 | a097ecf40fee329cfa9e3f77e4b6e9e29a8f148a | /5_section/5_c4.py | ad4129556566f3c699ab43db88f59f5c50ed0ab1 | [] | no_license | FumihisaKobayashi/The_self_taught_python | 1e7008b17050db3e615c2f3aa68df2edc7f93192 | 329d376689029b75da73a6f98715cc7e83e8cc2c | refs/heads/master | 2021-01-06T16:04:13.382955 | 2020-07-28T14:39:24 | 2020-07-28T14:39:24 | 241,389,313 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | fumi = {
"身長": "1.73m",
"好きな色": "緑",
"好きな人": "Hideki Matsui"
}
answer = input("身長,好きな色 or 好きな人")
if answer in fumi:
a = fumi[answer]
print(a)
#:注意 | [
"[email protected]"
] | |
3d1e771da9ec0f32bfd297a1b19794e9054adce4 | 1825283527f5a479204708feeaf55f4ab6d1290b | /leetcode/python/45/sol.py | 3db6f97188dd189aef4c4caf07b43524d9f7f299 | [] | no_license | frankieliu/problems | b82c61d3328ffcc1da2cbc95712563355f5d44b5 | 911c6622448a4be041834bcab25051dd0f9209b2 | refs/heads/master | 2023-01-06T14:41:58.044871 | 2019-11-24T03:47:22 | 2019-11-24T03:47:22 | 115,065,956 | 1 | 0 | null | 2023-01-04T07:25:52 | 2017-12-22T02:06:57 | HTML | UTF-8 | Python | false | false | 2,156 | py |
10-lines C++ (16ms) / Python BFS Solutions with Explanations
https://leetcode.com/problems/jump-game-ii/discuss/18019
* Lang: python3
* Author: jianchao-li
* Votes: 71
This problem has a nice BFS structure. Let's illustrate it using the example `nums = [2, 3, 1, 1, 4]` in the problem statement. We are initially at position `0`. Then we can move at most `nums[0]` steps from it. So, after one move, we may reach `nums[1] = 3` or `nums[2] = 1`. So these nodes are reachable in `1` move. From these nodes, we can further move to `nums[3] = 1` and `nums[4] = 4`. Now you can see that the target `nums[4] = 4` is reachable in `2` moves.
Putting these into codes, we keep two pointers `start` and `end` that record the current range of the starting nodes. Each time after we make a move, update `start` to be `end + 1` and `end` to be the farthest index that can be reached in `1` move from the current `[start, end]`.
To get an accepted solution, it is important to handle all the edge cases. And the following codes handle all of them in a unified way without using the unclean `if` statements :-)
----------
**C++**
class Solution {
public:
int jump(vector<int>& nums) {
int n = nums.size(), step = 0, start = 0, end = 0;
while (end < n - 1) {
step++;
int maxend = end + 1;
for (int i = start; i <= end; i++) {
if (i + nums[i] >= n - 1) return step;
maxend = max(maxend, i + nums[i]);
}
start = end + 1;
end = maxend;
}
return step;
}
};
----------
**Python**
class Solution:
# @param {integer[]} nums
# @return {integer}
def jump(self, nums):
n, start, end, step = len(nums), 0, 0, 0
while end < n - 1:
step += 1
maxend = end + 1
for i in range(start, end + 1):
if i + nums[i] >= n - 1:
return step
maxend = max(maxend, i + nums[i])
start, end = end + 1, maxend
return step
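----------

**Quick check**

Running the example from the explanation above (a sketch, assuming the Python class above):

    sol = Solution()
    print(sol.jump([2, 3, 1, 1, 4]))  # 2 (jump 0 -> 1 -> 4)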
| [
"[email protected]"
] | |
13f1896c22ae2a9880e175bd288981ebe1216ccf | 8d5ba6747531cbd43d63d32265fd608f9081c3b7 | /.venv/lib/python2.7/site-packages/indico/modules/events/logs/controllers.py | a436382fa8b13d29f35d97c1b401f0e523a58dd9 | [] | no_license | Collinsnyamao/indico | 0e433b78803afae5b1ac90483db1f3d90ce2fddb | 32adf8123e266eb81439b654abc993b98e0cd7f2 | refs/heads/master | 2020-03-18T04:55:40.386595 | 2018-06-02T13:45:47 | 2018-06-02T13:45:47 | 134,314,163 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,324 | py | # This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.modules.events.logs.models.entries import EventLogEntry
from indico.modules.events.logs.views import WPEventLogs
from indico.modules.events.management.controllers import RHManageEventBase
class RHEventLogs(RHManageEventBase):
"""Shows the modification/action log for the event"""
def _process(self):
entries = self.event.log_entries.order_by(EventLogEntry.logged_dt.desc()).all()
realms = {e.realm for e in entries}
return WPEventLogs.render_template('logs.html', self.event, entries=entries, realms=realms)
| [
"[email protected]"
] | |
0ac4cdf0dc4d0068c5d28f7e139bf35bbae92bca | c1ed1b90f7e914aee1a17cd9b5bb83cf288f7e85 | /usersAccount/apps.py | 7e257ce953933d0d4ded1fea4b4a19236a69a80c | [] | no_license | tanaychaulinsec/User-authentication | 87e111f3731b57f9057554a58781d1a1705e351c | 6652e72a5b639174cb20ccdae1c49883bdcc8514 | refs/heads/master | 2022-12-12T10:41:25.172936 | 2020-08-25T15:39:00 | 2020-08-25T15:39:00 | 289,565,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | from django.apps import AppConfig
class UsersaccountConfig(AppConfig):
name = 'usersAccount'
| [
"[email protected]"
] | |
d1878d336619c62c219f42222f728c8e4ed65c83 | 7d768b5be4213c3ac90648d48d1a322fb8c5c433 | /python_code/chuanzhi/python_advance/19/process_pool.py | e42b0da91f0fd4f73e665517b8f08d73f03c0eeb | [] | no_license | googleliyang/gitbook_cz_python | 7da5070b09e760d5e099aeae468c08e705b7da78 | c82b7d435dc11016e24cde2bdc4a558f507cb668 | refs/heads/master | 2020-04-02T17:47:58.400424 | 2018-12-22T09:48:59 | 2018-12-22T09:48:59 | 154,672,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @File : process_pool.py
# @Author: ly
# @Date : 2018/12/8
| [
"[email protected]"
] | |
c35827798e41b221d01c7605547d9563c1b93e01 | c040de12811afa588a23ad6c0cd4fdc849ab469f | /saklient/cloud/errors/usernotspecifiedexception.py | 4bd94f412d92c987223a12491a2dad83d3c4cda1 | [
"MIT"
] | permissive | toshitanian/saklient.python | 3707d1113744122c5ab1ae793f22c6c3a0f65bc4 | 287c56915dd825d676eddc538cbb33b483803dc2 | refs/heads/master | 2021-05-28T08:13:16.851101 | 2014-10-09T09:54:03 | 2014-10-09T09:54:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | # -*- coding:utf-8 -*-
from ...errors.httpforbiddenexception import HttpForbiddenException
# module saklient.cloud.errors.usernotspecifiedexception
class UserNotSpecifiedException(HttpForbiddenException):
    ## The requested operation is not permitted. This API must be accessed with an authentication method that can identify the user.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(UserNotSpecifiedException, self).__init__(status, code, "要求された操作は許可されていません。このAPIはユーザを特定できる認証方法でアクセスする必要があります。" if message is None or message == "" else message)
| [
"[email protected]"
] | |
c6eafbbe4676917c6f23a05bc73e21e549c0ba3f | 43842089122512e6b303ebd05fc00bb98066a5b2 | /dynamic_programming/120_triangle.py | 99985fab0c45baef506be9737699a9531b32e925 | [] | no_license | mistrydarshan99/Leetcode-3 | a40e14e62dd400ddb6fa824667533b5ee44d5f45 | bf98c8fa31043a45b3d21cfe78d4e08f9cac9de6 | refs/heads/master | 2022-04-16T11:26:56.028084 | 2020-02-28T23:04:06 | 2020-02-28T23:04:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,507 | py | """
Given a triangle, find the minimum path sum from top to bottom. Each step you may move to adjacent numbers on the row below.
For example, given the following triangle
[
[2],
[3,4],
[6,5,7],
[4,1,8,3]
]
The minimum path sum from top to bottom is 11 (i.e., 2 + 3 + 5 + 1 = 11).
"""
class Solution(object):
def minimumTotal_1(self, triangle):
"""
:type triangle: List[List[int]]
:rtype: int
"""
result = []
for line in range(1, len(triangle)):
result.append([0] * line)
result.append(triangle[-1])
for i in reversed(range(len(triangle))):
for j in range(i):
result[i - 1][j] = min(result[i][j], result[i][j+1]) + triangle[i - 1][j]
return result[0][0]
def minimumTotal_2(self, triangle):
# modify the triangle in place
if not triangle:
return
for i in range(len(triangle)-2, -1, -1):
for j in range(len(triangle[i])):
triangle[i][j] = min(triangle[i+1][j], triangle[i+1][j+1]) + triangle[i][j]
return triangle[0][0]
def minimumTotal_3(self, triangle):
# O(n) space
if not triangle:
return
result = triangle[-1]
for i in range(len(triangle) - 2, -1, -1):
for j in range(len(triangle[i])):
result[j] = min(result[j], result[j+1]) + triangle[i][j]
return result[0]
triangle_1 = [[2],[3,4],[6,5,7],[4,1,8,3]]
| [
"[email protected]"
] | |
12431f449479c4225d285315b7a3bb921570c910 | efcd21234f3291e8fc561f49a7c88fc57a63e952 | /tests/unit/language/ast/test_directive_definition.py | b356575d34de9eab8e68c11d4445ef82a42fc23c | [
"MIT"
] | permissive | tartiflette/tartiflette | 146214a43847d2f423bf74594643c1fdefc746f1 | 421c1e937f553d6a5bf2f30154022c0d77053cfb | refs/heads/master | 2023-09-01T02:40:05.974025 | 2022-01-20T14:55:31 | 2022-01-20T14:55:31 | 119,035,565 | 586 | 39 | MIT | 2023-09-11T07:49:27 | 2018-01-26T09:56:10 | Python | UTF-8 | Python | false | false | 6,673 | py | import pytest
from tartiflette.language.ast import DirectiveDefinitionNode
def test_directivedefinitionnode__init__():
directive_definition_node = DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
)
assert directive_definition_node.name == "directiveDefinitionName"
assert (
directive_definition_node.locations == "directiveDefinitionLocations"
)
assert (
directive_definition_node.description
== "directiveDefinitionDescription"
)
assert (
directive_definition_node.arguments == "directiveDefinitionArguments"
)
assert directive_definition_node.location == "directiveDefinitionLocation"
@pytest.mark.parametrize(
"directive_definition_node,other,expected",
[
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
Ellipsis,
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionNameBis",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocationsBis",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescriptionBis",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArgumentsBis",
location="directiveDefinitionLocation",
),
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocationBis",
),
False,
),
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
True,
),
],
)
def test_directivedefinitionnode__eq__(
directive_definition_node, other, expected
):
assert (directive_definition_node == other) is expected
@pytest.mark.parametrize(
"directive_definition_node,expected",
[
(
DirectiveDefinitionNode(
name="directiveDefinitionName",
locations="directiveDefinitionLocations",
description="directiveDefinitionDescription",
arguments="directiveDefinitionArguments",
location="directiveDefinitionLocation",
),
"DirectiveDefinitionNode("
"description='directiveDefinitionDescription', "
"name='directiveDefinitionName', "
"arguments='directiveDefinitionArguments', "
"locations='directiveDefinitionLocations', "
"location='directiveDefinitionLocation')",
)
],
)
def test_directivedefinitionnode__repr__(directive_definition_node, expected):
assert directive_definition_node.__repr__() == expected
| [
"[email protected]"
] | |
3273285dc5118a47952c40dfdd26e29bd612aa47 | 46f03a8353b3fd0cd1ca35e0d322c4a53649596b | /try.py | 193887977e7feaeaa8f466637561399d7a348948 | [] | no_license | dragikamov/Video_Converter | d7d73a948853c99840606b89fc79dbcf8e1bde97 | e0233f9c190618e30bb85bcfa9df881f0eee058e | refs/heads/master | 2020-04-30T15:50:35.037923 | 2019-03-30T22:35:29 | 2019-03-30T22:35:29 | 176,931,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,925 | py | import cv2
import numpy as np
import os
from canny_edge import *
import threading
from os.path import isfile, join
# Function for converting an image to grayscale
def rgb2gray(rgb):
return np.dot(rgb[...,:3], [0.299, 0.587, 0.114])
# Export of video
def exportVid():
frame_array = []
files = [f for f in os.listdir('data/') if isfile(join('data/', f))]
files.sort(key = lambda x: int(x[5:-4]))
for i in range(len(files)):
filename = 'data/' + files[i]
img = cv2.imread(filename)
height, width, _ = img.shape
size = (width,height)
print(filename)
frame_array.append(img)
fourcc = cv2.VideoWriter_fourcc(*'DIVX')
out = cv2.VideoWriter('export.avi', fourcc, 24.0, (width,height))
for i in range(len(frame_array)):
out.write(frame_array[i])
out.release()
def thread(i, imgs):
    # Run edge detection for one batch of frames in parallel, one worker
    # thread per frame. `i` is the index of the frame preceding this batch,
    # so the k-th frame of the batch is saved as frame number i + k + 1.
    # (The original unrolled 60 hard-coded threads and numbered frames by
    # batch index, which made consecutive batches overwrite each other.)
    threads = []
    for k, img in enumerate(imgs):
        t = threading.Thread(target=detect, args=(img, i + k + 1))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
# Loading the video into python
cap = cv2.VideoCapture('bunny.mp4')
# Making a folder for the edited frames
try:
if not os.path.exists('data'):
os.makedirs('data')
except OSError:
print ('Error: Creating directory of data')
currentFrame = 0
imgs = []
height = 0
width = 0
n = 0
while(True):
# Capture frame-by-frame
ret, frame = cap.read()
if not ret:
        # Process any leftover frames that do not fill a complete batch
        if len(imgs) != 0:
            start = currentFrame - len(imgs)
            for i in range(len(imgs)):
                detect(imgs[i], start + i + 1)
break
# Converting the frame to grayscale and adding it to a list
name = './data/frame' + str(currentFrame) + '.jpg'
print ('Slicing and converting to grayscale...' + name)
imgs.append(rgb2gray(frame))
    # Dispatch a complete batch of 60 frames to the worker threads; pass the
    # index of the frame preceding the batch so numbering stays contiguous.
    if len(imgs) == 60:
        thread(currentFrame - 59, imgs)
        imgs = []
# Find height and width
height, width, _ = frame.shape
currentFrame += 1
image_folder = 'data'
images = [img for img in os.listdir(image_folder) if img.endswith(".jpg")]
frame = cv2.imread(os.path.join(image_folder, images[0]))
height, width, _ = frame.shape
exportVid()
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
ee0ea350d13c32438c662a8a258423d9b8287956 | 20c4a239e000b15131251d372ccad9110063a961 | /setup.py | 91ea45b7093ebde7a34cf7d5eb933f7529893fdf | [
"MIT"
] | permissive | Partidani/hdlConvertor | 9d0e382e6e087ac240502538b63f8667004a7715 | 36d3b58e2641e39c323ed9ee337135e49c64d076 | refs/heads/master | 2023-04-06T00:03:31.505727 | 2021-04-19T07:28:25 | 2021-04-19T07:28:25 | 366,418,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,810 | py | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
from setuptools import find_packages
try:
from skbuild import setup
except ImportError:
raise ImportError("Missing scikit-build, (should be automatically installed by pip)")
import sys
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, "README.md")) as f:
long_description = f.read()
deps = ["typing", "future"] if sys.version_info[0] == 2 else []
setup(
cmake_args=[
# '-DCMAKE_BUILD_TYPE=Debug'
],
name='hdlConvertor',
version='2.2',
description='VHDL and System Verilog parser written in c++',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/Nic30/hdlConvertor',
author='Michal Orsak',
author_email='[email protected]',
keywords=['hdl', 'vhdl', 'verilog', 'system verilog',
'parser', 'preprocessor', 'antlr4'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Topic :: Software Development :: Build Tools',
'Programming Language :: C++',
'Programming Language :: Cython',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
],
install_requires=[
'hdlConvertorAst>=0.7',
] + deps,
license="MIT",
packages=find_packages(exclude=["tests", ]),
test_suite="tests.main_test_suite",
test_runner="tests:TimeLoggingTestRunner",
tests_require=deps,
)
| [
"[email protected]"
] | |
8bfa5c02a3089abb03156a6609bfed1a989474e9 | d5f8ca3c13f681d147b7614f1902df7ba34e06f9 | /Graduate/model/densenet.py | 38359413ab29892a7c8f412c5fc1741039a65696 | [] | no_license | hhjung1202/OwnAdaptation | 29a6c0a603ab9233baf293096fb9e7e956647a10 | 50805730254419f090f4854387be79648a01fbb4 | refs/heads/master | 2021-06-25T22:31:15.437642 | 2020-11-26T18:19:55 | 2020-11-26T18:19:55 | 176,670,379 | 1 | 0 | null | 2020-06-11T07:35:55 | 2019-03-20T06:36:19 | Python | UTF-8 | Python | false | false | 7,429 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
from torch import Tensor
import itertools
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
class _Gate_selection(nn.Sequential):
phase = 2
def __init__(self, num_input_features, growth_rate, count, reduction=4):
super(_Gate_selection, self).__init__()
self.actual = (count+1) // 2
LongTensor = torch.cuda.LongTensor if torch.cuda.is_available() else torch.LongTensor
self.init = LongTensor([i for i in range(num_input_features)]).view(1, -1)
s = num_input_features
arr = []
for j in range(count):
arr += [[i for i in range(s, s + growth_rate)]]
s+=growth_rate
self.arr = LongTensor(arr)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
channels = num_input_features + growth_rate * count
self.fc1 = nn.Linear(channels, channels//reduction)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Linear(channels//reduction, count)
self.sigmoid = nn.Sigmoid()
self.flat = Flatten()
def forward(self, x, x_norm):
b, _, w, h = x_norm.size()
out = self.avg_pool(x_norm) # batch, channel 합친거, w, h
out = self.flat(out)
out = self.relu(self.fc1(out))
out = self.sigmoid(self.fc2(out))
_, sort = out.sort()
indices = sort[:,:self.actual] # batch, sort # shuffle
indices = indices[:, torch.randperm(indices.size(1))]
select = self.init.repeat(b,1)
select = torch.cat([select, self.arr[indices].view(b,-1)], 1)
select = select.view(select.size(0), -1, 1, 1).repeat(1,1,w,h)
x = x.gather(1, select)
return x
class _Bottleneck(nn.Sequential):
def __init__(self, num_input_features, growth_rate, count=1):
super(_Bottleneck, self).__init__()
self.norm1 = nn.BatchNorm2d(num_input_features)
self.relu = nn.ReLU(inplace=True)
self.conv1 = nn.Conv2d(num_input_features, 4 * growth_rate,
kernel_size=1, stride=1, bias=False)
self.norm2 = nn.BatchNorm2d(4 * growth_rate)
self.conv2 = nn.Conv2d(4 * growth_rate, growth_rate,
kernel_size=3, stride=1, padding=1, bias=False)
self.count = count
def forward(self, x):
if isinstance(x, Tensor):
x = [x]
out = torch.cat(x,1)
out = self.norm1(out)
out = self.relu(out)
out = self.conv1(out)
out = self.norm2(out)
out = self.relu(out)
out = self.conv2(out)
return out
class _Basic(nn.Sequential):
def __init__(self, num_input_features, growth_rate):
super(_Basic, self).__init__()
self.norm1 = nn.BatchNorm2d(num_input_features)
self.relu = nn.ReLU(inplace=True)
self.conv1 = nn.Conv2d(num_input_features, growth_rate,
kernel_size=3, stride=1, padding=1, bias=False)
self.count = count
def forward(self, x):
if isinstance(x, Tensor):
x = [x]
out = torch.cat(x,1)
out = self.norm1(out)
out = self.relu(out)
out = self.conv1(out)
return out
class _DenseLayer(nn.Module):
def __init__(self, num_input_features, growth_rate, num_layers, Block):
super(_DenseLayer, self).__init__()
self.num_layers = num_layers
self.init_block = Block(num_input_features, growth_rate)
for i in range(1, num_layers):
j = (i-1)//2 + 1
setattr(self, 'layer{}'.format(i), Block(num_input_features + growth_rate * j, growth_rate))
setattr(self, 'norm{}'.format(i), nn.BatchNorm2d(num_input_features + growth_rate * (i+1)))
setattr(self, 'gate{}'.format(i), _Gate_selection(num_input_features, growth_rate, i+1, reduction=4))
def forward(self, x):
out = self.init_block(x)
x = [x] + [out]
out = torch.cat(x,1)
for i in range(1, self.num_layers):
out = getattr(self, 'layer{}'.format(i))(out)
x += [out]
x_cat = torch.cat(x,1)
x_norm = getattr(self, 'norm{}'.format(i))(x_cat)
out = getattr(self, 'gate{}'.format(i))(x_cat, x_norm)
return x_cat
class _Transition(nn.Sequential):
def __init__(self, num_input_features, tr_features):
super(_Transition, self).__init__()
self.norm = nn.BatchNorm2d(tr_features)
self.relu = nn.ReLU(inplace=True)
self.conv = nn.Conv2d(tr_features, num_input_features // 2,
kernel_size=1, stride=1, bias=False)
self.pool = nn.AvgPool2d(kernel_size=2, stride=2)
def forward(self, x):
# out = torch.cat(x,1)
out = self.norm(x)
out = self.relu(out)
out = self.conv(out)
out = self.pool(out)
return out
class DenseNet(nn.Module):
def __init__(self, growth_rate=12,
num_init_features=24, num_classes=10, is_bottleneck=True, layer=28):
super(DenseNet, self).__init__()
if layer is 28:
block_config=[4,4,4]
elif layer is 40:
block_config=[6,6,6]
elif layer is 52:
block_config=[8,8,8]
elif layer is 64:
block_config=[10,10,10]
if is_bottleneck:
Block = _Bottleneck
else:
Block = _Basic
block_config = [2*x for x in block_config]
self.features = nn.Sequential()
self.features.add_module('conv0', nn.Conv2d(3, num_init_features, kernel_size=3, stride=1, padding=1, bias=False))
num_features = num_init_features
for i in range(len(block_config)):
self.features.add_module('layer%d' % (i + 1), _DenseLayer(num_features, growth_rate, block_config[i], Block))
tr_features = num_features + block_config[i] * growth_rate
num_features = num_features + block_config[i] * growth_rate // 2
if i != len(block_config) - 1:
self.features.add_module('transition%d' % (i + 1), _Transition(num_features, tr_features))
num_features = num_features // 2
# Final batch norm
self.norm = nn.BatchNorm2d(tr_features)
self.relu = nn.ReLU(inplace=True)
self.pool = nn.AvgPool2d(kernel_size=8, stride=1)
self.fc = nn.Linear(tr_features, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.constant_(m.bias, 0)
# Linear layer
# Official init from torch repo.
def forward(self, x):
out = self.features(x)
# out = torch.cat(out,1)
out = self.norm(out)
out = self.relu(out)
out = self.pool(out)
out = out.view(out.size(0), -1)
out = self.fc(out)
return out
if __name__=='__main__':
x = torch.randn(4,3,32,32)
model = DenseNet(growth_rate=12, num_init_features=24, num_classes=10, is_bottleneck=True, layer=40)
y = model(x)
print(y.size()) | [
"[email protected]"
] | |
da39ff189fd2c0d2ba922949117085f9ce98e2fa | 85be450530138c8b66c513c4283bcb1d58caeeb0 | /apps/funcionarios/migrations/0005_funcionario_imagem.py | bc149c39e59bf25051a7e604642ca132a0e9a4c1 | [] | no_license | fgomesc/gestao_teste | 6be81a263fddb1b1e5d6a2d768387fc024e9bdc3 | b2890ffa99361dd30b002706c94d1e5299651315 | refs/heads/master | 2021-09-25T06:21:51.602878 | 2021-09-14T18:27:13 | 2021-09-14T18:27:13 | 236,030,673 | 0 | 0 | null | 2021-06-10T22:31:09 | 2020-01-24T15:42:59 | JavaScript | UTF-8 | Python | false | false | 446 | py | # Generated by Django 2.1.1 on 2018-11-17 12:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('funcionarios', '0004_auto_20181029_2313'),
]
operations = [
migrations.AddField(
model_name='funcionario',
name='imagem',
field=models.ImageField(default=1, upload_to='fotos'),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
041cf40053b8f029ba5b1f64754d2048cbb70f5e | 2af6a5c2d33e2046a1d25ae9dd66d349d3833940 | /res_bw/scripts/common/lib/idlelib/grepdialog.py | 05f4b74a7d37f75455c785428aa681b07d431a4b | [] | no_license | webiumsk/WOT-0.9.12-CT | e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2 | 2506e34bd6634ad500b6501f4ed4f04af3f43fa0 | refs/heads/master | 2021-01-10T01:38:38.080814 | 2015-11-11T00:08:04 | 2015-11-11T00:08:04 | 45,803,240 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 4,154 | py | # 2015.11.10 21:36:11 Central Europe (Standard Time)
# Embedded file name: scripts/common/Lib/idlelib/GrepDialog.py
import os
import fnmatch
import sys
from Tkinter import *
from idlelib import SearchEngine
from idlelib.SearchDialogBase import SearchDialogBase
def grep(text, io = None, flist = None):
root = text._root()
engine = SearchEngine.get(root)
if not hasattr(engine, '_grepdialog'):
engine._grepdialog = GrepDialog(root, engine, flist)
dialog = engine._grepdialog
searchphrase = text.get('sel.first', 'sel.last')
dialog.open(text, searchphrase, io)
class GrepDialog(SearchDialogBase):
title = 'Find in Files Dialog'
icon = 'Grep'
needwrapbutton = 0
def __init__(self, root, engine, flist):
SearchDialogBase.__init__(self, root, engine)
self.flist = flist
self.globvar = StringVar(root)
self.recvar = BooleanVar(root)
def open(self, text, searchphrase, io = None):
SearchDialogBase.open(self, text, searchphrase)
if io:
path = io.filename or ''
else:
path = ''
dir, base = os.path.split(path)
head, tail = os.path.splitext(base)
if not tail:
tail = '.py'
self.globvar.set(os.path.join(dir, '*' + tail))
def create_entries(self):
SearchDialogBase.create_entries(self)
self.globent = self.make_entry('In files:', self.globvar)
def create_other_buttons(self):
f = self.make_frame()
btn = Checkbutton(f, anchor='w', variable=self.recvar, text='Recurse down subdirectories')
btn.pack(side='top', fill='both')
btn.select()
def create_command_buttons(self):
SearchDialogBase.create_command_buttons(self)
self.make_button('Search Files', self.default_command, 1)
def default_command(self, event = None):
prog = self.engine.getprog()
if not prog:
return
path = self.globvar.get()
if not path:
self.top.bell()
return
from idlelib.OutputWindow import OutputWindow
save = sys.stdout
try:
sys.stdout = OutputWindow(self.flist)
self.grep_it(prog, path)
finally:
sys.stdout = save
def grep_it(self, prog, path):
dir, base = os.path.split(path)
list = self.findfiles(dir, base, self.recvar.get())
list.sort()
self.close()
pat = self.engine.getpat()
print 'Searching %r in %s ...' % (pat, path)
hits = 0
for fn in list:
try:
with open(fn) as f:
for lineno, line in enumerate(f, 1):
if line[-1:] == '\n':
line = line[:-1]
if prog.search(line):
sys.stdout.write('%s: %s: %s\n' % (fn, lineno, line))
hits += 1
except IOError as msg:
print msg
print 'Hits found: %s\n(Hint: right-click to open locations.)' % hits if hits else 'No hits.'
def findfiles(self, dir, base, rec):
try:
names = os.listdir(dir or os.curdir)
except os.error as msg:
print msg
return []
list = []
subdirs = []
for name in names:
fn = os.path.join(dir, name)
if os.path.isdir(fn):
subdirs.append(fn)
elif fnmatch.fnmatch(name, base):
list.append(fn)
if rec:
for subdir in subdirs:
list.extend(self.findfiles(subdir, base, rec))
return list
def close(self, event = None):
if self.top:
self.top.grab_release()
self.top.withdraw()
if __name__ == '__main__':
import unittest
unittest.main('idlelib.idle_test.test_grep', verbosity=2, exit=False)
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\idlelib\grepdialog.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:36:11 Central Europe (Standard Time)
| [
"[email protected]"
] | |
82792a3be9979e79865b11f08d068150204766e1 | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /tests/components/select/test_device_condition.py | 7c1dc443e5626cdb246bbc9a3f633cbd756d466c | [
"Apache-2.0"
] | permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 8,288 | py | """The tests for Select device conditions."""
from __future__ import annotations
import pytest
import voluptuous_serialize
from homeassistant.components import automation
from homeassistant.components.device_automation import DeviceAutomationType
from homeassistant.components.select import DOMAIN
from homeassistant.components.select.device_condition import (
async_get_condition_capabilities,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import (
config_validation as cv,
device_registry,
entity_registry,
)
from homeassistant.helpers.entity import EntityCategory
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass: HomeAssistant) -> device_registry.DeviceRegistry:
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass: HomeAssistant) -> entity_registry.EntityRegistry:
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass: HomeAssistant) -> list[ServiceCall]:
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(
hass: HomeAssistant,
device_reg: device_registry.DeviceRegistry,
entity_reg: entity_registry.EntityRegistry,
) -> None:
"""Test we get the expected conditions from a select."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "selected_option",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
"metadata": {"secondary": False},
}
]
conditions = await async_get_device_automations(
hass, DeviceAutomationType.CONDITION, device_entry.id
)
assert_lists_same(conditions, expected_conditions)
@pytest.mark.parametrize(
"hidden_by,entity_category",
(
(entity_registry.RegistryEntryHider.INTEGRATION, None),
(entity_registry.RegistryEntryHider.USER, None),
(None, EntityCategory.CONFIG),
(None, EntityCategory.DIAGNOSTIC),
),
)
async def test_get_conditions_hidden_auxiliary(
hass,
device_reg,
entity_reg,
hidden_by,
entity_category,
):
"""Test we get the expected conditions from a hidden or auxiliary entity."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
"5678",
device_id=device_entry.id,
entity_category=entity_category,
hidden_by=hidden_by,
)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition,
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
"metadata": {"secondary": True},
}
for condition in ["selected_option"]
]
conditions = await async_get_device_automations(
hass, DeviceAutomationType.CONDITION, device_entry.id
)
assert_lists_same(conditions, expected_conditions)
async def test_if_selected_option(
hass: HomeAssistant, calls: list[ServiceCall]
) -> None:
"""Test for selected_option conditions."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "select.entity",
"type": "selected_option",
"option": "option1",
}
],
"action": {
"service": "test.automation",
"data": {
"result": "option1 - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "select.entity",
"type": "selected_option",
"option": "option2",
}
],
"action": {
"service": "test.automation",
"data": {
"result": "option2 - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
# Test with non existing entity
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(
"select.entity", "option1", {"options": ["option1", "option2"]}
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["result"] == "option1 - event - test_event1"
hass.states.async_set(
"select.entity", "option2", {"options": ["option1", "option2"]}
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["result"] == "option2 - event - test_event2"
async def test_get_condition_capabilities(hass: HomeAssistant) -> None:
"""Test we get the expected capabilities from a select condition."""
config = {
"platform": "device",
"domain": DOMAIN,
"type": "selected_option",
"entity_id": "select.test",
"option": "option1",
}
# Test when entity doesn't exists
capabilities = await async_get_condition_capabilities(hass, config)
assert capabilities
assert "extra_fields" in capabilities
assert voluptuous_serialize.convert(
capabilities["extra_fields"], custom_serializer=cv.custom_serializer
) == [
{
"name": "option",
"required": True,
"type": "select",
"options": [],
},
{
"name": "for",
"optional": True,
"type": "positive_time_period_dict",
},
]
# Mock an entity
hass.states.async_set("select.test", "option1", {"options": ["option1", "option2"]})
# Test if we get the right capabilities now
capabilities = await async_get_condition_capabilities(hass, config)
assert capabilities
assert "extra_fields" in capabilities
assert voluptuous_serialize.convert(
capabilities["extra_fields"], custom_serializer=cv.custom_serializer
) == [
{
"name": "option",
"required": True,
"type": "select",
"options": [("option1", "option1"), ("option2", "option2")],
},
{
"name": "for",
"optional": True,
"type": "positive_time_period_dict",
},
]
| [
"[email protected]"
] | |
8479fc36a34cd92829460ba09dac9233003f21e2 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_145/588.py | bc85913e20b14805e33519ef4c6568305d07637f | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,649 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import math
from fractions import gcd  # Python 2 (this file also uses xrange/StringIO); on Python 3 use math.gcd
def read(f):
n = int(f.readline().strip())
for i in xrange(n):
p, q = map(int, f.readline().strip().split('/'))
yield p, q
def main(f):
    for i, (p, q) in enumerate(read(f)):
        g = gcd(p, q)            # reduce the fraction first: 123/31488 is really 1/256
        p, q = p // g, q // g
        if q & (q - 1) != 0:     # after reduction, q must be a power of two
            print("Case #{0}: impossible".format(i+1))
        else:
            n = int(math.ceil((math.log(q) - math.log(p)) / math.log(2)))
            print("Case #{0}: {1}".format(i+1, n))
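# Worked check against the embedded tests below: 3/4 is already reduced, 4 = 2**2,
# n = ceil(log(4/3)/log 2) = 1 (Case #2); 123/31488 reduces to 1/256, n = 8 (Case #5).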
_input = """
5
1/2
3/4
1/4
2/23
123/31488
""".strip()
_output = """
Case #1: 1
Case #2: 1
Case #3: 2
Case #4: impossible
Case #5: 8
""".strip()
def test_main(compare=False):
import sys
from difflib import unified_diff
from StringIO import StringIO
if compare:
stdout = sys.stdout
sys.stdout = StringIO()
try:
main(StringIO(_input))
result = sys.stdout.getvalue().strip()
finally:
sys.stdout = stdout
print(result)
for line in unified_diff(result.splitlines(), _output.splitlines(),
'Output', 'Expect', lineterm=''):
print(line)
if result == _output:
print("OK")
else:
print("NG")
else:
main(StringIO(_input))
if __name__ == '__main__':
test = False
compare = False
if test:
test_main(compare)
else:
import sys
if len(sys.argv) > 1:
f = open(sys.argv[1])
main(f)
f.close()
else:
main(sys.stdin)
| [
"[email protected]"
] | |
729aafbd622a90e8bebf023ef2424d3fcf61b70c | afea9757be324c8def68955a12be11d71ce6ad35 | /willyanealves/services/migrations/0014_auto_20201209_1623.py | aa5563d97e9d3dbc154b4da10bedc96ae1265e5e | [] | no_license | bergpb/willyane-alves | c713cac3ec3a68005f3b8145985693d2477ba706 | 8b2b9922ba35bf2043f2345228f03d80dbd01098 | refs/heads/master | 2023-02-10T19:57:50.893172 | 2021-01-11T16:17:14 | 2021-01-11T16:17:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # Generated by Django 3.1.2 on 2020-12-09 19:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('stock', '0001_initial'),
('services', '0013_remove_kititem_price'),
]
operations = [
migrations.AlterField(
model_name='kititem',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stockitem', to='stock.stock'),
),
]
| [
"[email protected]"
] | |
a4de72f9bc8c298600db4419ce1778b70f3c07b5 | 89dedd7f3c7acc81d12e2bcb2e716f9af9e5fa04 | /third_party/WebKit/Source/devtools/scripts/concatenate_application_code.py | e6984e04864e14767f6fd64ff23f1ddfb871c822 | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft"
] | permissive | bino7/chromium | 8d26f84a1b6e38a73d1b97fea6057c634eff68cb | 4666a6bb6fdcb1114afecf77bdaa239d9787b752 | refs/heads/master | 2022-12-22T14:31:53.913081 | 2016-09-06T10:05:11 | 2016-09-06T10:05:11 | 67,410,510 | 1 | 3 | BSD-3-Clause | 2022-12-17T03:08:52 | 2016-09-05T10:11:59 | null | UTF-8 | Python | false | false | 9,961 | py | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Release:
- Concatenates autostart modules, application modules' module.json descriptors,
and the application loader into a single script.
- Builds app.html referencing the application script.
Debug:
- Copies the module directories into their destinations.
- Copies app.html as-is.
"""
from cStringIO import StringIO
from os import path
from os.path import join
from modular_build import read_file, write_file, bail_error
import copy
import modular_build
import os
import re
import shutil
import sys
try:
import simplejson as json
except ImportError:
import json
import rjsmin
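# Schematically, a release build of an application named "inspector" produces
# (a sketch inferred from the builders below):
#   <output_dir>/inspector.html               references <output_dir>/inspector.js
#   <output_dir>/inspector.js                 Runtime.js + autostart modules + app loader
#   <output_dir>/<module>/<module>_module.js  one concatenated file per non-autostart module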
def resource_source_url(url):
return '\n/*# sourceURL=' + url + ' */'
def minify_js(javascript):
return rjsmin.jsmin(javascript)
def concatenated_module_filename(module_name, output_dir):
return join(output_dir, module_name + '/' + module_name + '_module.js')
def symlink_or_copy_file(src, dest, safe=False):
if safe and path.exists(dest):
os.remove(dest)
if hasattr(os, 'symlink'):
os.symlink(src, dest)
else:
shutil.copy(src, dest)
def symlink_or_copy_dir(src, dest):
if path.exists(dest):
shutil.rmtree(dest)
for src_dir, dirs, files in os.walk(src):
subpath = path.relpath(src_dir, src)
dest_dir = path.normpath(join(dest, subpath))
os.mkdir(dest_dir)
for name in files:
src_name = join(os.getcwd(), src_dir, name)
dest_name = join(dest_dir, name)
symlink_or_copy_file(src_name, dest_name)
class AppBuilder:
def __init__(self, application_name, descriptors, application_dir, output_dir):
self.application_name = application_name
self.descriptors = descriptors
self.application_dir = application_dir
self.output_dir = output_dir
def app_file(self, extension):
return self.application_name + '.' + extension
def core_resource_names(self):
result = []
for module in self.descriptors.sorted_modules():
if self.descriptors.application[module].get('type') != 'autostart':
continue
resources = self.descriptors.modules[module].get('resources')
if not resources:
continue
for resource_name in resources:
result.append(path.join(module, resource_name))
return result
# Outputs:
# <app_name>.html
# <app_name>.js
# <module_name>_module.js
class ReleaseBuilder(AppBuilder):
def __init__(self, application_name, descriptors, application_dir, output_dir):
AppBuilder.__init__(self, application_name, descriptors, application_dir, output_dir)
def build_app(self):
if self.descriptors.has_html:
self._build_html()
self._build_app_script()
for module in filter(lambda desc: (not desc.get('type') or desc.get('type') == 'remote'), self.descriptors.application.values()):
self._concatenate_dynamic_module(module['name'])
def _build_html(self):
html_name = self.app_file('html')
output = StringIO()
with open(join(self.application_dir, html_name), 'r') as app_input_html:
for line in app_input_html:
if '<script ' in line or '<link ' in line:
continue
if '</head>' in line:
output.write(self._generate_include_tag(self.app_file('js')))
output.write(line)
write_file(join(self.output_dir, html_name), output.getvalue())
output.close()
def _build_app_script(self):
script_name = self.app_file('js')
output = StringIO()
self._concatenate_application_script(output)
write_file(join(self.output_dir, script_name), minify_js(output.getvalue()))
output.close()
def _generate_include_tag(self, resource_path):
if (resource_path.endswith('.js')):
return ' <script type="text/javascript" src="%s"></script>\n' % resource_path
else:
assert resource_path
def _release_module_descriptors(self):
module_descriptors = self.descriptors.modules
result = []
for name in module_descriptors:
module = copy.copy(module_descriptors[name])
module_type = self.descriptors.application[name].get('type')
# Clear scripts, as they are not used at runtime
# (only the fact of their presence is important).
resources = module.get('resources', None)
if module.get('scripts') or resources:
if module_type == 'autostart':
# Autostart modules are already baked in.
del module['scripts']
else:
# Non-autostart modules are vulcanized.
module['scripts'] = [name + '_module.js']
# Resources are already baked into scripts.
if resources is not None:
del module['resources']
result.append(module)
return json.dumps(result)
def _write_module_resources(self, resource_names, output):
for resource_name in resource_names:
resource_name = path.normpath(resource_name).replace('\\', '/')
output.write('Runtime.cachedResources["%s"] = "' % resource_name)
resource_content = read_file(path.join(self.application_dir, resource_name)) + resource_source_url(resource_name)
resource_content = resource_content.replace('\\', '\\\\')
resource_content = resource_content.replace('\n', '\\n')
resource_content = resource_content.replace('"', '\\"')
output.write(resource_content)
output.write('";\n')
def _concatenate_autostart_modules(self, output):
non_autostart = set()
sorted_module_names = self.descriptors.sorted_modules()
for name in sorted_module_names:
desc = self.descriptors.modules[name]
name = desc['name']
type = self.descriptors.application[name].get('type')
if type == 'autostart':
deps = set(desc.get('dependencies', []))
non_autostart_deps = deps & non_autostart
if len(non_autostart_deps):
bail_error('Non-autostart dependencies specified for the autostarted module "%s": %s' % (name, non_autostart_deps))
output.write('\n/* Module %s */\n' % name)
modular_build.concatenate_scripts(desc.get('scripts'), join(self.application_dir, name), self.output_dir, output)
else:
non_autostart.add(name)
def _concatenate_application_script(self, output):
runtime_contents = read_file(join(self.application_dir, 'Runtime.js'))
runtime_contents = re.sub('var allDescriptors = \[\];', 'var allDescriptors = %s;' % self._release_module_descriptors().replace('\\', '\\\\'), runtime_contents, 1)
output.write('/* Runtime.js */\n')
output.write(runtime_contents)
output.write('\n/* Autostart modules */\n')
self._concatenate_autostart_modules(output)
output.write('/* Application descriptor %s */\n' % self.app_file('json'))
output.write('applicationDescriptor = ')
output.write(self.descriptors.application_json())
output.write(';\n/* Core resources */\n')
self._write_module_resources(self.core_resource_names(), output)
output.write('\n/* Application loader */\n')
output.write(read_file(join(self.application_dir, self.app_file('js'))))
def _concatenate_dynamic_module(self, module_name):
module = self.descriptors.modules[module_name]
scripts = module.get('scripts')
resources = self.descriptors.module_resources(module_name)
module_dir = join(self.application_dir, module_name)
output = StringIO()
if scripts:
modular_build.concatenate_scripts(scripts, module_dir, self.output_dir, output)
if resources:
self._write_module_resources(resources, output)
output_file_path = concatenated_module_filename(module_name, self.output_dir)
write_file(output_file_path, minify_js(output.getvalue()))
output.close()
# Outputs:
# <app_name>.html as-is
# <app_name>.js as-is
# <module_name>/<all_files>
class DebugBuilder(AppBuilder):
def __init__(self, application_name, descriptors, application_dir, output_dir):
AppBuilder.__init__(self, application_name, descriptors, application_dir, output_dir)
def build_app(self):
if self.descriptors.has_html:
self._build_html()
js_name = self.app_file('js')
src_name = join(os.getcwd(), self.application_dir, js_name)
symlink_or_copy_file(src_name, join(self.output_dir, js_name), True)
for module_name in self.descriptors.modules:
module = self.descriptors.modules[module_name]
input_module_dir = join(self.application_dir, module_name)
output_module_dir = join(self.output_dir, module_name)
symlink_or_copy_dir(input_module_dir, output_module_dir)
def _build_html(self):
html_name = self.app_file('html')
symlink_or_copy_file(join(os.getcwd(), self.application_dir, html_name), join(self.output_dir, html_name), True)
def build_application(application_name, loader, application_dir, output_dir, release_mode):
descriptors = loader.load_application(application_name + '.json')
if release_mode:
builder = ReleaseBuilder(application_name, descriptors, application_dir, output_dir)
else:
builder = DebugBuilder(application_name, descriptors, application_dir, output_dir)
builder.build_app()
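# Example wiring (hypothetical paths; 'loader' must provide load_application() as used above):
#   build_application('inspector', loader, 'front_end', 'out/Release/inspector', release_mode=True)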
| [
"[email protected]"
] | |
c91563eee6c60960746a34671256bdc380a91e08 | af3ec207381de315f4cb6dddba727d16d42d6c57 | /dialogue-engine/test/programytest/storage/stores/nosql/mongo/store/test_sets.py | b4a1ce00829727f91194650b0127c7d2bb059299 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mcf-yuichi/cotoba-agent-oss | 02a5554fe81ce21517f33229101013b6487f5404 | ce60833915f484c4cbdc54b4b8222d64be4b6c0d | refs/heads/master | 2023-01-12T20:07:34.364188 | 2020-11-11T00:55:16 | 2020-11-11T00:55:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,711 | py | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
from programytest.storage.asserts.store.assert_sets import SetStoreAsserts
from programy.storage.stores.nosql.mongo.store.sets import MongoSetsStore
from programy.storage.stores.nosql.mongo.engine import MongoStorageEngine
from programy.storage.stores.nosql.mongo.config import MongoStorageConfiguration
import programytest.storage.engines as Engines
class MongoSetsStoreTests(SetStoreAsserts):
@unittest.skipIf(Engines.mongo is False, Engines.mongo_disabled)
def test_initialise(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assertEqual(store.storage_engine, engine)
@unittest.skipIf(Engines.mongo is False, Engines.mongo_disabled)
def test_set_storage(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_set_storage(store)
@unittest.skipIf(Engines.mongo is False, Engines.mongo_disabled)
def test_upload_from_text(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_upload_from_text(store)
@unittest.skipIf(Engines.mongo is False, Engines.mongo_disabled)
def test_upload_from_text_file(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_upload_from_text_file(store)
@unittest.skipIf(Engines.mongo is False, Engines.mongo_disabled)
def test_upload_text_files_from_directory_no_subdir(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_upload_text_files_from_directory_no_subdir(store)
@unittest.skip("CSV not supported yet")
def test_upload_from_csv_file(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_upload_from_csv_file(store)
@unittest.skip("CSV not supported yet")
def test_upload_csv_files_from_directory_with_subdir(self):
config = MongoStorageConfiguration()
engine = MongoStorageEngine(config)
engine.initialise()
store = MongoSetsStore(engine)
self.assert_upload_csv_files_from_directory_with_subdir(store)
| [
"[email protected]"
] | |
3aea4843be237c4dcdce35ea871082ef159c6872 | b9029f7e08bb93c435290e9e01dba3507714bafc | /tasks.py | a64b8ddab455bd356781035556f67836cb43532a | [
"BSD-3-Clause"
] | permissive | njwardhan/colour | 3a4bf7994e25f02e15aa16bc03d35d7f6cc61a50 | 60679360c3990bc549b5f947bfeb621383e18b5e | refs/heads/master | 2022-09-29T06:17:36.380542 | 2020-01-25T05:10:15 | 2020-01-25T05:10:15 | 253,715,920 | 0 | 0 | null | 2020-04-07T07:14:32 | 2020-04-07T07:14:31 | null | UTF-8 | Python | false | false | 13,629 | py | # -*- coding: utf-8 -*-
"""
Invoke - Tasks
==============
"""
from __future__ import unicode_literals
import sys
try:
import biblib.bib
except ImportError:
pass
import fnmatch
import os
import re
import toml
import uuid
from invoke import task
import colour
from colour.utilities import message_box
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '[email protected]'
__status__ = 'Production'
__all__ = [
'APPLICATION_NAME', 'APPLICATION_VERSION', 'PYTHON_PACKAGE_NAME',
'PYPI_PACKAGE_NAME', 'BIBLIOGRAPHY_NAME', 'clean', 'formatting', 'tests',
'quality', 'examples', 'preflight', 'docs', 'todo', 'requirements',
'build', 'virtualise', 'tag', 'release', 'sha256'
]
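# Typical invocations from the repository root (a sketch; the task names come from
# the @task definitions below, and flag spellings assume Invoke's standard CLI):
#   invoke clean
#   invoke preflight
#   invoke docs
#   invoke release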
APPLICATION_NAME = colour.__application_name__
APPLICATION_VERSION = colour.__version__
PYTHON_PACKAGE_NAME = colour.__name__
PYPI_PACKAGE_NAME = 'colour-science'
BIBLIOGRAPHY_NAME = 'BIBLIOGRAPHY.bib'
@task
def clean(ctx, docs=True, bytecode=False):
"""
Cleans the project.
Parameters
----------
ctx : invoke.context.Context
Context.
docs : bool, optional
Whether to clean the *docs* directory.
bytecode : bool, optional
Whether to clean the bytecode files, e.g. *.pyc* files.
Returns
-------
bool
Task success.
"""
message_box('Cleaning project...')
patterns = ['build', '*.egg-info', 'dist']
if docs:
patterns.append('docs/_build')
patterns.append('docs/generated')
if bytecode:
patterns.append('**/*.pyc')
for pattern in patterns:
ctx.run("rm -rf {}".format(pattern))
@task
def formatting(ctx, yapf=False, asciify=True, bibtex=True):
"""
Formats the codebase with *Yapf*, converts unicode characters to ASCII and
cleanup the "BibTeX" file.
Parameters
----------
ctx : invoke.context.Context
Context.
yapf : bool, optional
Whether to format the codebase with *Yapf*.
asciify : bool, optional
Whether to convert unicode characters to ASCII.
bibtex : bool, optional
Whether to cleanup the *BibTeX* file.
Returns
-------
bool
Task success.
"""
if yapf:
message_box('Formatting codebase with "Yapf"...')
ctx.run('yapf -p -i -r --exclude \'.git\' .')
if asciify:
message_box('Converting unicode characters to ASCII...')
with ctx.cd('utilities'):
ctx.run('./unicode_to_ascii.py')
if bibtex and sys.version_info[:2] >= (3, 2):
message_box('Cleaning up "BibTeX" file...')
bibtex_path = BIBLIOGRAPHY_NAME
with open(bibtex_path) as bibtex_file:
bibtex = biblib.bib.Parser().parse(
bibtex_file.read()).get_entries()
for entry in sorted(bibtex.values(), key=lambda x: x.key):
try:
del entry['file']
except KeyError:
pass
for key, value in entry.items():
entry[key] = re.sub('(?<!\\\\)\\&', '\\&', value)
with open(bibtex_path, 'w') as bibtex_file:
for entry in bibtex.values():
bibtex_file.write(entry.to_bib())
bibtex_file.write('\n')
@task
def tests(ctx, nose=True):
"""
Runs the unit tests with *Nose* or *Pytest*.
Parameters
----------
ctx : invoke.context.Context
Context.
nose : bool, optional
Whether to use *Nose* or *Pytest*.
Returns
-------
bool
Task success.
"""
if nose:
message_box('Running "Nosetests"...')
ctx.run(
'nosetests --with-doctest --with-coverage --cover-package={0} {0}'.
format(PYTHON_PACKAGE_NAME),
env={'MPLBACKEND': 'AGG'})
else:
message_box('Running "Pytest"...')
ctx.run(
'py.test --disable-warnings --doctest-modules '
'--ignore={0}/examples {0}'.format(PYTHON_PACKAGE_NAME),
env={'MPLBACKEND': 'AGG'})
@task
def quality(ctx, flake8=True, rstlint=True):
"""
Checks the codebase with *Flake8* and lints various *restructuredText*
files with *rst-lint*.
Parameters
----------
ctx : invoke.context.Context
Context.
flake8 : bool, optional
Whether to check the codebase with *Flake8*.
rstlint : bool, optional
Whether to lint various *restructuredText* files with *rst-lint*.
Returns
-------
bool
Task success.
"""
if flake8:
message_box('Checking codebase with "Flake8"...')
ctx.run('flake8 {0} --exclude=examples'.format(PYTHON_PACKAGE_NAME))
if rstlint:
message_box('Linting "README.rst" file...')
ctx.run('rst-lint README.rst')
@task
def examples(ctx, plots=False):
"""
Runs the examples.
Parameters
----------
ctx : invoke.context.Context
Context.
plots : bool, optional
Whether to skip or only run the plotting examples: This a mutually
exclusive switch.
Returns
-------
bool
Task success.
"""
message_box('Running examples...')
for root, _dirnames, filenames in os.walk(
os.path.join(PYTHON_PACKAGE_NAME, 'examples')):
for filename in fnmatch.filter(filenames, '*.py'):
if not plots and ('plotting' in root or
'examples_interpolation' in filename or
'examples_contrast' in filename):
continue
if plots and ('plotting' not in root and
'examples_interpolation' not in filename and
'examples_contrast' not in filename):
continue
ctx.run('python {0}'.format(os.path.join(root, filename)))
@task(formatting, tests, quality, examples)
def preflight(ctx):
"""
Performs the preflight tasks, i.e. *formatting*, *tests*, *quality*, and
*examples*.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Finishing "Preflight"...')
@task
def docs(ctx, plots=True, html=True, pdf=True):
"""
Builds the documentation.
Parameters
----------
ctx : invoke.context.Context
Context.
plots : bool, optional
Whether to generate the documentation plots.
html : bool, optional
Whether to build the *HTML* documentation.
pdf : bool, optional
Whether to build the *PDF* documentation.
Returns
-------
bool
Task success.
"""
if plots:
with ctx.cd('utilities'):
message_box('Generating plots...')
ctx.run('./generate_plots.py')
with ctx.prefix('export COLOUR_SCIENCE_DOCUMENTATION_BUILD=True'):
with ctx.cd('docs'):
if html:
message_box('Building "HTML" documentation...')
ctx.run('make html')
if pdf:
message_box('Building "PDF" documentation...')
ctx.run('make latexpdf')
@task
def todo(ctx):
"""
Export the TODO items.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Exporting "TODO" items...')
with ctx.cd('utilities'):
ctx.run('./export_todo.py')
@task
def requirements(ctx):
"""
Export the *requirements.txt* file.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Exporting "requirements.txt" file...')
ctx.run('poetry run pip freeze | '
'egrep -v "github.com/colour-science|enum34" '
'> requirements.txt')
@task(clean, preflight, docs, todo, requirements)
def build(ctx):
"""
Builds the project and runs dependency tasks, i.e. *docs*, *todo*, and
*preflight*.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Building...')
pyproject_content = toml.load('pyproject.toml')
pyproject_content['tool']['poetry']['name'] = PYPI_PACKAGE_NAME
pyproject_content['tool']['poetry']['packages'] = [{
'include': PYTHON_PACKAGE_NAME,
'from': '.'
}]
with open('pyproject.toml', 'w') as pyproject_file:
toml.dump(pyproject_content, pyproject_file)
ctx.run('poetry build')
ctx.run('git checkout -- pyproject.toml')
with ctx.cd('dist'):
ctx.run('tar -xvf {0}-{1}.tar.gz'.format(PYPI_PACKAGE_NAME,
APPLICATION_VERSION))
ctx.run('cp {0}-{1}/setup.py ../'.format(PYPI_PACKAGE_NAME,
APPLICATION_VERSION))
ctx.run('rm -rf {0}-{1}'.format(PYPI_PACKAGE_NAME,
APPLICATION_VERSION))
with open('setup.py') as setup_file:
source = setup_file.read()
setup_kwargs = []
def sub_callable(match):
setup_kwargs.append(match)
return ''
template = """
setup({0}
)
"""
source = re.sub(
'setup_kwargs = {(.*)}.*setup\\(\\*\\*setup_kwargs\\)',
sub_callable,
source,
flags=re.DOTALL)[:-2]
setup_kwargs = setup_kwargs[0].group(1).splitlines()
for i, line in enumerate(setup_kwargs):
setup_kwargs[i] = re.sub('^\\s*(\'(\\w+)\':\\s?)', ' \\2=', line)
if setup_kwargs[i].strip().startswith('long_description'):
setup_kwargs[i] = (
' long_description=open(\'README.rst\').read(),')
source += template.format('\n'.join(setup_kwargs))
with open('setup.py', 'w') as setup_file:
setup_file.write(source)
@task
def virtualise(ctx, tests=True):
"""
Create a virtual environment for the project build.
Parameters
----------
ctx : invoke.context.Context
Context.
tests : bool, optional
Whether to run tests on the virtual environment.
Returns
-------
bool
Task success.
"""
unique_name = '{0}-{1}'.format(PYPI_PACKAGE_NAME, uuid.uuid1())
with ctx.cd('dist'):
ctx.run('tar -xvf {0}-{1}.tar.gz'.format(PYPI_PACKAGE_NAME,
APPLICATION_VERSION))
ctx.run('mv {0}-{1} {2}'.format(PYPI_PACKAGE_NAME, APPLICATION_VERSION,
unique_name))
with ctx.cd(unique_name):
ctx.run('poetry env use 3')
ctx.run('poetry install --extras "optional plotting"')
ctx.run('source $(poetry env info -p)/bin/activate')
ctx.run('python -c "import imageio;'
'imageio.plugins.freeimage.download()"')
if tests:
ctx.run('poetry run nosetests', env={'MPLBACKEND': 'AGG'})
@task
def tag(ctx):
"""
    Tags the repository according to the defined version using *git-flow*.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Tagging...')
result = ctx.run('git rev-parse --abbrev-ref HEAD', hide='both')
assert result.stdout.strip() == 'develop', (
'Are you still on a feature or master branch?')
with open(os.path.join(PYTHON_PACKAGE_NAME, '__init__.py')) as file_handle:
file_content = file_handle.read()
major_version = re.search("__major_version__\\s+=\\s+'(.*)'",
file_content).group(1)
minor_version = re.search("__minor_version__\\s+=\\s+'(.*)'",
file_content).group(1)
change_version = re.search("__change_version__\\s+=\\s+'(.*)'",
file_content).group(1)
version = '.'.join((major_version, minor_version, change_version))
result = ctx.run('git ls-remote --tags upstream', hide='both')
remote_tags = result.stdout.strip().split('\n')
tags = set()
for remote_tag in remote_tags:
tags.add(
remote_tag.split('refs/tags/')[1].replace('refs/tags/', '^{}'))
tags = sorted(list(tags))
assert 'v{0}'.format(version) not in tags, (
'A "{0}" "v{1}" tag already exists in remote repository!'.format(
PYTHON_PACKAGE_NAME, version))
ctx.run('git flow release start v{0}'.format(version))
ctx.run('git flow release finish v{0}'.format(version))
@task(clean, build)
def release(ctx):
"""
Releases the project to *Pypi* with *Twine*.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Releasing...')
with ctx.cd('dist'):
ctx.run('twine upload *.tar.gz')
ctx.run('twine upload *.whl')
@task
def sha256(ctx):
"""
Computes the project *Pypi* package *sha256* with *OpenSSL*.
Parameters
----------
ctx : invoke.context.Context
Context.
Returns
-------
bool
Task success.
"""
message_box('Computing "sha256"...')
with ctx.cd('dist'):
ctx.run('openssl sha256 {0}-*.tar.gz'.format(PYPI_PACKAGE_NAME))
| [
"[email protected]"
] | |
dcd0da39888cc54780f3269f3b421d663fbe0369 | 12d0f444452d3b2218cd270756283a0463d3e796 | /sg/models/genome_evaluator.py | ebfcee9c68636525d62cd1370f29350bfbce32e0 | [] | no_license | dal3006/load_forecasting-1 | 107ffdbb4648989ba85fa8ba39ecdddb9c24ddd1 | d324a711a1a0c7ccd9587e0ecf9988a12214a1a3 | refs/heads/master | 2023-03-17T07:44:43.487863 | 2015-03-12T15:24:37 | 2015-03-12T15:24:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,873 | py | """Use this program to evaluate one genome at a time, read from standard
input."""
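# Example session (hypothetical flag values; --model must name one of the creator
# classes imported below, since run() evaluates it as "<model>(options)"):
#   $ python genome_evaluator.py --model ESNModelCreator --test-set
#   Enter genome to be evaluated:
#   [0.5, 100, 0.75]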
import sys
import ast
import traceback
import random
import matplotlib.pyplot as plt
import sg.utils.pyevolve_utils as pu
import sg.utils
import ga
import sg.data.sintef.userloads as ul
import load_prediction as lp
from load_prediction_ar import *
from load_prediction_ar24 import *
from load_prediction_arima import *
from load_prediction_dshw import *
from load_prediction_esn import *
from load_prediction_esn24 import *
try:
from load_prediction_CBR import *
from load_prediction_wavelet import *
from load_prediction_wavelet24 import *
except ImportError:
print >>sys.stderr, "Genome evaluator can't import CBR/wavelet modules, probably some of the dependencies are not installed."
options = None
def get_options():
global options
parser = lp.prediction_options()
parser = lp.ga_options(parser)
parser = lp.data_options(parser)
parser.add_option("--model", dest="model", help="The model class that the genomes instantiate", default=None)
parser.add_option("--test-set", dest="test_set", action="store_true",
help="Test the genomes on the test set, rather than on the training set", default=False)
parser.add_option("--plot", dest="plot", action="store_true",
help="Make a plot (in combination with --test-set)", default=False)
(options, args) = parser.parse_args()
lp.options = options
if options.model is None:
print >>sys.stderr, "Model argument is required."
sys.exit(1)
def read_next_genome_list():
print "Enter genome to be evaluated: "
line = sys.stdin.readline()
if line == "":
print "End of input, exiting."
sys.exit(0)
return ast.literal_eval(line)
def next_indiv():
gl = read_next_genome_list()
genome = pu.AllelesGenome()
genome.setInternalList(gl)
genome.setParams(num_trials=options.num_trials)
return genome
def gene_test_loop(model):
while sys.stdin:
ga._model = model
indiv = next_indiv()
if options.test_set:
print "Evaluating genome on test set: ", indiv[:]
sys.stdout.flush()
try:
(target, predictions) = lp.parallel_test_genome(indiv, model) if options.parallel else lp.test_genome(indiv, model)
except Exception, e:
print >>sys.stderr, "Exception raised, failed to evaluate genome."
tb = " " + traceback.format_exc(limit=50)[:-1]
print >>sys.stderr, tb.replace("\n", "\n ")
continue
error = sg.utils.concat_and_calc_error(predictions, target, model.error_func)
print "Error on test phase: {}".format(error)
if options.plot:
sg.utils.plot_target_predictions(target, predictions)
plt.show()
else:
print "Evaluating genome on training set: ", indiv[:]
sys.stdout.flush()
fitness = ga._fitness(indiv)
print "Fitness:", fitness
if fitness != 0:
print "Error:", ga._fitness_to_error(fitness)
else:
print "Error not calculated for 0 fitness."
def run():
"""."""
get_options()
prev_handler = np.seterrcall(lp.float_err_handler)
prev_err = np.seterr(all='call')
np.seterr(under='ignore')
random.seed(options.seed)
np.random.seed(options.seed)
model_creator = eval(options.model + "(options)")
model = model_creator.get_model()
lp._print_sim_context(model._dataset)
print "Number of training sequences: %d" % options.num_trials
print "Start days of training sequences:", model._dataset.train_periods_desc
gene_test_loop(model)
ul.tempfeeder_exp().close()
if __name__ == "__main__":
run()
| [
"[email protected]"
] | |
7f370a2f39867e89d89ab28e23fdbd1bf78c5c33 | affb8d9028f52201dc56dff947502134dcac3066 | /class-06/demo/big_O.py | a4cb31e5067e800c86925b9dfb3be4fe661ec627 | [] | no_license | maisjamil1/amman-python-401d1 | 10aa4d81c9082fbdf18badc3de060ce1d5309e1a | 25c37a5a7c023b5a24ba7a6cc303338b62548f83 | refs/heads/master | 2022-12-28T19:23:11.143932 | 2020-10-13T11:58:30 | 2020-10-13T11:58:30 | 287,927,879 | 0 | 0 | null | 2020-08-16T11:11:27 | 2020-08-16T11:11:27 | null | UTF-8 | Python | false | false | 2,410 | py | # Measure # of operations
n = 7 #1 operation
for i in range(n):
print(i) # n operations
# n+1 operations
# n = 5 > 6
# n = 100 > 101
# n = 1000000 > 1000001
# O(n+1)
# O(n)
def testing_bigoh(n):
for i in range(n):
for j in range(n):
print(i,j) # n*n (n^2)
# testing_bigoh(8)
# O(n^2)
nums1 = [2, 5, 8, 9, 43, 7]
nums2 = [-4, 43, 7, 8, 13, 45]
# One Loop
# Return a list of all items bigger than number in unsorted list
def find_nums_above(nums_list, number):
result = [] # 1 operation
for num in nums_list: # n times
if num > number:
result.append(num) # 1 operation -- 1 extra space
elif num < number:
print("Less")
else:
print("Else")
print("Done with current iteration") # 1 operation
return result # 1 operation
print(find_nums_above(nums1, 10))
# O(2*n+1+1) => O(2n+2)
# O(n)
# O(n) spaces
def find_nums_above_loop_inside(nums_list, number):
result = [] # 1 operation
for num in nums_list: # n times
if num > number:
result.append(num) # 1 operation
elif num < number:
print("Less") # 1 op
for j in range(len(nums_list)): # n times
print("Just for fun") # 1 op
else:
print("Else") # 1 op
print("Done with current iteration") # 1 operation
return result # 1 operation
# O(1 + n (1+ (1 or 1+n or 1) ) + 1)
# O(1 + n (1+ 1+n) + 1)
# O(1 + n(2+n) +1)
# O(2 + 2n^2)
# O(2n^2)
# O(n^2)
print(find_nums_above_loop_inside(nums1, 10))
def tricky_example(a):
    print("Hi") # 1 op
    print (3*4*6/2) # 1 op
    a.sort() # Hidden loop -- O(n*log n): Python's built-in sort is Timsort, a merge-sort hybrid
    print(a) # 1 op
    print("The end") # 1 op
# O(4 + sort-big-oh)
# O(sort-big-oh)
a = [4,7,2,9,5,0,3]
# Binary Search
# O(log n)
# We divide the array into two halves and eliminate one of them
sorted_list = [-1, 4, 6, 9, 23, 30, 45, 65, 76, 77, 90]
def binary_search(sorted_nums, target):
    min = 0 # 1 space
    max = len(sorted_nums)-1 # 1 space
    while max >= min:  # >= so the last remaining index (max == min) still gets checked
        pivot = (max+min)//2 # 1 space
        print(max, min, pivot)
        if target == sorted_nums[pivot]:
            return pivot
        elif target < sorted_nums[pivot]:
            max = pivot-1
        else:
            min = pivot+1
    return -1
print(binary_search(sorted_list, -1))
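# Boundary check for the loop condition above ("max > min" would miss this case):
print(binary_search(sorted_list, 90))  # 10 -- target at the last remaining index
print(binary_search(sorted_list, 50))  # -1 -- value not present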
# Iterative version: O(3) spaces => O(1) space
# O(log n) time
# A recursive version would use O(3*log n) spaces => O(log n) space
# (one stack frame per halving), still O(log n) time
def fib(i):
    # base cases: fib(0) = 0, fib(1) = 1
    if i < 2:
        return i
    return fib(i-1) + fib(i-2)
# fib(4) = fib(3) + fib(2)
# Each call branches into two more calls, so this is O(2^n) time
# We recreate the i variable in every recursive call -- O(n) call-stack space
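# A memoized variant (an added sketch) turns the exponential recursion above into
# O(n) time with O(n) extra space for the cache:
def fib_memo(i, cache={0: 0, 1: 1}):
    if i not in cache:
        cache[i] = fib_memo(i - 1) + fib_memo(i - 2)
    return cache[i]

print(fib_memo(40))  # 102334155, returns instantly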
| [
"[email protected]"
] | |
0a32d2b6c410aca949535c18a0afdc1811fa82de | d77cee829ec56d2ef12446bf1ebc75cf3a1d8de8 | /src/confluence/urls.py | 11ca30b6e7eba5d7d393b109c004ba297c8ac408 | [
"MIT"
] | permissive | thisisayush/Confluence | 6a508fdd96aebf38a9d063760fed7709c1a968f5 | a7e7b3b4d45ae9577f44d112c7383e4e101f3dd6 | refs/heads/master | 2021-04-15T08:02:05.097647 | 2017-03-02T19:15:49 | 2017-03-02T19:15:49 | 94,565,851 | 0 | 0 | null | 2017-06-16T17:15:55 | 2017-06-16T17:15:55 | null | UTF-8 | Python | false | false | 946 | py | """confluence URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')),
]
| [
"[email protected]"
] | |
0ff0703817449a164cc4148e5e772d7aad82761d | 20a0bd0a9675f52d4cbd100ee52f0f639fb552ef | /transit_odp/data_quality/migrations/0010_auto_20191118_1604.py | 1dbd2499c70b6991917a996f3979d7d53de8b877 | [] | no_license | yx20och/bods | 2f7d70057ee9f21565df106ef28dc2c4687dfdc9 | 4e147829500a85dd1822e94a375f24e304f67a98 | refs/heads/main | 2023-08-02T21:23:06.066134 | 2021-10-06T16:49:43 | 2021-10-06T16:49:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,602 | py | # Generated by Django 2.2.7 on 2019-11-18 16:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("data_quality", "0009_auto_20191118_1029"),
]
operations = [
migrations.RemoveField(
model_name="service",
name="report",
),
migrations.AddField(
model_name="service",
name="ito_id",
field=models.TextField(default=None, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name="service",
name="reports",
field=models.ManyToManyField(
related_name="services", to="data_quality.DataQualityReport"
),
),
migrations.AddField(
model_name="servicelink",
name="ito_id",
field=models.TextField(default=None, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name="servicepattern",
name="ito_id",
field=models.TextField(default=None, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name="timingpattern",
name="ito_id",
field=models.TextField(default=None, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name="vehiclejourney",
name="ito_id",
field=models.TextField(default=None, unique=True),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
fce283892ba59dcf2ba42e224830b42612d88aa5 | ec3e9925af8742d578fd11aac6f000ced71aa9f5 | /crm_app/migrations/0001_initial.py | a8d2064e20aeff0443aad84487887d739acbfa32 | [] | no_license | amrit-kumar/CRM-Customer-relationship-management- | cfd3ec42a975e7b987d76abe465cb2ec9eec62b4 | d41b482166557e17825b2a010d24bb03ee469245 | refs/heads/master | 2021-06-25T06:37:51.721771 | 2017-08-12T09:43:23 | 2017-08-12T09:43:23 | 96,964,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-01-17 10:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MsgReports',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('request_id', models.CharField(blank=True, max_length=250, null=True)),
('user_id', models.CharField(blank=True, max_length=250, null=True)),
('date', models.DateTimeField(blank=True, null=True)),
('discription', models.CharField(blank=True, max_length=250, null=True)),
('number', models.BigIntegerField(blank=True, null=True)),
('sender_id', models.CharField(blank=True, max_length=250, null=True)),
('campaign_name', models.CharField(blank=True, max_length=250, null=True)),
('status', models.CharField(blank=True, choices=[('1', '1'), ('2', '2'), ('3', '3')], max_length=250, null=True)),
],
),
]
| [
"[email protected]"
] | |
22cd4aa937ae8cfd23745a3259f156cd50b64a4e | cb3583cc1322d38b1ee05cb1c081e0867ddb2220 | /donor/migrations/0014_auto_20210331_0404.py | b1189bdce3ff86f5f1436a2a55ec393aa74d80f9 | [
"MIT"
] | permissive | iamgaddiel/codeupblood | 9e897ff23dedf5299cb59fd6c44d9bd8a645e9c6 | a0aa1725e5776d80e083b6d4e9e67476bb97e983 | refs/heads/main | 2023-05-07T23:34:27.475043 | 2021-04-24T20:49:08 | 2021-04-24T20:49:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # Generated by Django 3.1.6 on 2021-03-31 11:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('donor', '0013_auto_20210330_0743'),
]
operations = [
migrations.AlterField(
model_name='appointment',
name='d_id',
field=models.CharField(default='oiapGX', max_length=50),
),
]
| [
"[email protected]"
] | |
f427c925290c5a2a81db95be3c0f18e6c3e33066 | dccd1058e723b6617148824dc0243dbec4c9bd48 | /atcoder/abc048/a.py | 2a32b441150b9a7e79505fe4330cbbf200516869 | [] | no_license | imulan/procon | 488e49de3bcbab36c624290cf9e370abfc8735bf | 2a86f47614fe0c34e403ffb35108705522785092 | refs/heads/master | 2021-05-22T09:24:19.691191 | 2021-01-02T14:27:13 | 2021-01-02T14:27:13 | 46,834,567 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | for s in input().split():
print(s[0],end="")
print()
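# ABC048 A: prints the first letter of each of the three input words (the abbreviation).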
| [
"[email protected]"
] | |
8ab81a05046b4fbe1d20f70062f9411fee994e8d | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_to_M_Gk3_no_pad/pyr_Tcrop255_pad20_jit15/Sob_k17_s001/pyr_4s/L4/step10_a.py | 75773149c2e2458db22e88582b00384156b134b7 | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41,921 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)                                   ### path of the currently running step10_b.py
code_exe_path_element = code_exe_path.split("\\")                            ### split the path; we need to find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2")                      ### find which level kong_model2 is at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])          ### locate the kong_model2 dir
import sys                                                                   ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer               ### the -1 converts a length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:]  ### [7:] originally stripped the "step1x_" prefix; later decided meaningful names could stay, so changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:]  ### [5:] originally stripped "mask_", which only existed because Python module names can't start with a digit; later the auto-sorted order was fine, so changed to 0
elif(kong_to_py_layer >  3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print("    template_dir:", template_dir)                                   ### e.g. template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_4side_L4 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir is the folder name "one level above" result_dir! Nesting exp_dir is fine too~
For example, with exp_dir = "6_mask_unet/your_own_name", the result_dirs all live under:
    6_mask_unet/your_own_name/result_a
    6_mask_unet/your_own_name/result_b
    6_mask_unet/your_own_name/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_MC
use_loss_obj = [G_sobel_k17_loss_info_builder.set_loss_target("UNet_Mask").copy()]  ### the z, y, x ordering follows step07_b_0b_Multi_UNet
#############################################################
### Build an empty Exp_builder so result_analyze can draw blank plots
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="empty Exp_builder so result_analyze can draw blank plots")
#############################################################
# 1 3 6 10 15 21 28 36 45 55
# side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
ch032_1side_2__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
ch032_1side_3__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
ch032_1side_4__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
ch032_1side_5__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
        ### Press F5 directly, or run "python step10_b1_exp_obj_load_and_train_and_test.py" with nothing after it, so execution stops here instead of falling through to the code below that serves step10_b_subprocess.py~~~
ch032_1side_1__2side_1__3side_1_4side_1.build().run()
# print('no argument')
sys.exit()
    ### The code below serves step10_b_subprocess.py; it is equivalent to running: python step10_b1_exp_obj_load_and_train_and_test.py "<some_exp>.build().run()" from cmd
eval(sys.argv[1])
| [
"[email protected]"
] | |
d1a50b99473a4235042bb673ae4d5648722d7914 | 720dcd12b8fb7ab26125317a6f3d00c2623e5f13 | /chatbotQuery/__init__.py | fe8fcde48e539b7f3222f7e172a5b2d88236c54b | [
"MIT"
] | permissive | tgquintela/chatbot_query | 78e6f21268e06572009295c271c277ef89f2dcbc | 4c5160992a444f828da019ae57a802467a13c2fa | refs/heads/master | 2021-01-01T18:00:46.261089 | 2017-10-13T18:03:32 | 2017-10-13T18:03:32 | 98,224,976 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,896 | py |
"""
TODO
----
Decorator for message collections
"""
import copy
class ChatbotMessage(dict):
"""
    Compulsory elements
-------------------
- message
- collection
- from [user, bot]
"""
def __init__(self, message):
self.update({'message': '', 'collection': False})
self.update(message)
assert('from' in self)
assert('message' in self)
assert('collection' in self)
@classmethod
def from_message(cls, message):
if isinstance(message, ChatbotMessage):
return message
return cls(message)
@classmethod
def from_candidates_messages(cls, message):
message.update({'from': 'bot'})
if type(message['message']) == str:
message['collection'] = False
elif type(message['message']) == list:
message['collection'] = True
return cls(message)
@classmethod
def fake_user_message(cls):
return cls({'from': 'user'})
@property
def last_message_text(self):
if self['collection']:
return self['message'][-1]['message']
else:
return self['message']
def get_last_post(self):
_, last_post = self._filter_message_2_post()
for p in last_post:
yield p
def get_post(self):
posts, _ = self._filter_message_2_post()
for p in posts:
yield p
def get_all_messages(self):
for p in self.get_post():
yield p
for p in self.get_last_post():
yield p
def format_message(self, format_information):
if self['collection']:
self['message'][-1]['message'] =\
self['message'][-1]['message'].format(**format_information)
else:
self['message'] = self['message'].format(**format_information)
return self
def reflect_message(self, pre_message):
for key in pre_message:
if key not in ['message', 'from', 'time', 'answer_status',
'sending_status', 'collection', 'posting_status']:
self[key] = pre_message[key]
return self
def reflect_metadata(self, pre_message):
for key in pre_message:
if key not in self:
if key not in ['message', 'from', 'time', 'answer_status',
'sending_status', 'collection']:
self[key] = pre_message[key]
return self
def keep_query(self, pre_message):
if 'query' in pre_message:
if 'query' in self:
if self['query'] is None:
self['query'] = pre_message['query']
else:
self['query'] = pre_message['query']
return self
    def _if_possible_send(self, message):
        # Only non-empty messages authored by the bot can be sent.
        return (message['from'] == 'bot') and (message['message'] != '')
def _filter_message_2_post(self):
posts, last_post = [], []
if self['collection']:
messages = [m for m in self['message']
if self._if_possible_send(m)]
if len(messages):
last_post = [messages[-1]]
posts = messages[:-1]
else:
if self._if_possible_send(self):
last_post = [copy.copy(self)]
return posts, last_post
def _detect_message_sending_status(self):
if 'sending_status' in self:
return self['sending_status']
return True
def _preformat_collection_messages(self):
if not self._detect_message_sending_status():
if not self['collection']:
self['message'] = [copy.copy(self)]
self['collection'] = True
return self
return self
    def _is_prepared(self, message):
        """Return whether the given message dict can be sent."""
        if message['message'] == '':
            return False
        # Check the message itself, so collection sub-messages report their own status.
        if 'sending_status' in message:
            return message['sending_status']
        if 'posting_status' in message:
            return message['posting_status']
        return False
    def is_prepared(self):
        """Return whether any contained message is ready to be sent."""
        if self['collection']:
            return any([self._is_prepared(e) for e in self['message']])
        return self._is_prepared(self)
def add_tags(self, tags):
if tags is not None and (type(tags) in [list, str]):
tags = tags if type(tags) == list else [tags]
if 'tags' in self:
old_tags = self['tags']
old_tags += tags
old_tags = list(set(old_tags))
self['tags'] = old_tags
else:
self['tags'] = tags
if self['collection']:
if 'tags' in self['message'][-1]:
old_tags = self['message'][-1]['tags']
old_tags += tags
old_tags = list(set(old_tags))
self['message'][-1]['tags'] = old_tags
self['tags'] = old_tags
else:
self['message'][-1]['tags'] = tags
return self
def collapse_message(self, message):
self._preformat_collection_messages()
if self['collection']:
messagestext = copy.copy(self['message'])
if message['collection']:
messagestext += message['message']
else:
messagestext.append(message)
self.update(message)
self['message'] = messagestext
self['collection'] = True
self.check_message()
return self
else:
output_message = copy.copy(message)
output_message['collection'] = False
if 'query' in message:
output_message['query'] = message['query']
output_message =\
ChatbotMessage.from_candidates_messages(output_message)
output_message.check_message()
return output_message
def add_selector_types(self, selector_types):
## Store results in message
self['selector_types'] = selector_types
return self
def add_entry_to_last_message(self, entry_var, var):
self[entry_var] = var
if self['collection']:
self['message'][-1][entry_var] = var
return self
def structure_answer(self):
## Input selector types
if self['collection']:
self['message'][-1]['selector_types'] = self['selector_types']
self.check_message()
return self
def check_message(self):
if self['collection']:
assert(all([isinstance(m, dict) for m in self['message']]))
assert(all([isinstance(m['message'], str)
for m in self['message']]))
else:
assert(isinstance(self['message'], str))
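# Minimal usage sketch (an illustration, not part of the original module):
# build a bot reply, fill in a template value, and collapse a follow-up
# message into a collection.
if __name__ == '__main__':
    greeting = ChatbotMessage.from_candidates_messages(
        {'message': 'Hello, {name}!'})
    greeting.format_message({'name': 'Alice'})
    follow_up = ChatbotMessage.from_candidates_messages(
        {'message': 'How can I help?', 'sending_status': False})
    combined = follow_up.collapse_message(greeting)
    print(combined.last_message_text)  # -> Hello, Alice!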
| [
"[email protected]"
] | |
2d85e566ab46559127ff094934cff6b9e3b4a756 | e72db255e41332c113f929eb63815b2169038209 | /Chapter08/audio-encode-server-4/audio_encode_server/s3.py | 8585e1faf5d52e430754cde9e22635bf0eee6396 | [
"MIT"
] | permissive | PacktPublishing/Hands-On-Reactive-Programming-with-Python | b196b971fe49a36da9f979790b8c31c98a659031 | 757d45e2023032c6074e26ad252530f3c89978bf | refs/heads/master | 2023-02-07T01:03:37.648175 | 2023-02-05T18:21:17 | 2023-02-05T18:21:38 | 128,761,473 | 75 | 19 | null | null | null | null | UTF-8 | Python | false | false | 2,077 | py | import asyncio
from collections import namedtuple
from io import BytesIO
import reactivex as rx
import boto3
from boto3.session import Session
from cyclotron import Component
Source = namedtuple('Source', ['response'])
Sink = namedtuple('Sink', ['request'])
# Sink objects
Configure = namedtuple('Configure', [
'access_key', 'secret_key',
'bucket', 'endpoint_url', 'region_name'])
UploadObject = namedtuple('UploadObject', ['key', 'data', 'id'])
# Source objects
UploadReponse = namedtuple('UploadReponse', ['key', 'id'])
def make_driver(loop=None):
if loop is None:
loop = asyncio.get_event_loop()
def driver(sink):
def on_subscribe(observer, scheduler):
client = None
bucket = None
def on_next(item):
nonlocal client
nonlocal bucket
if type(item) is Configure:
session = Session(aws_access_key_id=item.access_key,
aws_secret_access_key=item.secret_key)
client = session.client(
's3',
endpoint_url=item.endpoint_url,
region_name=item.region_name)
bucket = item.bucket
elif type(item) is UploadObject:
data = BytesIO(item.data)
client.upload_fileobj(data, bucket, item.key)
loop.call_soon_threadsafe(observer.on_next, UploadReponse(
key=item.key,
id=item.id))
else:
loop.call_soon_threadsafe(observer.on_error, "unknown item: {}".format(type(item)))
sink.request.subscribe(
on_next=on_next,
on_error=lambda e: loop.call_soon_threadsafe(observer.on_error, e),
on_completed=lambda: loop.call_soon_threadsafe(observer.on_completed))
return Source(
response=rx.create(on_subscribe)
)
return Component(call=driver, input=Sink)
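# Hedged usage sketch (not part of the original driver): the endpoint, bucket
# and credentials below are placeholders, and the UploadObject step only
# succeeds against a reachable S3-compatible service such as MinIO.
if __name__ == '__main__':
    from reactivex.subject import Subject
    requests_in = Subject()
    s3_component = make_driver()
    source = s3_component.call(Sink(request=requests_in))
    source.response.subscribe(on_next=print)
    requests_in.on_next(Configure(
        access_key='PLACEHOLDER', secret_key='PLACEHOLDER',
        bucket='my-bucket', endpoint_url='http://localhost:9000',
        region_name='us-east-1'))
    requests_in.on_next(UploadObject(key='test.bin', data=b'abc', id=1))
    # Run the loop briefly so the thread-safe callbacks get a chance to fire.
    asyncio.get_event_loop().run_until_complete(asyncio.sleep(0.1))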
| [
"[email protected]"
] | |
0f0a43f2a910cb3bd27dccab958083608f47a592 | 0258e0c9595406ceb3de32067aff776bc2a58fa8 | /06_p12.py | a649f413d98bebdcef131856db0da2a3d6949b5d | [] | no_license | akromibn37/python_code | 72c016c361b3ba2e04c83e1d1a703171b0bd8819 | 41d1a09f8ec8696e37ad83c1a0cb6506c7f0f4f6 | refs/heads/master | 2020-03-21T22:57:25.111642 | 2018-06-29T14:14:33 | 2018-06-29T14:14:33 | 139,157,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | data = input().strip()
l = list(data)  # work on the string as a mutable list of characters
num = int(input().strip())  # number of editing commands to apply
for _ in range(num):
    command = input().split()
    if command[0] == "in":      # in <char> <index>: insert char at index
        l.insert(int(command[2]), command[1])
    elif command[0] == "out":   # out <index>: remove the char at index
        l.pop(int(command[1]))
    elif command[0] == "swap":  # swap <i> <j>: exchange two characters
        i, j = int(command[1]), int(command[2])
        l[i], l[j] = l[j], l[i]
    print("".join(l))           # show the string after every command
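# Example session (hypothetical input on stdin):
#   abc        -> initial string
#   2          -> number of commands
#   in d 1     -> prints "adbc"
#   swap 0 3   -> prints "cdba"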
| [
"[email protected]"
] | |
a3832070b1ec7002d6f2dd0a9f5bd280d29a3962 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/python/keras/layers/cudnn_recurrent 2.py | 96ae66c775e623fff4738688d4f11005c5261b33 | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:52c49577848819c4116b99c29c11e765e7a2d686e7ccb4dc7b84454bdf31510f
size 20854
| [
"[email protected]"
] | |
69ef378642a90c904e60bcd86fa6932e967ed311 | 032117bbf248a76abd25fcc2355bc8ade84fa76a | /inheritance_4.py | b62203cddf2bf1a42b3576a58752aaab34cfb71a | [] | no_license | shefaligoel136/python_summer_training | ba8f28f6af008584b4239c73d466e4e9d35b4b01 | 0b97fea050342fe4ed95b18c5f7ed885a6c8ca23 | refs/heads/master | 2022-11-13T07:22:32.855717 | 2020-07-06T08:33:19 | 2020-07-06T08:33:19 | 277,480,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | # using super
class a:
def __init__(self):
print("initof A")
def feature1(self):
print("feature 1 is working")
def feature2(self):
print("feature 2 is working")
class b(a):
def __init__(self):
super().__init__()
print("initof B")
def feature3(self):
print("feature 3 is working")
def feature4(self):
print("feature 4 is working")
k = b()
k.feature1() | [
"[email protected]"
] | |
79445dc9be69e70168bbf832fc269c16f8377373 | c5859d1bdf44c8452563f856dc4191b74e85ce21 | /custom_components/image_processing/tagbox.py | 163ce385bf2c8182fd5f439a3f58b3d206199a0e | [] | no_license | balloob/homeassistant-config | 46774ea88ced4414e48e4f1f40af63ff67b6f990 | 9f341e4b695db56f3c4af7299a336d5a0f60cdcf | refs/heads/master | 2020-03-21T03:10:31.729526 | 2018-06-18T18:27:54 | 2018-06-18T18:27:54 | 138,039,924 | 11 | 0 | null | 2018-06-20T13:56:12 | 2018-06-20T13:56:12 | null | UTF-8 | Python | false | false | 4,157 | py | """
Component that will search images for tagged objects via a local
machinebox instance.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/image_processing.tagbox
"""
import base64
import requests
import logging
import time
import voluptuous as vol
from homeassistant.core import split_entity_id
import homeassistant.helpers.config_validation as cv
from homeassistant.components.image_processing import (
PLATFORM_SCHEMA, ImageProcessingEntity, CONF_SOURCE, CONF_ENTITY_ID,
CONF_NAME, DOMAIN)
_LOGGER = logging.getLogger(__name__)
CONF_ENDPOINT = 'endpoint'
CONF_TAGS = 'tags'
ROUNDING_DECIMALS = 2
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_ENDPOINT): cv.string,
vol.Optional(CONF_TAGS, default=[]):
vol.All(cv.ensure_list, [cv.string]),
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the classifier."""
entities = []
for camera in config[CONF_SOURCE]:
entities.append(Tagbox(
camera.get(CONF_NAME),
config[CONF_ENDPOINT],
camera[CONF_ENTITY_ID],
config[CONF_TAGS],
))
add_devices(entities)
class Tagbox(ImageProcessingEntity):
"""Perform a tag search via a Tagbox."""
def __init__(self, name, endpoint, camera_entity, tags):
"""Init with the API key and model id"""
super().__init__()
if name: # Since name is optional.
self._name = name
else:
self._name = "Tagbox {0}".format(
split_entity_id(camera_entity)[1])
self._camera = camera_entity
self._default_tags = {tag: 0.0 for tag in tags}
self._tags = self._default_tags
self._url = "http://{}/tagbox/check".format(endpoint)
self._state = "no_processing_performed"
self._response_time = None
def process_image(self, image):
"""Process an image."""
timer_start = time.perf_counter()
try:
response = requests.post(
self._url,
json=self.encode_image(image)
).json()
        except Exception:  # request or JSON decoding failed
response = {'success': False}
if response['success']:
elapsed_time = time.perf_counter() - timer_start
self._response_time = round(elapsed_time, ROUNDING_DECIMALS)
self._tags, self._state = self.process_response(response)
else:
self._state = "Request_failed"
self._tags = self._default_tags
def encode_image(self, image):
"""base64 encode an image stream."""
base64_img = base64.b64encode(image).decode('ascii')
return {"base64": base64_img}
def process_response(self, response):
"""Process response data, returning the processed tags and state."""
tags = self._default_tags.copy()
tags.update(self.process_tags(response['tags']))
if response['custom_tags']:
tags.update(self.process_tags(response['custom_tags']))
        # If no tags are configured or returned, max() on the empty dict raises.
        try:
            state = max(tags.keys(), key=(lambda k: tags[k]))
        except ValueError:
            state = "No_tags_identified"
return tags, state
def process_tags(self, tags_data):
"""Process tags data, returning the tag and rounded confidence."""
processed_tags = {
tag['tag'].lower(): round(tag['confidence'], ROUNDING_DECIMALS)
for tag in tags_data
}
return processed_tags
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
attr = self._tags.copy()
attr.update({'response_time': self._response_time})
return attr
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def name(self):
"""Return the name of the sensor."""
return self._name
| [
"[email protected]"
] | |
5c0d30018cbe2c3ef11519938d2dcc3bbcfa328b | 267ab87884d6c74f8d676c1b6cfebf7e217e2ea7 | /index/views.py | 79a1320fcddf6b714ccc0465ccd2299e1bfd4d22 | [] | no_license | Emehinola/charlotte | 0d564181de1f5419a67c06e7dba5cd81796cb1aa | c3175757f5ce7d3ceab272dad9a866c4bea4bd1d | refs/heads/master | 2023-04-23T00:38:18.965089 | 2021-04-30T19:34:17 | 2021-04-30T19:34:17 | 363,119,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | from django.shortcuts import render
from django.views import generic
from blog.models import Article, categories
# Create your views here.
class Home(generic.ListView):
model = Article
paginate_by = 30
template_name = 'index/home.html'
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)  # keep ListView's pagination context
        context.update({
            'must_read': Article.objects.filter(must_read=True)[:5],
            'articles': Article.objects.all(),
            'categories': get_category(),  # call the helper; don't hand the template the function
        })
        return context
def get_category(): # return a list of blog categories
raw = []
readable = []
for i in categories:
raw.append(i[0]) # gets the first item of the list of tuples
readable.append(i[1]) # gets the second item of the list of tuples
output = zip(raw, readable)
return output
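# Hypothetical template usage of the (raw, readable) pairs (illustration only):
#   {% for slug, label in categories %}
#     <a href="/category/{{ slug }}">{{ label }}</a>
#   {% endfor %}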
| [
"[email protected]"
] | |
de3fe45a87e82c646b0708bb94ef18a5f539f842 | 4d675034878c4b6510e1b45b856cc0a71af7f886 | /mmdet/models/seg_heads/panoptic_fusion_heads/heuristic_fusion_head.py | 06c1de2b9010fef13bd2322bbd3352d82a1f3e2f | [
"Apache-2.0",
"BSD-2-Clause-Views",
"MIT",
"BSD-2-Clause"
] | permissive | shinya7y/UniverseNet | 101ebc2ad8f15482ee45ea8d6561aa338a0fa49e | 3652b18c7ce68122dae7a32670624727d50e0914 | refs/heads/master | 2023-07-22T08:25:42.646911 | 2023-07-08T18:09:34 | 2023-07-08T18:09:34 | 263,555,721 | 407 | 58 | Apache-2.0 | 2023-01-27T01:13:31 | 2020-05-13T07:23:43 | Python | UTF-8 | Python | false | false | 4,482 | py | # Copyright (c) OpenMMLab. All rights reserved.
import torch
from mmdet.core.evaluation.panoptic_utils import INSTANCE_OFFSET
from mmdet.models.builder import HEADS
from .base_panoptic_fusion_head import BasePanopticFusionHead
@HEADS.register_module()
class HeuristicFusionHead(BasePanopticFusionHead):
"""Fusion Head with Heuristic method."""
def __init__(self,
num_things_classes=80,
num_stuff_classes=53,
test_cfg=None,
init_cfg=None,
**kwargs):
super(HeuristicFusionHead,
self).__init__(num_things_classes, num_stuff_classes, test_cfg,
None, init_cfg, **kwargs)
def forward_train(self, gt_masks=None, gt_semantic_seg=None, **kwargs):
"""HeuristicFusionHead has no training loss."""
return dict()
def _lay_masks(self, bboxes, labels, masks, overlap_thr=0.5):
"""Lay instance masks to a result map.
Args:
bboxes: The bboxes results, (K, 4).
labels: The labels of bboxes, (K, ).
masks: The instance masks, (K, H, W).
overlap_thr: Threshold to determine whether two masks overlap.
default: 0.5.
Returns:
Tensor: The result map, (H, W).
"""
num_insts = bboxes.shape[0]
id_map = torch.zeros(
masks.shape[-2:], device=bboxes.device, dtype=torch.long)
if num_insts == 0:
return id_map, labels
scores, bboxes = bboxes[:, -1], bboxes[:, :4]
# Sort by score to use heuristic fusion
order = torch.argsort(-scores)
bboxes = bboxes[order]
labels = labels[order]
segm_masks = masks[order]
instance_id = 1
left_labels = []
for idx in range(bboxes.shape[0]):
_cls = labels[idx]
_mask = segm_masks[idx]
instance_id_map = torch.ones_like(
_mask, dtype=torch.long) * instance_id
area = _mask.sum()
if area == 0:
continue
pasted = id_map > 0
intersect = (_mask * pasted).sum()
if (intersect / (area + 1e-5)) > overlap_thr:
continue
_part = _mask * (~pasted)
id_map = torch.where(_part, instance_id_map, id_map)
left_labels.append(_cls)
instance_id += 1
if len(left_labels) > 0:
instance_labels = torch.stack(left_labels)
else:
instance_labels = bboxes.new_zeros((0, ), dtype=torch.long)
assert instance_id == (len(instance_labels) + 1)
return id_map, instance_labels
def simple_test(self, det_bboxes, det_labels, mask_preds, seg_preds,
**kwargs):
"""Fuse the results of instance and semantic segmentations.
Args:
det_bboxes: The bboxes results, (K, 4).
det_labels: The labels of bboxes, (K,).
mask_preds: The masks results, (K, H, W).
seg_preds: The semantic segmentation results,
(K, num_stuff + 1, H, W).
Returns:
Tensor : The panoptic segmentation result, (H, W).
"""
mask_preds = mask_preds >= self.test_cfg.mask_thr_binary
id_map, labels = self._lay_masks(det_bboxes, det_labels, mask_preds,
self.test_cfg.mask_overlap)
seg_results = seg_preds.argmax(dim=0)
seg_results = seg_results + self.num_things_classes
pan_results = seg_results
instance_id = 1
for idx in range(det_labels.shape[0]):
_mask = id_map == (idx + 1)
if _mask.sum() == 0:
continue
_cls = labels[idx]
# simply trust detection
segment_id = _cls + instance_id * INSTANCE_OFFSET
pan_results[_mask] = segment_id
instance_id += 1
ids, counts = torch.unique(
pan_results % INSTANCE_OFFSET, return_counts=True)
stuff_ids = ids[ids >= self.num_things_classes]
stuff_counts = counts[ids >= self.num_things_classes]
ignore_stuff_ids = stuff_ids[
stuff_counts < self.test_cfg.stuff_area_limit]
assert pan_results.ndim == 2
pan_results[(pan_results.unsqueeze(2) == ignore_stuff_ids.reshape(
1, 1, -1)).any(dim=2)] = self.num_classes
return pan_results
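# Minimal smoke-test sketch (an illustration, not part of mmdet; the shapes,
# thresholds and tiny 8x8 maps below are made up):
if __name__ == '__main__':
    from mmcv import Config
    head = HeuristicFusionHead(
        num_things_classes=2,
        num_stuff_classes=3,
        test_cfg=Config(dict(
            mask_thr_binary=0.5, mask_overlap=0.5, stuff_area_limit=4)))
    det_bboxes = torch.tensor([[0., 0., 4., 4., 0.9]])
    det_labels = torch.tensor([0])
    mask_preds = torch.rand(1, 8, 8)
    seg_preds = torch.rand(4, 8, 8)  # num_stuff_classes + 1 channels
    pan_results = head.simple_test(det_bboxes, det_labels, mask_preds,
                                   seg_preds)
    print(pan_results.shape)  # torch.Size([8, 8])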
| [
"[email protected]"
] | |
bded7a0abc4bf1dc4955561f7e0715bcba19006f | 7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3 | /codeforces/cf326-350/cf334/b.py | 3d79209e1a77d7ad5f7c126cf1c70b802e0ece89 | [] | no_license | roiti46/Contest | c0c35478cd80f675965d10b1a371e44084f9b6ee | c4b850d76796c5388d2e0d2234f90dc8acfaadfa | refs/heads/master | 2021-01-17T13:23:30.551754 | 2017-12-10T13:06:42 | 2017-12-10T13:06:42 | 27,001,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | # -*- coding: utf-8 -*-
import sys,copy,math,heapq,itertools as it,fractions,re,bisect,collections as coll
mod = 10**9 + 7
class UnionFind:
def __init__(self, size):
self.rank = [0] * size
self.par = range(size)
self.g_num = size
def find(self, x):
if x == self.par[x]: return x
self.par[x] = self.find(self.par[x])
return self.par[x]
def same(self, x, y):
return self.find(x) == self.find(y)
def unite(self, x, y):
x, y = self.find(x), self.find(y)
if x == y: return
self.g_num -= 1
if (self.rank[x] > self.rank[y]):
self.par[y] = x
else:
self.par[x] = y
if (self.rank[x] == self.rank[y]): self.rank[y] += 1
def group_num(self):
return self.g_num
#prime = [1] * 1000005
#prime[0] = prime[1] = 0
#for i in xrange(int(1000005**0.5) + 1):
# if prime[i]:
# prime[2*i::i] = [0] * len(prime[2*i::i])
# Count the functions f on {0, ..., p-1} satisfying f(k*x mod p) = k*f(x) mod p.
p, k = map(int, raw_input().split())
if k == 0:
    # f(0) is forced to 0 and the remaining p - 1 values are free.
    print pow(p, p - 1, mod)
    exit()
uf = UnionFind(p)
cnt = 0
for x in xrange(p):
    if x == k*x % p:
        # Fixed point: for k > 1 it forces f(x) = 0, contributing no freedom.
        if k > 1:
            cnt += 1
    else:
        # x and k*x must take consistent values, so they share one orbit.
        uf.unite(x, k*x % p)
# One independent choice of f per orbit that is not a forced fixed point.
ans = pow(p, uf.group_num() - cnt, mod)
print ans
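# e.g. p = 3, k = 2: f(0) = 0 is forced (the lone fixed point) and the orbit
# {1, 2} contributes one free value, so the answer is 3 ** 1 = 3.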
| [
"[email protected]"
] | |
8543bcbeaead0694113b144c40525f0d8ca0ac1d | cc54cf98ec7c1dc88eae06ad12b9c66dc8d500b9 | /intrinio_sdk/models/api_response_crypto_true_strength_index.py | bd1b9566f61dd243b6253547bf33ae54da7f8950 | [] | no_license | sanderbrauwers/python-sdk | 0b3caef4c51c7f4192d315a4636e7278de2dc252 | 81f6facb30e7781c70ba0000485a0d994a82dbf8 | refs/heads/master | 2020-05-27T09:57:51.492025 | 2019-05-24T10:53:21 | 2019-05-24T10:53:21 | 188,575,545 | 0 | 0 | null | 2019-05-25T14:24:44 | 2019-05-25T14:24:44 | null | UTF-8 | Python | false | false | 14,157 | py | # coding: utf-8
"""
Intrinio API
Welcome to the Intrinio API! Through our Financial Data Marketplace, we offer a wide selection of financial data feed APIs sourced by our own proprietary processes as well as from many data vendors. For a complete API request / response reference please view the [Intrinio API documentation](https://intrinio.com/documentation/api_v2). If you need additional help in using the API, please visit the [Intrinio website](https://intrinio.com) and click on the chat icon in the lower right corner. # noqa: E501
OpenAPI spec version: 2.6.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from intrinio_sdk.models.crypto_exchange_summary import CryptoExchangeSummary # noqa: F401,E501
from intrinio_sdk.models.crypto_pair_summary import CryptoPairSummary # noqa: F401,E501
from intrinio_sdk.models.technical_indicator import TechnicalIndicator # noqa: F401,E501
from intrinio_sdk.models.true_strength_index_technical_value import TrueStrengthIndexTechnicalValue # noqa: F401,E501
class ApiResponseCryptoTrueStrengthIndex(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'technicals': 'list[TrueStrengthIndexTechnicalValue]',
'indicator': 'TechnicalIndicator',
'pair': 'CryptoPairSummary',
'exchange': 'CryptoExchangeSummary',
'timeframe': 'str',
'next_page': 'str'
}
attribute_map = {
'technicals': 'technicals',
'indicator': 'indicator',
'pair': 'pair',
'exchange': 'exchange',
'timeframe': 'timeframe',
'next_page': 'next_page'
}
def __init__(self, technicals=None, indicator=None, pair=None, exchange=None, timeframe=None, next_page=None): # noqa: E501
"""ApiResponseCryptoTrueStrengthIndex - a model defined in Swagger""" # noqa: E501
self._technicals = None
self._indicator = None
self._pair = None
self._exchange = None
self._timeframe = None
self._next_page = None
self.discriminator = None
if technicals is not None:
self.technicals = technicals
if indicator is not None:
self.indicator = indicator
if pair is not None:
self.pair = pair
if exchange is not None:
self.exchange = exchange
if timeframe is not None:
self.timeframe = timeframe
if next_page is not None:
self.next_page = next_page
@property
def technicals(self):
"""Gets the technicals of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The technicals of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: list[TrueStrengthIndexTechnicalValue]
"""
return self._technicals
@property
def technicals_dict(self):
"""Gets the technicals of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The technicals of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: list[TrueStrengthIndexTechnicalValue]
"""
result = None
value = self.technicals
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'technicals': value }
return result
@technicals.setter
def technicals(self, technicals):
"""Sets the technicals of this ApiResponseCryptoTrueStrengthIndex.
:param technicals: The technicals of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: list[TrueStrengthIndexTechnicalValue]
"""
self._technicals = technicals
@property
def indicator(self):
"""Gets the indicator of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
The name and symbol of the technical indicator # noqa: E501
:return: The indicator of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: TechnicalIndicator
"""
return self._indicator
@property
def indicator_dict(self):
"""Gets the indicator of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
        The name and symbol of the technical indicator as a dictionary. Useful for Pandas DataFrames. # noqa: E501
:return: The indicator of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: TechnicalIndicator
"""
result = None
value = self.indicator
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'indicator': value }
return result
@indicator.setter
def indicator(self, indicator):
"""Sets the indicator of this ApiResponseCryptoTrueStrengthIndex.
The name and symbol of the technical indicator # noqa: E501
:param indicator: The indicator of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: TechnicalIndicator
"""
self._indicator = indicator
@property
def pair(self):
"""Gets the pair of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The pair of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: CryptoPairSummary
"""
return self._pair
@property
def pair_dict(self):
"""Gets the pair of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The pair of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: CryptoPairSummary
"""
result = None
value = self.pair
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'pair': value }
return result
@pair.setter
def pair(self, pair):
"""Sets the pair of this ApiResponseCryptoTrueStrengthIndex.
:param pair: The pair of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: CryptoPairSummary
"""
self._pair = pair
@property
def exchange(self):
"""Gets the exchange of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The exchange of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: CryptoExchangeSummary
"""
return self._exchange
@property
def exchange_dict(self):
"""Gets the exchange of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:return: The exchange of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: CryptoExchangeSummary
"""
result = None
value = self.exchange
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'exchange': value }
return result
@exchange.setter
def exchange(self, exchange):
"""Sets the exchange of this ApiResponseCryptoTrueStrengthIndex.
:param exchange: The exchange of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: CryptoExchangeSummary
"""
self._exchange = exchange
@property
def timeframe(self):
"""Gets the timeframe of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
The time interval for the crypto currency prices # noqa: E501
:return: The timeframe of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: str
"""
return self._timeframe
@property
def timeframe_dict(self):
"""Gets the timeframe of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
        The time interval for the crypto currency prices as a dictionary. Useful for Pandas DataFrames. # noqa: E501
:return: The timeframe of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: str
"""
result = None
value = self.timeframe
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'timeframe': value }
return result
@timeframe.setter
def timeframe(self, timeframe):
"""Sets the timeframe of this ApiResponseCryptoTrueStrengthIndex.
The time interval for the crypto currency prices # noqa: E501
:param timeframe: The timeframe of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: str
"""
self._timeframe = timeframe
@property
def next_page(self):
"""Gets the next_page of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
The token required to request the next page of the data # noqa: E501
:return: The next_page of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: str
"""
return self._next_page
@property
def next_page_dict(self):
"""Gets the next_page of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
        The token required to request the next page of the data as a dictionary. Useful for Pandas DataFrames. # noqa: E501
:return: The next_page of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:rtype: str
"""
result = None
value = self.next_page
if isinstance(value, list):
result = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result = value.to_dict()
elif isinstance(value, dict):
result = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result = { 'next_page': value }
return result
@next_page.setter
def next_page(self, next_page):
"""Sets the next_page of this ApiResponseCryptoTrueStrengthIndex.
The token required to request the next page of the data # noqa: E501
:param next_page: The next_page of this ApiResponseCryptoTrueStrengthIndex. # noqa: E501
:type: str
"""
self._next_page = next_page
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiResponseCryptoTrueStrengthIndex):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
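
# Usage sketch (illustrative values; not part of the generated module):
#   resp = ApiResponseCryptoTrueStrengthIndex(timeframe='d1')
#   resp.to_dict()       # plain-dict view of every attribute
#   resp.timeframe_dict  # -> {'timeframe': 'd1'} for a scalar attribute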
| [ "[email protected]" ] | |
8eefdcd0f560f9474b98e085a4292b064e7dce77 | 65329299fca8dcf2e204132624d9b0f8f8f39af7 | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/__init__.py | 21732f34697d6d2ac9444bb3316752278e827cf6 | [
"Apache-2.0"
] | permissive | darylturner/napalm-yang | bf30420e22d8926efdc0705165ed0441545cdacf | b14946b884ad2019b896ee151285900c89653f44 | refs/heads/master | 2021-05-14T12:17:37.424659 | 2017-11-17T07:32:49 | 2017-11-17T07:32:49 | 116,404,171 | 0 | 0 | null | 2018-01-05T16:21:37 | 2018-01-05T16:21:36 | null | UTF-8 | Python | false | false | 11,048 | py |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import subTLVs_
class subTLVs(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/subTLVs. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS prefix sub-TLVs.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__subTLVs',)
_yang_name = 'subTLVs'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__subTLVs = YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'levels', u'level', u'link-state-database', u'lsp', u'tlvs', u'tlv', u'mt-ipv6-reachability', u'prefixes', u'prefix', u'subTLVs']
def _get_subTLVs(self):
"""
Getter method for subTLVs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs (list)
YANG Description: List of subTLV types in the LSDB for the specified TLV.
"""
return self.__subTLVs
def _set_subTLVs(self, v, load=False):
"""
Setter method for subTLVs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_subTLVs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subTLVs() directly.
YANG Description: List of subTLV types in the LSDB for the specified TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subTLVs must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
})
self.__subTLVs = t
if hasattr(self, '_set'):
self._set()
def _unset_subTLVs(self):
self.__subTLVs = YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
subTLVs = __builtin__.property(_get_subTLVs)
_pyangbind_elements = {'subTLVs': subTLVs, }
import subTLVs_
class subTLVs(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/subTLVs. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS prefix sub-TLVs.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__subTLVs',)
_yang_name = 'subTLVs'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__subTLVs = YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'levels', u'level', u'link-state-database', u'lsp', u'tlvs', u'tlv', u'mt-ipv6-reachability', u'prefixes', u'prefix', u'subTLVs']
def _get_subTLVs(self):
"""
Getter method for subTLVs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs (list)
YANG Description: List of subTLV types in the LSDB for the specified TLV.
"""
return self.__subTLVs
def _set_subTLVs(self, v, load=False):
"""
Setter method for subTLVs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/subTLVs/subTLVs (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_subTLVs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subTLVs() directly.
YANG Description: List of subTLV types in the LSDB for the specified TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subTLVs must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
})
self.__subTLVs = t
if hasattr(self, '_set'):
self._set()
def _unset_subTLVs(self):
self.__subTLVs = YANGDynClass(base=YANGListType("subtlv_type",subTLVs_.subTLVs, yang_name="subTLVs", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='subtlv-type', extensions=None), is_container='list', yang_name="subTLVs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)
subTLVs = __builtin__.property(_get_subTLVs)
_pyangbind_elements = {'subTLVs': subTLVs, }
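
# Usage sketch (pyangbind conventions; the key value "1" is illustrative):
#   container = subTLVs()
#   entry = container.subTLVs.add("1")   # keyed YANG list, key = subtlv-type
#   container.get()                      # nested-dict view of the container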
| [ "[email protected]" ] | |
d2e145a737723d90d40cb49ba1513f4ce09da229 | d0fcc2198f1caf5633c4fc0d004ba68714396f1b | /bc4py/utils.py | d1c4a85cb4d9f0df6c85fb081bee3a4001b51119 | [
"MIT"
] | permissive | webclinic017/bc4py | 4bfce04b666c2aaadda4b7ecc2a8270839231850 | 620b7d855ec957b3e2b4021cf8069d9dd128587a | refs/heads/master | 2022-12-09T22:23:49.842255 | 2019-06-21T14:24:17 | 2019-06-21T14:24:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,100 | py | from bc4py.config import C, V
from bc4py.gittool import get_current_branch
from bc4py.chain.utils import GompertzCurve
from Cryptodome.Cipher import AES
from Cryptodome import Random
from Cryptodome.Hash import SHA256
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from logging import getLogger, DEBUG, INFO, WARNING, ERROR
import multiprocessing
import os
import psutil
import sys
WALLET_VERSION = 0
log = getLogger('bc4py')
NAME2LEVEL = {
'DEBUG': DEBUG,
'INFO': INFO,
'WARNING': WARNING,
'ERROR': ERROR,
}
def set_database_path(sub_dir=None):
V.DB_HOME_DIR = os.path.join(os.path.expanduser("~"), 'blockchain-py')
if not os.path.exists(V.DB_HOME_DIR):
os.makedirs(V.DB_HOME_DIR)
if sub_dir:
V.DB_HOME_DIR = os.path.join(V.DB_HOME_DIR, sub_dir)
if not os.path.exists(V.DB_HOME_DIR):
os.makedirs(V.DB_HOME_DIR)
V.DB_ACCOUNT_PATH = os.path.join(V.DB_HOME_DIR, 'wallet.ver{}.dat'.format(WALLET_VERSION))
def set_blockchain_params(genesis_block, params):
assert 'spawn' in multiprocessing.get_all_start_methods(), 'Not found spawn method'
V.GENESIS_BLOCK = genesis_block
V.GENESIS_PARAMS = params
V.BECH32_HRP = params.get('hrp')
V.BLOCK_GENESIS_TIME = params.get('genesis_time')
V.BLOCK_MINING_SUPPLY = params.get('mining_supply')
V.BLOCK_TIME_SPAN = params.get('block_span')
V.BLOCK_REWARD = params.get('block_reward')
V.COIN_DIGIT = params.get('digit_number')
V.COIN_MINIMUM_PRICE = params.get('minimum_price')
V.BLOCK_CONSENSUSES = params.get('consensus')
GompertzCurve.k = V.BLOCK_MINING_SUPPLY
V.BRANCH_NAME = get_current_branch()
def check_already_started():
assert V.DB_HOME_DIR is not None
# check already started
pid_path = os.path.join(V.DB_HOME_DIR, 'pid.lock')
if os.path.exists(pid_path):
with open(pid_path, mode='r') as fp:
pid = int(fp.read())
if psutil.pid_exists(pid):
raise RuntimeError('Already running blockchain-py pid={}'.format(pid))
new_pid = os.getpid()
with open(pid_path, mode='w') as fp:
fp.write(str(new_pid))
log.info("create new process lock file pid={}".format(new_pid))
def console_args_parser():
"""get help by `python publicnode.py -h`"""
p = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
p.add_argument('--p2p',
help='p2p server bind port',
default=2000,
type=int)
p.add_argument('--rest',
help='REST API bind port',
default=3000,
type=int)
p.add_argument('--host',
help='REST API bind host',
default='127.0.0.1',
type=str)
p.add_argument('--user', '-u',
help='API user name',
default='user',
type=str)
p.add_argument('--password', '-p',
help='API password',
default='password',
type=str)
p.add_argument('--sub-dir',
help='setup blockchain folder path',
default=None)
p.add_argument('--log-level',
help='logging level',
choices=list(NAME2LEVEL),
default='INFO')
p.add_argument('--log-path',
help='recode log file path',
default=None,
type=str)
p.add_argument('--remove-log',
help='remove old log file when start program',
action='store_true')
p.add_argument('--daemon',
help='make process daemon',
action='store_true')
p.add_argument('--staking',
help='enable coin base staking',
action='store_true')
p.add_argument('--solo-mining',
help='solo mining for debug or testnet',
action='store_true')
return p.parse_args()
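
# Example invocation (flags defined above; values are illustrative):
#   python publicnode.py --p2p 2001 --rest 3001 --user admin --staking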
def check_process_status(f_daemon):
if sys.platform == 'win32':
# windows
if f_daemon:
if sys.executable.endswith("pythonw.exe"):
sys.stdout = open(os.devnull, "w")
sys.stderr = open(os.devnull, "w")
else:
print("ERROR: Please execute by `pythonw.exe` not `python.exe` if you enable daemon flag")
sys.exit()
else:
if sys.executable.endswith("pythonw.exe"):
print("ERROR: Please execute by `python.exe`")
sys.exit()
else:
# stdin close to prevent lock on console
sys.stdin.close()
else:
# other
if f_daemon:
pid = os.fork()
if pid == 0:
# child process (daemon)
sys.stdout = open(os.devnull, "w")
sys.stderr = open(os.devnull, "w")
else:
# main process
print("INFO: Make daemon process pid={}".format(pid))
sys.exit()
else:
# stdin close to prevent lock on console
sys.stdin.close()
class AESCipher:
@staticmethod
def create_key():
return os.urandom(AES.block_size)
@staticmethod
def encrypt(key, raw):
assert isinstance(key, bytes)
assert isinstance(raw, bytes), "input data is bytes"
key = SHA256.new(key).digest()[:AES.block_size]
raw = AESCipher._pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CBC, iv)
return iv + cipher.encrypt(raw)
@staticmethod
def decrypt(key, enc):
assert isinstance(key, bytes)
assert isinstance(enc, bytes), 'Encrypt data is bytes'
key = SHA256.new(key).digest()[:AES.block_size]
iv = enc[:AES.block_size]
cipher = AES.new(key, AES.MODE_CBC, iv)
raw = AESCipher._unpad(cipher.decrypt(enc[AES.block_size:]))
if len(raw) == 0:
raise ValueError("AES decryption error, not correct key")
else:
return raw
@staticmethod
def _pad(s):
        # PKCS#7-style padding: append `pad` copies of the byte value `pad`
        pad = AES.block_size - len(s) % AES.block_size
        return s + pad * pad.to_bytes(1, 'little')
@staticmethod
def _unpad(s):
        # the last byte encodes how many padding bytes were appended
        return s[:-ord(s[len(s) - 1:])]
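
    # Round-trip sketch (illustrative; not part of the original API):
    #   key = AESCipher.create_key()
    #   enc = AESCipher.encrypt(key, b'payload')
    #   assert AESCipher.decrypt(key, enc) == b'payload'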
class ProgressBar:
"""
terminal progressbar
original: https://github.com/bozoh/console_progressbar
author: Carlos Alexandre S. da Fonseca
"""
def __init__(self, prefix, default_suffix='', total=100, decimals=0, length=50, fill='X', zfill='-'):
self.prefix = prefix
self.default_suffix = default_suffix
self.__decimals = decimals
self.__length = length
self.__fill = fill
self.__zfill = zfill
self.__total = total
def _generate_bar(self, iteration, suffix=None):
percent = ("{0:." + str(self.__decimals) + "f}")
percent = percent.format(100 * (iteration / float(self.__total)))
filled_length = int(self.__length * iteration // self.__total)
bar = self.__fill * filled_length + self.__zfill * (self.__length - filled_length)
return '{0} |{1}| {2}% {3}'.format(self.prefix, bar, percent, suffix or self.default_suffix)
def print_progress_bar(self, iteration, suffix=None):
print('\r%s' % (self._generate_bar(iteration, suffix)), end='')
sys.stdout.flush()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self.print_progress_bar(self.__total, 'Complete')
print()
else:
print()
sys.stdout.flush()
log.error('Error on progress, {}'.format(exc_val))
        # returning True suppresses any exception raised inside the with-block
        return True
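
# Usage sketch:
#   with ProgressBar('sync', total=100) as pb:
#       for i in range(100):
#           pb.print_progress_bar(i + 1)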
__all__ = [
"set_database_path",
"set_blockchain_params",
"check_already_started",
"console_args_parser",
"check_process_status",
"AESCipher",
"ProgressBar",
]
| [ "[email protected]" ] | |
9c435a42cdc60fb08b9624fc926efccf8f66c4b1 | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc_asyncio.py | 1472c7e2d2d253937db5f0bc67d4dde86f67efb8 | [
"Apache-2.0"
] | permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 50,442 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings
from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.alloydb_v1.types import resources, service
from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
from .grpc import AlloyDBAdminGrpcTransport
class AlloyDBAdminGrpcAsyncIOTransport(AlloyDBAdminTransport):
"""gRPC AsyncIO backend transport for AlloyDBAdmin.
Service describing handlers for resources
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "alloydb.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "alloydb.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[aio.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def list_clusters(
self,
) -> Callable[
[service.ListClustersRequest], Awaitable[service.ListClustersResponse]
]:
r"""Return a callable for the list clusters method over gRPC.
Lists Clusters in a given project and location.
Returns:
Callable[[~.ListClustersRequest],
Awaitable[~.ListClustersResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_clusters" not in self._stubs:
self._stubs["list_clusters"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListClusters",
request_serializer=service.ListClustersRequest.serialize,
response_deserializer=service.ListClustersResponse.deserialize,
)
return self._stubs["list_clusters"]
@property
def get_cluster(
self,
) -> Callable[[service.GetClusterRequest], Awaitable[resources.Cluster]]:
r"""Return a callable for the get cluster method over gRPC.
Gets details of a single Cluster.
Returns:
Callable[[~.GetClusterRequest],
Awaitable[~.Cluster]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_cluster" not in self._stubs:
self._stubs["get_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetCluster",
request_serializer=service.GetClusterRequest.serialize,
response_deserializer=resources.Cluster.deserialize,
)
return self._stubs["get_cluster"]
@property
def create_cluster(
self,
) -> Callable[[service.CreateClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create cluster method over gRPC.
Creates a new Cluster in a given project and
location.
Returns:
Callable[[~.CreateClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_cluster" not in self._stubs:
self._stubs["create_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateCluster",
request_serializer=service.CreateClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_cluster"]
@property
def update_cluster(
self,
) -> Callable[[service.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update cluster method over gRPC.
Updates the parameters of a single Cluster.
Returns:
Callable[[~.UpdateClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_cluster" not in self._stubs:
self._stubs["update_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateCluster",
request_serializer=service.UpdateClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_cluster"]
@property
def delete_cluster(
self,
) -> Callable[[service.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete cluster method over gRPC.
Deletes a single Cluster.
Returns:
Callable[[~.DeleteClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_cluster" not in self._stubs:
self._stubs["delete_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteCluster",
request_serializer=service.DeleteClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_cluster"]
@property
def promote_cluster(
self,
) -> Callable[[service.PromoteClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the promote cluster method over gRPC.
Promotes a SECONDARY cluster. This turns down
replication from the PRIMARY cluster and promotes a
secondary cluster into its own standalone cluster.
Imperative only.
Returns:
Callable[[~.PromoteClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "promote_cluster" not in self._stubs:
self._stubs["promote_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/PromoteCluster",
request_serializer=service.PromoteClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["promote_cluster"]
@property
def restore_cluster(
self,
) -> Callable[[service.RestoreClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the restore cluster method over gRPC.
Creates a new Cluster in a given project and
location, with a volume restored from the provided
source, either a backup ID or a point-in-time and a
source cluster.
Returns:
Callable[[~.RestoreClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "restore_cluster" not in self._stubs:
self._stubs["restore_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/RestoreCluster",
request_serializer=service.RestoreClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["restore_cluster"]
@property
def create_secondary_cluster(
self,
) -> Callable[
[service.CreateSecondaryClusterRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create secondary cluster method over gRPC.
Creates a cluster of type SECONDARY in the given
location using the primary cluster as the source.
Returns:
Callable[[~.CreateSecondaryClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_secondary_cluster" not in self._stubs:
self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateSecondaryCluster",
request_serializer=service.CreateSecondaryClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_secondary_cluster"]
@property
def list_instances(
self,
) -> Callable[
[service.ListInstancesRequest], Awaitable[service.ListInstancesResponse]
]:
r"""Return a callable for the list instances method over gRPC.
Lists Instances in a given project and location.
Returns:
Callable[[~.ListInstancesRequest],
Awaitable[~.ListInstancesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instances" not in self._stubs:
self._stubs["list_instances"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListInstances",
request_serializer=service.ListInstancesRequest.serialize,
response_deserializer=service.ListInstancesResponse.deserialize,
)
return self._stubs["list_instances"]
@property
def get_instance(
self,
) -> Callable[[service.GetInstanceRequest], Awaitable[resources.Instance]]:
r"""Return a callable for the get instance method over gRPC.
Gets details of a single Instance.
Returns:
Callable[[~.GetInstanceRequest],
Awaitable[~.Instance]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance" not in self._stubs:
self._stubs["get_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetInstance",
request_serializer=service.GetInstanceRequest.serialize,
response_deserializer=resources.Instance.deserialize,
)
return self._stubs["get_instance"]
@property
def create_instance(
self,
) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create instance method over gRPC.
Creates a new Instance in a given project and
location.
Returns:
Callable[[~.CreateInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_instance" not in self._stubs:
self._stubs["create_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateInstance",
request_serializer=service.CreateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_instance"]
@property
def create_secondary_instance(
self,
) -> Callable[
[service.CreateSecondaryInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create secondary instance method over gRPC.
Creates a new SECONDARY Instance in a given project
and location.
Returns:
Callable[[~.CreateSecondaryInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_secondary_instance" not in self._stubs:
self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateSecondaryInstance",
request_serializer=service.CreateSecondaryInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_secondary_instance"]
@property
def batch_create_instances(
self,
) -> Callable[
[service.BatchCreateInstancesRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the batch create instances method over gRPC.
Creates new instances under the given project,
location and cluster. There can be only one primary
instance in a cluster. If the primary instance exists in
the cluster as well as this request, then API will throw
an error.
The primary instance should exist before any read pool
instance is created. If the primary instance is a part
of the request payload, then the API will take care of
creating instances in the correct order. This method is
here to support Google-internal use cases, and is not
meant for external customers to consume. Please do not
start relying on it; its behavior is subject to change
without notice.
Returns:
Callable[[~.BatchCreateInstancesRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "batch_create_instances" not in self._stubs:
self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/BatchCreateInstances",
request_serializer=service.BatchCreateInstancesRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["batch_create_instances"]
@property
def update_instance(
self,
) -> Callable[[service.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update instance method over gRPC.
Updates the parameters of a single Instance.
Returns:
Callable[[~.UpdateInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_instance" not in self._stubs:
self._stubs["update_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateInstance",
request_serializer=service.UpdateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_instance"]
@property
def delete_instance(
self,
) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete instance method over gRPC.
Deletes a single Instance.
Returns:
Callable[[~.DeleteInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_instance" not in self._stubs:
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteInstance",
request_serializer=service.DeleteInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"]
@property
def failover_instance(
self,
) -> Callable[
[service.FailoverInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the failover instance method over gRPC.
Forces a Failover for a highly available instance.
Failover promotes the HA standby instance as the new
primary. Imperative only.
Returns:
Callable[[~.FailoverInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "failover_instance" not in self._stubs:
self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/FailoverInstance",
request_serializer=service.FailoverInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["failover_instance"]
@property
def inject_fault(
self,
) -> Callable[[service.InjectFaultRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the inject fault method over gRPC.
Injects fault in an instance.
Imperative only.
Returns:
Callable[[~.InjectFaultRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "inject_fault" not in self._stubs:
self._stubs["inject_fault"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/InjectFault",
request_serializer=service.InjectFaultRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["inject_fault"]
@property
def restart_instance(
self,
) -> Callable[
[service.RestartInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the restart instance method over gRPC.
Restart an Instance in a cluster.
Imperative only.
Returns:
Callable[[~.RestartInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "restart_instance" not in self._stubs:
self._stubs["restart_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/RestartInstance",
request_serializer=service.RestartInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["restart_instance"]
@property
def list_backups(
self,
) -> Callable[[service.ListBackupsRequest], Awaitable[service.ListBackupsResponse]]:
r"""Return a callable for the list backups method over gRPC.
Lists Backups in a given project and location.
Returns:
Callable[[~.ListBackupsRequest],
Awaitable[~.ListBackupsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_backups" not in self._stubs:
self._stubs["list_backups"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListBackups",
request_serializer=service.ListBackupsRequest.serialize,
response_deserializer=service.ListBackupsResponse.deserialize,
)
return self._stubs["list_backups"]
@property
def get_backup(
self,
) -> Callable[[service.GetBackupRequest], Awaitable[resources.Backup]]:
r"""Return a callable for the get backup method over gRPC.
Gets details of a single Backup.
Returns:
Callable[[~.GetBackupRequest],
Awaitable[~.Backup]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_backup" not in self._stubs:
self._stubs["get_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetBackup",
request_serializer=service.GetBackupRequest.serialize,
response_deserializer=resources.Backup.deserialize,
)
return self._stubs["get_backup"]
@property
def create_backup(
self,
) -> Callable[[service.CreateBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create backup method over gRPC.
Creates a new Backup in a given project and location.
Returns:
Callable[[~.CreateBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_backup" not in self._stubs:
self._stubs["create_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateBackup",
request_serializer=service.CreateBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_backup"]
@property
def update_backup(
self,
) -> Callable[[service.UpdateBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update backup method over gRPC.
Updates the parameters of a single Backup.
Returns:
Callable[[~.UpdateBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_backup" not in self._stubs:
self._stubs["update_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateBackup",
request_serializer=service.UpdateBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_backup"]
@property
def delete_backup(
self,
) -> Callable[[service.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete backup method over gRPC.
Deletes a single Backup.
Returns:
Callable[[~.DeleteBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_backup" not in self._stubs:
self._stubs["delete_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteBackup",
request_serializer=service.DeleteBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_backup"]
@property
def list_supported_database_flags(
self,
) -> Callable[
[service.ListSupportedDatabaseFlagsRequest],
Awaitable[service.ListSupportedDatabaseFlagsResponse],
]:
r"""Return a callable for the list supported database flags method over gRPC.
Lists SupportedDatabaseFlags for a given project and
location.
Returns:
Callable[[~.ListSupportedDatabaseFlagsRequest],
Awaitable[~.ListSupportedDatabaseFlagsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_supported_database_flags" not in self._stubs:
self._stubs[
"list_supported_database_flags"
] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListSupportedDatabaseFlags",
request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize,
response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize,
)
return self._stubs["list_supported_database_flags"]
@property
def list_users(
self,
) -> Callable[[service.ListUsersRequest], Awaitable[service.ListUsersResponse]]:
r"""Return a callable for the list users method over gRPC.
Lists Users in a given project and location.
Returns:
Callable[[~.ListUsersRequest],
Awaitable[~.ListUsersResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_users" not in self._stubs:
self._stubs["list_users"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListUsers",
request_serializer=service.ListUsersRequest.serialize,
response_deserializer=service.ListUsersResponse.deserialize,
)
return self._stubs["list_users"]
@property
def get_user(self) -> Callable[[service.GetUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the get user method over gRPC.
Gets details of a single User.
Returns:
Callable[[~.GetUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_user" not in self._stubs:
self._stubs["get_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetUser",
request_serializer=service.GetUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["get_user"]
@property
def create_user(
self,
) -> Callable[[service.CreateUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the create user method over gRPC.
Creates a new User in a given project, location, and
cluster.
Returns:
Callable[[~.CreateUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_user" not in self._stubs:
self._stubs["create_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateUser",
request_serializer=service.CreateUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["create_user"]
@property
def update_user(
self,
) -> Callable[[service.UpdateUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the update user method over gRPC.
Updates the parameters of a single User.
Returns:
Callable[[~.UpdateUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_user" not in self._stubs:
self._stubs["update_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateUser",
request_serializer=service.UpdateUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["update_user"]
@property
def delete_user(
self,
) -> Callable[[service.DeleteUserRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete user method over gRPC.
Deletes a single User.
Returns:
Callable[[~.DeleteUserRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_user" not in self._stubs:
self._stubs["delete_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteUser",
request_serializer=service.DeleteUserRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_user"]
def close(self):
return self.grpc_channel.close()
@property
def delete_operation(
self,
) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
r"""Return a callable for the delete_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_operation" not in self._stubs:
self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/DeleteOperation",
request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
response_deserializer=None,
)
return self._stubs["delete_operation"]
@property
def cancel_operation(
self,
) -> Callable[[operations_pb2.CancelOperationRequest], None]:
r"""Return a callable for the cancel_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
)
return self._stubs["cancel_operation"]
@property
def get_operation(
self,
) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
r"""Return a callable for the get_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
self._stubs["get_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["get_operation"]
@property
def list_operations(
self,
) -> Callable[
[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
]:
r"""Return a callable for the list_operations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
self._stubs["list_operations"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
)
return self._stubs["list_operations"]
@property
def list_locations(
self,
) -> Callable[
[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
]:
r"""Return a callable for the list locations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_locations" not in self._stubs:
self._stubs["list_locations"] = self.grpc_channel.unary_unary(
"/google.cloud.location.Locations/ListLocations",
request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
response_deserializer=locations_pb2.ListLocationsResponse.FromString,
)
return self._stubs["list_locations"]
@property
def get_location(
self,
) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
r"""Return a callable for the list locations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_location" not in self._stubs:
self._stubs["get_location"] = self.grpc_channel.unary_unary(
"/google.cloud.location.Locations/GetLocation",
request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
response_deserializer=locations_pb2.Location.FromString,
)
return self._stubs["get_location"]
__all__ = ("AlloyDBAdminGrpcAsyncIOTransport",)
| [
"[email protected]"
] | |
668657bcff004b73d7f1774f4f953091a5bf649f | 3f55607c033fef615f8d0f9ef8d284f43d1709a1 | /shop/shop/settings.py | 04d5a80fe47afc58d6e082ce02f49aedb74d8b9d | [] | no_license | aakashres/shoppingcart | d37f7425f8585ac0463153a90ae4f1d2ed49c460 | 2060fac698130b78860072f5fcc0532ec716d087 | refs/heads/master | 2022-11-09T15:55:27.061262 | 2017-04-04T15:00:08 | 2017-04-04T15:00:08 | 273,651,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,505 | py | """
Django settings for shop project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_bai2f0i6@h=+dy+x1b(&i5$83kg0+g(rq6s5djrt=g+uunlvc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'account',
'cart',
'messaging',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'shop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'shop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'shop',
'USER': 'prixa',
'PASSWORD': 'prixatech',
'HOST': 'localhost',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kathmandu'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "root", "static_cdn")
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "root", "media_cdn")
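# A minimal hardening sketch (an assumption, not part of the original
# project): prefer environment variables for secrets so production values
# stay out of source control. The development defaults above are kept as
# fallbacks, so behavior is unchanged unless the variables are set.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
DEBUG = os.environ.get('DJANGO_DEBUG', '1') == '1'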
| [
"[email protected]"
] | |
24cdcbecc7eafa54f83bb32c05eaadece9ae923c | 24caa6710105a060fab2e17147e6d56609939011 | /05-Importing_Data_in_Python_(Part_1)/03-Working_with_relational_databases_in_Python/09-Pandas_for_more_complex_querying.py | c6ed202627f94fe3a86b7922d627daf248673cce | [] | no_license | inverseundefined/DataCamp | 99607022ad3f899d7681ad1f70fcedab290e269a | 7226b6b6f41888c3610a884db9a226e013d37e56 | refs/heads/master | 2022-01-10T00:53:21.714908 | 2019-07-24T13:27:49 | 2019-07-24T13:27:49 | 198,280,648 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | '''
Pandas for more complex querying
Here, you'll become more familiar with the pandas function read_sql_query() by using it to execute a more complex query: a SELECT statement followed by both a WHERE clause AND an ORDER BY clause.
You'll build a DataFrame that contains the rows of the Employee table for which the EmployeeId is greater than or equal to 6 and you'll order these entries by BirthDate.
Instructions
Using the function create_engine(), create an engine for the SQLite database Chinook.sqlite and assign it to the variable engine.
Use the pandas function read_sql_query() to assign to the variable df the DataFrame of results from the following query: select all records from the Employee table where the EmployeeId is greater than or equal to 6 and ordered by BirthDate (make sure to use WHERE and ORDER BY in this precise order).
'''
# Import packages
from sqlalchemy import create_engine
import pandas as pd
# Create engine: engine
engine = create_engine('sqlite:///Chinook.sqlite')
# Execute query and store records in DataFrame: df
df = pd.read_sql_query('SELECT * FROM Employee WHERE EmployeeId >= 6 ORDER BY BirthDate',engine)
# Print head of DataFrame
print(df.head())
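# Equivalent parameterized form (a sketch): wrapping the statement in
# sqlalchemy.text and binding 6 as a named parameter avoids interpolating
# values into the SQL string. Exact parameter style can vary with the
# pandas/driver versions, so treat this as illustrative.
from sqlalchemy import text

df_param = pd.read_sql_query(
    text('SELECT * FROM Employee WHERE EmployeeId >= :min_id ORDER BY BirthDate'),
    engine,
    params={'min_id': 6},
)
print(df_param.head())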
| [
"[email protected]"
] | |
0f9bf124f49507e8e88f9c99a67d39996068f0e1 | f090c3e0faa70cf0ef7c4be99cb894630bce2842 | /scripts_201410/simpleMeasurements/FFT/micromotioncompensate.py | 61e0fc2a67cd09a122b42c0821e42d4d1b12e7ff | [] | no_license | HaeffnerLab/resonator | 157d1dc455209da9b7de077157bda53b4883c8b7 | 7c2e377fdc45f6c1ad205f8bbc2e6607eb3fdc71 | refs/heads/master | 2021-01-09T20:48:03.587634 | 2016-09-22T18:40:17 | 2016-09-22T18:40:17 | 6,715,345 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | from FFT import measureFFT
import numpy as np
import labrad
import time
cxn = labrad.connect()
dv = cxn.data_vault
recordTime = 0.5 #seconds
average = 4
freqSpan = 50.0 #Hz
freqOffset = -889 #Hz, the offset between the counter clock and the rf synthesizer clock
#setting up FFT
fft = measureFFT(cxn, recordTime, average, freqSpan, freqOffset, savePlot = False)
#saving
dv.cd(['','QuickMeasurements','FFT', 'Compensation'],True)
name = dv.new('FFT',[('number', 'n')], [('FFTPeak','Arb','Arb')] )
dv.add_parameter('plotLive',True)
print 'Saving {}'.format(name)
for j in range(100):
micromotion = fft.getPeakArea(ptsAround = 3)
dv.add(j, micromotion)
print micromotion
| [
"[email protected]"
] | |
df7b27de7032e41159d2757d07e22dd5bf52718c | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /TensorFlow2/LanguageModeling/BERT/dllogger_class.py | be211785d770825978dc9b4cb32631e11f2435bc | [] | no_license | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 2,852 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from dllogger import Logger, StdOutBackend, JSONStreamBackend, Verbosity
import numpy
class dllogger_class():
def format_step(self, step):
if isinstance(step, str):
return step
elif isinstance(step, int):
return "Iteration: {} ".format(step)
elif len(step) > 0:
return "Iteration: {} ".format(step[0])
else:
return ""
def __init__(self, log_path="bert_dllog.json"):
self.logger = Logger([
StdOutBackend(Verbosity.DEFAULT, step_format=self.format_step),
JSONStreamBackend(Verbosity.VERBOSE, log_path),
])
self.logger.metadata("mlm_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("nsp_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("avg_loss_step", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("total_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("f1", {"unit": None, "format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("precision", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("recall", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("mcc", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("exact_match", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata(
"throughput_train",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "TRAIN"},
)
self.logger.metadata(
"throughput_inf",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "VAL"},
)
self.logger.metadata(
"throughput_val",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "VAL"},
)
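
# Usage sketch (assumption: the dllogger package is installed and the log
# path is writable). Logger.log takes a step (formatted by format_step above)
# plus a dict of metric values matching the metadata registered in __init__.
if __name__ == "__main__":
    dllog = dllogger_class(log_path="example_dllog.json")
    dllog.logger.log(step=(1,), data={"total_loss": 1.234, "throughput_train": 512.0})
    dllog.logger.flush()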
| [
"[email protected]"
] | |
6bec030a51b5bb4b0f123d9777dc394b085cf5e0 | 9eaa2c64a777bd24a3cccd0230da5f81231ef612 | /study/1905/month01/code/Stage5/day16/demo06_canny.py | 8cecd5c5324a39778bbcead274373be63fe735f3 | [
"MIT"
] | permissive | Dython-sky/AID1908 | 4528932f2ca66b844d8a3fcab5ed8bf84d20eb0c | 46cd54a7b36b5f009974f2bbb7005a4ad440ca1a | refs/heads/master | 2022-04-14T12:23:30.426270 | 2020-04-01T18:05:19 | 2020-04-01T18:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | """
demo06_canny.py  edge detection
"""
import cv2 as cv
original = cv.imread('../ml_data/chair.jpg',cv.IMREAD_GRAYSCALE)
print(original)
cv.imshow('original',original)
# Sobel edge detection (first-order derivative, along y here)
sobel = cv.Sobel(original,cv.CV_64F,0,1,ksize=5)
cv.imshow('sobel',sobel)
# Laplacian edge detection (second-order derivative)
laplacian = cv.Laplacian(original,cv.CV_64F)
cv.imshow('laplacian',laplacian)
# Canny edge detection (50 and 200 are the hysteresis thresholds)
canny = cv.Canny(original,50,200)
cv.imshow('canny',canny)
cv.waitKey() | [
"[email protected]"
] | |
81eb6216326223d83778b2d3bd64fbec29228251 | 73758dde83d1a1823c103e1a4ba71e7c95168f71 | /nsd2002/py02/day03/game_role.py | 65eea729683ff4a6c379867472ab679b07dec8fa | [] | no_license | tonggh220/md_5_nsd_notes | 07ffdee7c23963a7a461f2a2340143b0e97bd9e1 | a58a021ad4c7fbdf7df327424dc518f4044c5116 | refs/heads/master | 2023-07-02T01:34:38.798929 | 2021-05-12T08:48:40 | 2021-05-12T08:48:40 | 393,885,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | class Role:
def __init__(self, name, weapon):
        # Constructor method, called automatically on instantiation. Note that
        # self is not a keyword; any variable name would work here.
        # Attributes bound to the instance are visible and usable anywhere in the class.
self.name = name
self.weapon = weapon
def show_me(self):
        # Attributes bound to the instance are visible and usable anywhere in the class.
print('我是%s,我擅用%s' % (self.name, self.weapon))
def speak(self, words):
        # hh is not bound to the instance; it is just a local variable, only usable inside this method.
hh = 'Hahaha'
print(hh)
print(words)
if __name__ == '__main__':
    # The instance itself is passed automatically as the first argument (lb in this example).
    lb = Role('吕布', '方天画戟')  # Instantiate: create a concrete object.
print(lb.name, lb.weapon)
lb.show_me()
lb.speak('马中赤兔,人中吕布')
| [
"[email protected]"
] | |
3d87924ec7d7fd9fcc0bcf9142588b70d3044ea6 | 04e2a63c2a393ec3782a482b1734b6462c885d5d | /univelcity/open_file.py | a5d41c60faaaf3883d1b9e76f60d5a9ad4ae687c | [] | no_license | AzeezBello/_python | c1d671efbca2ed2ca7d65513efd2c55b496ddad7 | 266bc5aed9bfb93ea93b07712b48406331a9a327 | refs/heads/master | 2020-05-17T18:09:49.133120 | 2019-05-16T07:08:50 | 2019-05-16T07:08:50 | 183,876,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,299 | py | # file = open("death_causes.csv", "r")
# index = 0
# for line in file:
# index += 1
# print(line.split(","))
# if index == 3:
# break
# # Year,Cause Name,Cause Name,State,Deaths,Age-adjusted Death Rate
# file = open("death_causes.csv", "r")
# deaths = 0
# count = 0
# for line in file:
# if count == 0:
# pass
# else:
# raw = line.split(",")
# print(raw)
# if raw[0] == "2014":
# deaths += int(raw[4])
# count += 1
# print(deaths/365)
# Year,Cause Name,Cause Name,State,Deaths,Age-adjusted Death Rate
# with open("twist.txt", "r") as file:
# for line in file:
# print(line)
# file.close()
import pymysql.cursors
class Mortality:
def __init__(self, year, cause_name_full, cause_name, state, deaths, age_adjusted_death_rate):
        self.year = year
        self.cause_name_full = cause_name_full
        self.cause_name = cause_name
        self.state = state
        self.deaths = deaths
        # Strip the trailing newline left over from the raw CSV line.
        self.age_adjusted_death_rate = age_adjusted_death_rate[:-1]
# Connect to the database
connection = pymysql.connect(host='localhost',
user='root',
password='',
db='db',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
def create_table(name):
with connection.cursor() as cursor:
# Create a new record
try:
sql = f"""CREATE TABLE {name}
(id int NOT NULL PRIMARY KEY AUTO_INCREMENT,
year INT(4),
cause_name_full TEXT,
cause_name TEXT,
state VARCHAR(50),
deaths VARCHAR(50),
age_adjusted_death_rate VARCHAR(50))"""
cursor.execute(sql)
            # connection is not autocommit by default, so you must commit
            # to save your changes.
            connection.commit()
        except Exception:
            # CREATE TABLE fails if the table already exists (no IF NOT EXISTS above).
            print('Table Exists')
def open_file():
file = open("death_causes.csv", "r")
count = 0
for line in file:
if count == 0:
pass
else:
raw = line.split(",")
# print(raw)
new_mortality_object = Mortality( year = raw[0], cause_name_full = raw[1], cause_name= raw[2], state = raw[3], deaths = raw[4], age_adjusted_death_rate = raw[5])
post_to_db(new_mortality_object)
count += 1
def post_to_db(mortality_object):
with connection.cursor() as cursor:
# Create a new record
sql = f"""insert into mortality_rate (year, cause_name_full, cause_name, state, deaths, age_adjusted_death_rate)
values ("{mortality_object.year}", "{mortality_object.cause_name_full}", "{mortality_object.cause_name}", "{mortality_object.state}", "{mortality_object.deaths}", "{mortality_object.age_adjusted_death_rate}")"""
# print(sql)
cursor.execute(sql)
connection.commit()
#CREATE TABLE IN DATABASE
create_table("mortality_rate")
#THEN PUSH FILES INTO TABLE
open_file() | [
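# Safer variant (sketch; `post_to_db_safe` is a name introduced here, not part
# of the original script): the f-string INSERT above splices raw CSV fields
# straight into SQL, which breaks on quotes and invites injection. pymysql
# supports parameterized queries via cursor.execute(sql, args), which escapes
# the values for you.
def post_to_db_safe(mortality_object):
    with connection.cursor() as cursor:
        sql = ("insert into mortality_rate (year, cause_name_full, cause_name, "
               "state, deaths, age_adjusted_death_rate) "
               "values (%s, %s, %s, %s, %s, %s)")
        cursor.execute(sql, (mortality_object.year,
                             mortality_object.cause_name_full,
                             mortality_object.cause_name,
                             mortality_object.state,
                             mortality_object.deaths,
                             mortality_object.age_adjusted_death_rate))
    connection.commit()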
"[email protected]"
] | |
0332c4d5e620cd87f9b70d77e4f57a67c07e72a3 | 3b89c0a97ac6b58b6923a213bc8471e11ad4fe69 | /python/CodingExercises/MoveSpacesFrontString.py | af9641cf57932b4daa0e84d62d196bc3aa65de22 | [] | no_license | ksayee/programming_assignments | b187adca502ecf7ff7b51dc849d5d79ceb90d4a6 | 13bc1c44e1eef17fc36724f20b060c3339c280ea | refs/heads/master | 2021-06-30T07:19:34.192277 | 2021-06-23T05:11:32 | 2021-06-23T05:11:32 | 50,700,556 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,116 | py | '''
Move spaces to front of string in single traversal
Given a string that has set of words and spaces, write a program to move all spaces to front of string, by traversing the string only once.
Examples:
Input : str = "geeks for geeks"
Output : ste = " geeksforgeeks"
Input : str = "move these spaces to beginning"
Output : str = " movethesespacestobeginning"
There were four space characters in input,
all of them should be shifted in front.
'''
def MoveSpacesFrontString(str1):
    output_list = []
    lst = str1.split(' ')
    prev_word = ''
    for word in lst:
        if len(word) == 0:
            # Empty token means two adjacent spaces in the input; emit one space.
            output_list.append(' ')
        else:
            if len(prev_word) > 0:
                # Boundary between two non-empty words; emit its space.
                output_list.append(' ')
            prev_word = word
    # All original spaces are now at the front; append the words joined
    # with the spaces removed.
    output_list.append(''.join(lst))
    return ''.join(output_list)
def main():
str1="geeks for geeks"
print(MoveSpacesFrontString(str1))
str1 = "move these spaces to beginning"
print(MoveSpacesFrontString(str1))
str1 = "move these spaces to beginning"
print(MoveSpacesFrontString(str1))
if __name__=='__main__':
main() | [
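
# A genuinely single-traversal variant (sketch; this helper name is introduced
# here for illustration). split() above already walks the whole string, so the
# direct approach is to count spaces while collecting the other characters in
# one pass, then prepend the spaces.
def MoveSpacesFrontStringOnePass(str1):
    chars = []
    spaces = 0
    for ch in str1:  # single pass over the input
        if ch == ' ':
            spaces += 1
        else:
            chars.append(ch)
    return ' ' * spaces + ''.join(chars)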
"[email protected]"
] | |
b7aade2484b165d22de966e987fd39bcf4cf37f0 | 286df6528096b6393b61d3ecb3b7002cb9a7b983 | /python/ql/test/library-tests/frameworks/aiohttp/response_test.py | 1988f4435604cade3227c27d40ba902f6661df59 | [
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"LicenseRef-scancode-free-unknown",
"Python-2.0",
"MIT"
] | permissive | Inncee81/codeql | ed620df0ae7b706943eccd92af37e037f540f6a4 | 38a38fd2c145628472d14c9e9d6ca812fd525793 | refs/heads/main | 2023-06-13T01:23:30.086459 | 2021-06-22T10:59:44 | 2021-06-22T10:59:44 | 379,254,229 | 1 | 0 | MIT | 2021-06-22T12:02:02 | 2021-06-22T12:02:01 | null | UTF-8 | Python | false | false | 3,173 | py | from aiohttp import web
routes = web.RouteTableDef()
@routes.get("/raw_text") # $ routeSetup="/raw_text"
async def raw_text(request): # $ requestHandler
return web.Response(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/raw_body") # $ routeSetup="/raw_body"
async def raw_body(request): # $ requestHandler
return web.Response(body=b"foo") # $ HttpResponse mimetype=application/octet-stream responseBody=b"foo"
@routes.get("/html_text") # $ routeSetup="/html_text"
async def html_text(request): # $ requestHandler
return web.Response(text="foo", content_type="text/html") # $ HttpResponse mimetype=text/html responseBody="foo"
@routes.get("/html_body") # $ routeSetup="/html_body"
async def html_body(request): # $ requestHandler
return web.Response(body=b"foo", content_type="text/html") # $ HttpResponse mimetype=text/html responseBody=b"foo"
@routes.get("/html_body_set_later") # $ routeSetup="/html_body_set_later"
async def html_body_set_later(request): # $ requestHandler
resp = web.Response(body=b"foo") # $ HttpResponse mimetype=application/octet-stream responseBody=b"foo"
resp.content_type = "text/html" # $ MISSING: mimetype=text/html
return resp
# Each HTTP status code has an exception
# see https://docs.aiohttp.org/en/stable/web_quickstart.html#exceptions
@routes.get("/through_200_exception") # $ routeSetup="/through_200_exception"
async def through_200_exception(request): # $ requestHandler
raise web.HTTPOk(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/through_200_exception_html") # $ routeSetup="/through_200_exception_html"
async def through_200_exception(request): # $ requestHandler
exception = web.HTTPOk(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
exception.content_type = "text/html" # $ MISSING: mimetype=text/html
raise exception
@routes.get("/through_404_exception") # $ routeSetup="/through_404_exception"
async def through_404_exception(request): # $ requestHandler
raise web.HTTPNotFound(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/redirect_301") # $ routeSetup="/redirect_301"
async def redirect_301(request): # $ requestHandler
if not "kwarg" in request.url.query:
raise web.HTTPMovedPermanently("/login") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/login"
else:
raise web.HTTPMovedPermanently(location="/logout") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/logout"
@routes.get("/redirect_302") # $ routeSetup="/redirect_302"
async def redirect_302(request): # $ requestHandler
if not "kwarg" in request.url.query:
raise web.HTTPFound("/login") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/login"
else:
raise web.HTTPFound(location="/logout") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/logout"
if __name__ == "__main__":
app = web.Application()
app.add_routes(routes)
web.run_app(app)
| [
"[email protected]"
] | |
8400f0f8f16237cd362e0cc37f3436e13b3d755f | 82f6a6c50a1fef2d7522a43cc4f60e5ff80b37a8 | /solutions/Longest Word in Dictionary through Deleting/solution.py | 267c70a98bb61b70fe13d5f17a5e27cb662c0fae | [
"MIT"
] | permissive | nilax97/leetcode-solutions | ca0f9545ce70975617738f053e0935fac00b04d4 | d3c12f2b289662d199510e0431e177bbf3cda121 | refs/heads/master | 2023-05-14T02:21:48.893716 | 2021-06-08T13:16:53 | 2021-06-08T13:16:53 | 374,466,870 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 617 | py | class Solution:
    def findLongestWord(self, s: str, d: List[str]) -> str:
        # Prepend a sentinel so position 0 means "nothing matched yet".
        s = '_' + s
        # Subsequence automaton: nxt[j][c] = smallest index i > j with s[i] == c.
        n, nxt = len(s), [{} for _ in s]
        for i, c in enumerate(s):
            for j in range(i-1, -1, -1):
                nxt[j][c] = i
                if s[j] == c: break  # earlier positions already point to s[j]
        def find(word):
            # Walk the automaton; fail if some character has no next position.
            i = 0
            for c in word:
                i = nxt[i].get(c)
                if i is None: return False
            return True
        res = ""
        for word in d:
            # Prefer longer words; break ties lexicographically.
            if find(word) and (not res or (-len(word), word) < (-len(res), res)):
                res = word
        return res
| [
"[email protected]"
] | |
e9a1fed6a23067a05df9d37a4204e81098c48194 | b9bf3b34b59ec8e566b7ad6e58b7d0429370d6bd | /gunicorn_conf.py | 3b6bec2f43185136d7017ecf5ea3fe59f9f34931 | [] | no_license | dutradda/chunli | 7eea614b6c6c3c0738bec2f15d8224430e450a82 | 54e4385a34f805a2c13acdf85aec98d63c4eaff7 | refs/heads/master | 2021-08-16T09:22:45.388575 | 2020-09-03T12:55:33 | 2020-09-03T12:55:33 | 217,397,141 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | import os
import redis
def worker_exit(server, worker):
r = redis.Redis.from_url(os.environ.get('REDIS_TARGET', 'redis://'))
r.publish('chunli:distributed', 'stop')
def child_exit(server, worker):
r = redis.Redis.from_url(os.environ.get('REDIS_TARGET', 'redis://'))
r.publish('chunli:distributed', 'stop')
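
# Note on the two hooks above (per gunicorn's server-hook semantics):
# worker_exit runs inside the exiting worker process, while child_exit runs
# in the master process after a worker has died, so publishing the stop
# message from both covers clean and unclean worker shutdowns.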
| [
"[email protected]"
] | |
8fe248d9822eea62924d8b53b9b960bb32bfe359 | 6541487fb7df24610e5c61aa30d4a39b9117b427 | /tests/test_math_helpers.py | 6cf87e9dc244968d69684b98f2d4a3ab0f4b7c6f | [
"MIT"
] | permissive | theY4Kman/birdfeeder | 0e1f90a96b1607c0675ea3ab70a00fc99b97e7ac | 25503a138fe01589fb28317ae0f3e281d6ce1961 | refs/heads/master | 2023-04-21T11:23:07.699322 | 2021-03-24T08:36:13 | 2021-03-24T08:37:40 | 368,974,412 | 0 | 0 | MIT | 2021-05-19T19:03:43 | 2021-05-19T19:03:43 | null | UTF-8 | Python | false | false | 510 | py | from decimal import Decimal
from birdfeeder.math_helpers import safe_div, safe_mean
def test_safe_div_basic():
assert safe_div(10, 2) == 5.0
def test_safe_div_basic_decimal():
assert safe_div(Decimal(10), Decimal(2)) == Decimal(5)
def test_safe_div_zero_div():
assert safe_div(10, 0) == 0.0
def test_safe_mean_basic():
assert safe_mean([2, 4]) == 3.0
def test_safe_mean_empty():
assert safe_mean([]) == 0.0
def test_safe_mean_zero_values():
assert safe_mean([0, 0]) == 0.0
| [
"[email protected]"
] | |
b9b8b6190fea295a20706bf72e02f8bd6b16d816 | 0a15660807aee7d2fccbef1a3e633cabd1deb972 | /subway/models.py | 6dd5ae55f71cff97c7052df438f87e6a8c662e4e | [] | no_license | chirs/hs | 7860e77230cd2577cac79539039f0e2a7590ef35 | f1985e11a73b29fa8bf4fd1725c529ec8e61cb5b | refs/heads/master | 2021-01-21T10:42:15.789926 | 2017-02-28T20:12:31 | 2017-02-28T20:12:31 | 83,474,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,437 | py | from sqlalchemy import Table, Column, Integer, String, Boolean, DateTime, MetaData, ForeignKey, Text, Float
from sqlalchemy.orm import mapper
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine('sqlite:///:memory:', echo=True)
Base = declarative_base()
class Station(Base):
"""
A subway station, like "Atlantic - Pacific"
"""
__tablename__ = 'stations'
id = Column(Integer, primary_key=True)
sid = Column(String)
name = Column(String)
lat = Column(Float)
lng = Column(Float)
def __init__(self, sid, name, lat, lng):
self.sid = sid
self.name = name
self.lat = lat
self.lng = lng
class SubStation(Base):
"""
A subway substation, like 116N [116th Street North]
"""
__tablename__ = 'substations'
id = Column(Integer, primary_key=True)
pid = Column(Integer, ForeignKey('stations.id'))
name = Column(String)
class Route(Base):
"""
A subway route like 1 or D.
"""
__tablename__ = 'routes'
id = Column(Integer, primary_key=True)
rid = Column(String)
name = Column(String)
description = Column(String)
color = Column(String)
def __init__(self, rid, name, description, color):
self.rid = rid
self.name = name
self.description = description
self.color = color
Base.metadata.create_all(engine)
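
# Usage sketch (assumption: the standard SQLAlchemy session workflow; the
# station/route values below are illustrative, and with the in-memory engine
# above the data vanishes when the process exits).
from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=engine)
session = Session()
session.add(Route('1', '1 Train', 'Broadway - Seventh Avenue Local', '#EE352E'))
session.add(Station('635', 'Atlantic Av - Pacific St', 40.680829, -73.977666))
session.commit()
print(session.query(Station).count())  # -> 1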
| [
"[email protected]"
] | |
dd15eca3d521afbdc79bca58fa83066ccbc92404 | 3337e9150a743e0df2898528dd1e4dfac9730b25 | /artemis/general/mymath.py | e60d306dc6a969ee2e04e2cb9db36f9d9ba7edad | [] | no_license | ml-lab/artemis | f3353cb462b06d64e1007010db94667b4703c90e | b4f5f627f1798aff90b845d70fd582142a9f76c8 | refs/heads/master | 2021-01-22T06:49:41.346341 | 2017-09-01T15:31:13 | 2017-09-01T15:31:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,159 | py | import logging
from artemis.general.should_be_builtins import memoize, bad_value
import numpy as np
from scipy.stats import norm, mode as sp_mode
try:
from scipy import weave
except ImportError:
logging.warn("Could not import scipy.weave. That's ok, ignore this unless you need it.")
__author__ = 'peter'
# Note - this module used to be called math, but it somehow results in a numpy import error
# due to some kind of name conflict with another module called math.
sigm = lambda x: 1/(1+np.exp(-x))
def cummean(x, axis = None):
"""
Cumulative mean along axis
:param x: An array
:param axis: The axis
:return: An array of the same shape
"""
if axis is None:
assert isinstance(x, list) or x.ndim == 1, 'You must specify axis for a multi-dimensional array'
axis = 0
elif axis < 0:
axis = x.ndim+axis
x = np.array(x)
normalizer = np.arange(1, x.shape[axis]+1).astype(float)[(slice(None), )+(None, )*(x.ndim-axis-1)]
return np.cumsum(x, axis)/normalizer
def cumvar(x, axis = None, sample = True):
"""
:return: Cumulative variance along axis
"""
if axis is None:
assert isinstance(x, list) or x.ndim == 1, 'You must specify axis for a multi-dimensional array'
axis = 0
if not isinstance(x, np.ndarray):
x = np.array(x)
ex_2 = cummean(x, axis=axis)**2
e_x2 = cummean(x**2, axis=axis)
var = e_x2-ex_2
if sample and x.shape[axis] > 1:
var *= x.shape[axis]/float(x.shape[axis]-1)
return var
@memoize
def binary_permutations(n_bits):
"""
    Given some number of bits, return a shape (2**n_bits, n_bits) boolean array containing every permutation
of those bits as a row.
:param n_bits: An integer number of bits
    :return: A shape (2**n_bits, n_bits) boolean array containing every permutation
of those bits as a row.
"""
return np.right_shift(np.arange(2**n_bits)[:, None], np.arange(n_bits-1, -1, -1)[None, :]) & 1
def softmax(x, axis = None):
"""
The softmax function takes an ndarray, and returns an ndarray of the same size,
with the softmax function applied along the given axis. It should always be the
case that np.allclose(np.sum(softmax(x, axis), axis)==1)
"""
if axis is None:
        assert x.ndim==1, "You need to specify the axis for softmax if your data is more than 1-D"
axis = 0
x = x - np.max(x, axis=axis, keepdims=True) # For numerical stability - has no effect mathematically
expx = np.exp(x)
return expx/np.sum(expx, axis=axis, keepdims=True)
def expected_sigm_of_norm(mean, std, method = 'probit'):
"""
Approximate the expected value of the sigmoid of a normal distribution.
Thanks go to this guy:
http://math.stackexchange.com/questions/207861/expected-value-of-applying-the-sigmoid-function-to-a-normal-distribution
:param mean: Mean of the normal distribution
:param std: Standard Deviation of the normal distribution
:return: An approximation to Expectation(sigm(N(mu, sigma**2)))
"""
if method == 'maclauren-2':
eu = np.exp(-mean)
approx_exp = 1/(eu+1) + 0.5*(eu-1)*eu/((eu+1)**3) * std**2
return np.minimum(np.maximum(approx_exp, 0), 1)
elif method == 'maclauren-3':
eu = np.exp(-mean)
approx_exp = 1/(eu+1) + \
0.5*(eu-1)*eu/((eu+1)**3) * std**2 + \
(eu**3-11*eu**2+57*eu-1)/((8*(eu+1))**5) * std**4
return np.minimum(np.maximum(approx_exp, 0), 1)
elif method == 'probit':
return norm.cdf(mean/np.sqrt(2.892 + std**2))
else:
raise Exception('Method "%s" not known' % method)
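
def _check_expected_sigm_of_norm(mean=1.0, std=2.0, n_samples=100000):
    """
    Sanity-check sketch (a helper introduced here, not part of the original
    API): compare the probit approximation above against a Monte-Carlo
    estimate of E[sigm(N(mean, std**2))]. The tolerance is loose on purpose.
    """
    samples = np.random.randn(n_samples) * std + mean
    mc_estimate = sigm(samples).mean()
    approx = expected_sigm_of_norm(mean, std, method='probit')
    assert abs(mc_estimate - approx) < 0.02
    return mc_estimate, approx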
l1_error = lambda x1, x2: np.mean(np.abs(x1-x2), axis = -1)
def normalize(x, axis=None, degree = 2, avoid_nans = False):
"""
Normalize array x.
:param x: An array
:param axis: Which axis to normalize along
:param degree: Degree of normalization (1 for L1-norm, 2 for L2-norm, etc)
:param avoid_nans: If, along an axis, there is a norm of zero, then normalize this to a uniform vector (instead of nans).
:return: An array the same shape as x, normalized along the given axis
"""
assert degree in (1, 2), "Give me a reason and I'll give you more degrees"
if degree == 1:
z = np.sum(np.abs(x), axis = axis, keepdims=True)
else:
z = np.sum(x**degree, axis = axis, keepdims=True)**(1./degree)
normed = x/z
if avoid_nans:
uniform_vector_value = (1./x.shape[axis])**(1./degree)
normed[np.isnan(normed)] = uniform_vector_value
return normed
def mode(x, axis = None, keepdims = False):
mode_x, _ = sp_mode(x, axis = axis)
if not keepdims:
mode_x = np.take(mode_x, 0, axis = axis)
return mode_x
def cummode(x, weights = None, axis = 1):
"""
Cumulative mode along an axis. Ties give priority to the first value to achieve the
given count.
"""
assert x.ndim == 2 and axis == 1, 'Only implemented for a special case!'
all_values, element_ids = np.unique(x, return_inverse=True)
n_unique = len(all_values)
element_ids = element_ids.reshape(x.shape)
result = np.zeros(x.shape, dtype = int)
weighted = weights is not None
if weighted:
assert x.shape == weights.shape
counts = np.zeros(n_unique, dtype = float if weighted else int)
code = """
bool weighted = %s;
int n_samples = Nelement_ids[0];
int n_events = Nelement_ids[1];
for (int i=0; i<n_samples; i++){
float maxcount = 0;
int maxel = -1;
for (int k=0; k<n_unique; k++)
counts[k] = 0;
for (int j=0; j<n_events; j++){
int ix = i*n_events+j;
int k = element_ids[ix];
counts[k] += weighted ? weights[ix] : 1;
if (counts[k] > maxcount){
maxcount = counts[k];
maxel = k;
}
result[ix]=maxel;
}
}
""" % ('true' if weighted else 'false')
weave.inline(code, ['element_ids', 'result', 'n_unique', 'counts', 'weights'], compiler = 'gcc')
mode_values = all_values[result]
return mode_values
def angle_between(a, b, axis=None, in_degrees = False):
"""
Return the angle between two vectors a and b, in radians. Raise an exception if one is a zero vector
:param a: A vector
:param b: A vector the same size as a
:return: The angle between these vectors, in radians.
Credit to Pace: http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python
"""
cos_dist = cosine_distance(a, b, axis=axis)
angle = np.arccos(cos_dist)
if in_degrees:
angle = angle * 180/np.pi
return angle
def cosine_distance(a, b, axis=None):
"""
Return the cosine distance between two vectors a and b. Raise an exception if one is a zero vector
:param a: An array
:param b: Another array of the same shape
:return: The cosine distance between a and b, reduced along the given axis.
Credit to Pace: http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python
"""
a = np.array(a) if not isinstance(a, np.ndarray) else a
b = np.array(b) if not isinstance(b, np.ndarray) else b
if not a.dtype==float:
a=a.astype(float)
if not b.dtype==float:
b=b.astype(float)
if axis is None:
a = a.ravel()
b = b.ravel()
axis = 0
assert a.shape[-1]==b.shape[-1]
cosine_distance = (a*b).sum(axis=axis)/np.sqrt((a**2).sum(axis=axis) * (b**2).sum(axis=axis))
    # For numerical reasons, we might get values outside [-1, 1] here, so we truncate:
cosine_distance = np.minimum(cosine_distance, 1)
cosine_distance = np.maximum(cosine_distance, -1)
return cosine_distance
def degrees_between(a, b):
return angle_between(a, b, in_degrees=True)
def magnitude_ratio(a, b):
"""
Return the ratio of the L2-magnitudes of each vector
:param a: A vector
:param b: Another vector of the same size
    :return: The ratio |a|/|b| of the L2 magnitudes of the two vectors
"""
assert a.ndim == 1 and a.shape==b.shape
a_mag = np.sqrt(np.sum(a**2))
b_mag = np.sqrt(np.sum(b**2))
d_magnitude = a_mag/b_mag
return d_magnitude
def is_parallel(a, b, angular_tolerance = 1e-7):
"""
Test whether two vectors are parallel to within a given tolerance.
Throws an exception for zero-vectors.
:param a: A vector
:param b: A vector the same size as a
:param angular_tolerance: The tolerance, in radians.
:return: A boolean, indicating that the vectors are parallel to within the specified tolerance.
"""
    assert 0 <= angular_tolerance <= 2*np.pi, "It doesn't make sense to specify an angular tolerance outside of [0, 2*pi]. Why are you doing this?"
angle = angle_between(a, b)
return angle < angular_tolerance
def align_curves(xs, ys, n_bins='median', xrange = ('min', 'max'), spacing = 'lin'):
"""
Given multiple curves with different x-coordinates, interpolate so that each has the same x points.
:param xs: A length-N list of sorted vectors containing the x-coordinates of each curve
:param ys: A length-N list of vectors containing the corresponding y-coordinates
:param n_bins: Number of points to make along new x-axis. 'median' to use the median number of points in the curves.
:param xrange: 2-tuple indicating range of x-axis to span. 'min' indicates "minimum across curves", As with 'max'.
    :param spacing: Either 'lin' or 'log', depending on whether you want your interpolation points spaced linearly or
logarithmically.
:return: (new_xs, new_ys).
new_xs is a (n_bins, ) curve indicating the new x-locations.
new_ys is a (N, n_bins)
"""
assert spacing in ('lin', 'log')
assert len(xs)==len(ys)
assert all(len(x)==len(y) for x, y in zip(xs, ys))
start, stop = xrange
if start == 'min':
start = np.min([x[0] for x in xs if len(x)>0])
if stop == 'max':
stop = np.max([x[-1] for x in xs if len(x)>0])
if n_bins == 'median':
n_bins = int(np.round(np.median([len(x) for x in xs])))
new_x = np.linspace(start, stop, n_bins) if spacing=='lin' else np.logspace(np.log10(start), np.log10(stop), n_bins)
new_ys = np.zeros((len(xs), n_bins)) + np.nan
for x, y, ny in zip(xs, ys, new_ys):
if len(x)>=2:
ny[:] = np.interp(x=new_x, xp=x, fp=y, left=np.nan, right=np.nan)
return new_x, new_ys
def sqrtspace(a, b, n_points):
"""
:return: Distribute n_points quadratically from point a to point b, inclusive
"""
return np.linspace(0, 1, n_points)**2*(b-a)+a
def fixed_diff(x, axis=-1, initial_value = 0.):
"""
Modification of numpy.diff where the first element is compared to the initial value.
The resulting array has the same shape as x.
Note that this inverts np.cumsum so that np.cumsum(fixed_diff(x)) == x (except for numerical errors)
:param x: An array
:param axis: Axis along which to diff
    :param initial_value: The initial value against which to diff the first element along the axis.
:return: An array of the same shape, representing the difference in x along the axis.
"""
x = np.array(x, copy=False)
if axis<0:
axis = x.ndim+axis
result = np.empty_like(x)
initial_indices = (slice(None), )*axis
result[initial_indices+(slice(1, None), )] = np.diff(x, axis=axis)
if initial_value == 'first':
result[initial_indices+(0, )] = 0
else:
result[initial_indices+(0, )] = x[initial_indices+(0, )]-initial_value
return result
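
def _check_fixed_diff_inverts_cumsum():
    """
    Property-check sketch (a helper introduced here for illustration):
    fixed_diff is defined so that np.cumsum inverts it, i.e.
    np.cumsum(fixed_diff(x)) recovers x up to floating-point error.
    """
    x = np.array([3., 1., 4., 1., 5.])
    assert np.allclose(np.cumsum(fixed_diff(x)), x)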
def decaying_cumsum(x, memory, axis=-1):
if axis<0:
axis = x.ndim+axis
assert 0 <= memory < 1
result = np.empty_like(x)
leading_indices = (slice(None), )*axis
one_minus_mem = 1-memory
result[leading_indices+(0, )] = one_minus_mem*x[leading_indices+(0, )]
for i in xrange(1, x.shape[axis]):
result[leading_indices+(i, )] = memory*result[leading_indices+(i-1, )] + one_minus_mem*x[leading_indices+(i, )]
    if np.any(np.abs(result) > 1e9):
        print 'decaying_cumsum: result magnitude is exploding; max |x| = {}'.format(np.max(np.abs(x)))
return result
def point_space(start, stop, n_points, spacing):
if spacing=='lin':
values = np.linspace(start, stop, n_points)
elif spacing=='sqrt':
values = sqrtspace(start, stop, n_points)
elif spacing=='log':
values = np.logspace(np.log10(start), np.log10(stop), n_points)
else:
raise NotImplementedError(spacing)
return values
def geosum(rate, t_end, t_start=0):
"""
Geometric sum of a series from t_start to t_end
e.g. geosum(0.5, t_end=4, t_start=2) = 0.5**2 + 0.5**3 + 0.5**4 = 0.375
"""
return np.where(rate==1, np.array(t_end-t_start+1, copy=False).astype(float), np.array(rate**(t_end+1)-rate**t_start)/(rate-1))
def selective_sum(x, ixs):
"""
:param x: An nd array
:param ixs: A tuple of length x.ndim indexing each of the dimensions.
:return: A scalar sum of all array elements selected by any of the dimensions.
This is best explained by example:
a = np.array([[ 0, 1, 2, 3],
... [ 4, 5, 6, 7],
... [ 8, 9, 10, 11],
... [12, 13, 14, 15]])
If we want to add all elements rows 1, and 2, as well as the column, then we go:
s = selective_sum(a, [(1,3), 2])
And we can verify that:
s == 4+5+6+7 + 12+13+14+15 + 2+10 == 88
    If you don't want to select anything along a given dimension, pass an empty tuple for it.
"""
assert x.ndim==len(ixs), 'The dimension of x must match the length of ixs'
al = (slice(None), )
selection_mask = np.zeros(x.shape, dtype='bool')
for i, ix in enumerate(ixs):
selection_mask[al*i+(ix, )+al*(x.ndim-i-1)] = True
return (x*selection_mask).sum()
# Note, we'd like to do this more efficiently, but it gets a little complicated.
# (we have to add the individual indexes, but subtract the double-counted regions, and then subtract the triple-counted
# regions, and so on....)
# return sum(x[al*i+(ix, )+al*(x.ndim-i-1)].sum() for i, ix in enumerate(ixs)) - x[ixs].sum()
def conv_fanout(input_len, kernel_len, conv_mode):
"""
Note: this is horrific and must be simplified.
    :param input_len: Length of the input along the convolved axis
    :param kernel_len: Length of the convolution kernel
    :param conv_mode: 'same', 'valid', or an integer amount of zero-padding
    :return: An array of length input_len giving, for each input position, the
        number of kernel applications (outputs) it contributes to.
"""
left_pad = kernel_len / 2 if conv_mode == 'same' else 0 if conv_mode == 'valid' else conv_mode if isinstance(conv_mode, int) else bad_value(conv_mode)
right_pad = (kernel_len-1) / 2 if conv_mode == 'same' else 0 if conv_mode == 'valid' else conv_mode if isinstance(conv_mode, int) else bad_value(conv_mode)
full_range = np.arange(left_pad + input_len + right_pad)
max_fanout = np.minimum(kernel_len, np.maximum(input_len-kernel_len+1+2*left_pad, 1))
fanout_over_full_range = np.minimum(max_fanout, np.minimum(full_range+1, full_range[::-1]+1))
fanout = fanout_over_full_range[left_pad:len(full_range)-right_pad]
return fanout
def conv2_fanout_map(input_shape, kernel_shape, conv_mode):
size_y, size_x = input_shape
k_size_y, k_size_x = kernel_shape
y_fanout = conv_fanout(input_len = size_y, kernel_len=k_size_y, conv_mode=conv_mode)
x_fanout = conv_fanout(input_len = size_x, kernel_len=k_size_x, conv_mode=conv_mode)
fanout_map = y_fanout[:, None] * x_fanout
return fanout_map
def levenshtein_distance(s1, s2):
"""
The Levenshtein Distance (a type of edit distance) between strings
Thank you to Salvador Dali here: https://stackoverflow.com/a/32558749/851699
:param s1: A string
:param s2: Another String
:return: An integer distance.
"""
if len(s1) > len(s2):
s1, s2 = s2, s1
distances = range(len(s1) + 1)
for i2, c2 in enumerate(s2):
distances_ = [i2+1]
for i1, c1 in enumerate(s1):
if c1 == c2:
distances_.append(distances[i1])
else:
distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
distances = distances_
return distances[-1] | [
"[email protected]"
] | |
dc105c937af95e74bf4880b57361a7470c141909 | fb8cbebdf034b2f478943752d5443afc82c6eef5 | /tuirer/venv/lib/python3.6/site-packages/jedi/evaluate/dynamic.py | fe9d28e5d70906257d64b55fcc219bbc2f5d3c6a | [] | no_license | fariasjr/CitiTuirer | f64e0ec93ef088f8140bb0961d2ad4ed3b59448a | deb3f7a9c2d45b8a7f54639037f097b99abdac11 | refs/heads/master | 2020-03-24T05:10:36.261050 | 2018-08-01T20:24:30 | 2018-08-01T20:24:30 | 142,477,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,165 | py | """
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt about whether bar is an ``int`` or not, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.
It works as follows:
- |Jedi| sees a param
- search for function calls named ``foo``
- execute these calls and check the input.
"""
from jedi import debug, settings
from jedi.evaluate import imports
from jedi.evaluate.arguments import TreeArguments
from jedi.evaluate.base_context import ContextSet
from jedi.evaluate.cache import evaluator_function_cache
from jedi.evaluate.context import ModuleContext, instance
from jedi.evaluate.helpers import is_stdlib_path
from jedi.evaluate.param import create_default_params
from jedi.evaluate.utils import to_list
from jedi.parser_utils import get_parent_scope
from parso.python import tree
MAX_PARAM_SEARCHES = 20
class MergedExecutedParams(object):
"""
Simulates being a parameter while actually just being multiple params.
"""
def __init__(self, executed_params):
self._executed_params = executed_params
def infer(self):
return ContextSet.from_sets(p.infer() for p in self._executed_params)
@debug.increase_indent
def search_params(evaluator, execution_context, funcdef):
"""
A dynamic search for param values. If you try to complete a type:
>>> def func(foo):
... foo
>>> func(1)
>>> func("")
    It is not known what type ``foo`` is without analysing the whole code. You
have to look for all calls to ``func`` to find out what ``foo`` possibly
is.
"""
if not settings.dynamic_params:
return create_default_params(execution_context, funcdef)
evaluator.dynamic_params_depth += 1
try:
path = execution_context.get_root_context().py__file__()
if path is not None and is_stdlib_path(path):
# We don't want to search for usages in the stdlib. Usually people
# don't work with it (except if you are a core maintainer, sorry).
# This makes everything slower. Just disable it and run the tests,
# you will see the slowdown, especially in 3.6.
return create_default_params(execution_context, funcdef)
if funcdef.type == 'lambdef':
string_name = _get_lambda_name(funcdef)
if string_name is None:
return create_default_params(execution_context, funcdef)
else:
string_name = funcdef.name.value
debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA')
try:
module_context = execution_context.get_root_context()
function_executions = _search_function_executions(
evaluator,
module_context,
funcdef,
string_name=string_name,
)
if function_executions:
zipped_params = zip(*list(
function_execution.get_params()
for function_execution in function_executions
))
params = [MergedExecutedParams(executed_params) for executed_params in zipped_params]
# Evaluate the ExecutedParams to types.
else:
return create_default_params(execution_context, funcdef)
finally:
debug.dbg('Dynamic param result finished', color='MAGENTA')
return params
finally:
evaluator.dynamic_params_depth -= 1
@evaluator_function_cache(default=None)
@to_list
def _search_function_executions(evaluator, module_context, funcdef, string_name):
"""
Returns a list of param names.
"""
compare_node = funcdef
if string_name == '__init__':
cls = get_parent_scope(funcdef)
if isinstance(cls, tree.Class):
string_name = cls.name.value
compare_node = cls
found_executions = False
i = 0
for for_mod_context in imports.get_modules_containing_name(
evaluator, [module_context], string_name):
if not isinstance(module_context, ModuleContext):
return
for name, trailer in _get_possible_nodes(for_mod_context, string_name):
i += 1
# This is a simple way to stop Jedi's dynamic param recursion
            # from going wild: the deeper Jedi is in the recursion, the less
# code should be evaluated.
if i * evaluator.dynamic_params_depth > MAX_PARAM_SEARCHES:
return
random_context = evaluator.create_context(for_mod_context, name)
for function_execution in _check_name_for_execution(
evaluator, random_context, compare_node, name, trailer):
found_executions = True
yield function_execution
# If there are results after processing a module, we're probably
# good to process. This is a speed optimization.
if found_executions:
return
def _get_lambda_name(node):
stmt = node.parent
if stmt.type == 'expr_stmt':
first_operator = next(stmt.yield_operators(), None)
if first_operator == '=':
first = stmt.children[0]
if first.type == 'name':
return first.value
return None
def _get_possible_nodes(module_context, func_string_name):
try:
names = module_context.tree_node.get_used_names()[func_string_name]
except KeyError:
return
for name in names:
bracket = name.get_next_leaf()
trailer = bracket.parent
if trailer.type == 'trailer' and bracket == '(':
yield name, trailer
def _check_name_for_execution(evaluator, context, compare_node, name, trailer):
from jedi.evaluate.context.function import FunctionExecutionContext
def create_func_excs():
arglist = trailer.children[1]
if arglist == ')':
arglist = None
args = TreeArguments(evaluator, context, arglist, trailer)
if value_node.type == 'classdef':
created_instance = instance.TreeInstance(
evaluator,
value.parent_context,
value,
args
)
for execution in created_instance.create_init_executions():
yield execution
else:
yield value.get_function_execution(args)
for value in evaluator.goto_definitions(context, name):
value_node = value.tree_node
if compare_node == value_node:
for func_execution in create_func_excs():
yield func_execution
elif isinstance(value.parent_context, FunctionExecutionContext) and \
compare_node.type == 'funcdef':
# Here we're trying to find decorators by checking the first
# parameter. It's not very generic though. Should find a better
# solution that also applies to nested decorators.
params = value.parent_context.get_params()
if len(params) != 1:
continue
values = params[0].infer()
nodes = [v.tree_node for v in values]
if nodes == [compare_node]:
# Found a decorator.
module_context = context.get_root_context()
execution_context = next(create_func_excs())
for name, trailer in _get_possible_nodes(module_context, params[0].string_name):
if value_node.start_pos < name.start_pos < value_node.end_pos:
random_context = evaluator.create_context(execution_context, name)
iterator = _check_name_for_execution(
evaluator,
random_context,
compare_node,
name,
trailer
)
for function_execution in iterator:
yield function_execution
| [
"[email protected]"
] | |
1317aafa3a4fd100947fdd513f504d272f19b67c | 867c876541c29775bd5c1548a2ba59f0dc84737d | /MxShop/extra_apps/xadmin/sites.py | 4d151c4b285f85168ebbcd2566c77c2ade989a9c | [] | no_license | flowpig/daily_demos | be9c8aec7c8070e96ee7012b249c2f60e777e248 | b4bc7779c55ca0a02098c6dafe23a8f5af461182 | refs/heads/master | 2023-01-10T21:46:24.059317 | 2019-11-29T06:33:42 | 2019-11-29T06:33:42 | 117,111,372 | 0 | 0 | null | 2022-12-26T20:42:28 | 2018-01-11T14:29:56 | JavaScript | UTF-8 | Python | false | false | 15,072 | py | import sys
from functools import update_wrapper
from future.utils import iteritems
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.utils import six
from django.views.decorators.cache import never_cache
from django.template.engine import Engine
import inspect
if six.PY2 and sys.getdefaultencoding() == 'ascii':
import imp
imp.reload(sys)
sys.setdefaultencoding("utf-8")
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class MergeAdminMetaclass(type):
def __new__(cls, name, bases, attrs):
return type.__new__(cls, str(name), bases, attrs)
class AdminSite(object):
def __init__(self, name='xadmin'):
self.name = name
self.app_name = 'xadmin'
self._registry = {} # model_class class -> admin_class class
self._registry_avs = {} # admin_view_class class -> admin_class class
self._registry_settings = {} # settings name -> admin_class class
self._registry_views = []
# url instance contains (path, admin_view class, name)
self._registry_modelviews = []
# url instance contains (path, admin_view class, name)
self._registry_plugins = {} # view_class class -> plugin_class class
self._admin_view_cache = {}
# self.check_dependencies()
self.model_admins_order = 0
def copy_registry(self):
import copy
return {
'models': copy.copy(self._registry),
'avs': copy.copy(self._registry_avs),
'views': copy.copy(self._registry_views),
'settings': copy.copy(self._registry_settings),
'modelviews': copy.copy(self._registry_modelviews),
'plugins': copy.copy(self._registry_plugins),
}
def restore_registry(self, data):
self._registry = data['models']
self._registry_avs = data['avs']
self._registry_views = data['views']
self._registry_settings = data['settings']
self._registry_modelviews = data['modelviews']
self._registry_plugins = data['plugins']
def register_modelview(self, path, admin_view_class, name):
from xadmin.views.base import BaseAdminView
if issubclass(admin_view_class, BaseAdminView):
self._registry_modelviews.append((path, admin_view_class, name))
else:
raise ImproperlyConfigured(u'The registered view class %s isn\'t subclass of %s' %
(admin_view_class.__name__, BaseAdminView.__name__))
def register_view(self, path, admin_view_class, name):
self._registry_views.append((path, admin_view_class, name))
def register_plugin(self, plugin_class, admin_view_class):
from xadmin.views.base import BaseAdminPlugin
if issubclass(plugin_class, BaseAdminPlugin):
self._registry_plugins.setdefault(
admin_view_class, []).append(plugin_class)
else:
raise ImproperlyConfigured(u'The registered plugin class %s isn\'t subclass of %s' %
(plugin_class.__name__, BaseAdminPlugin.__name__))
def register_settings(self, name, admin_class):
self._registry_settings[name.lower()] = admin_class
def register(self, model_or_iterable, admin_class=object, **options):
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, ModelBase) or issubclass(model_or_iterable, BaseAdminView):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered(
'The model %s is already registered' % model.__name__)
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type(str("%s%sAdmin" % (model._meta.app_label, model._meta.model_name)), (admin_class,), options or {})
admin_class.model = model
admin_class.order = self.model_admins_order
self.model_admins_order += 1
self._registry[model] = admin_class
else:
if model in self._registry_avs:
raise AlreadyRegistered('The admin_view_class %s is already registered' % model.__name__)
if options:
options['__module__'] = __name__
admin_class = type(str(
"%sAdmin" % model.__name__), (admin_class,), options)
# Save the admin class in the registry (the class itself, not an instance)
self._registry_avs[model] = admin_class
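# Illustrative usage sketch (model/option names are hypothetical):
#
#   site.register(Article, ArticleAdmin, list_display=('title',))
#
# Keyword options are folded into a dynamically created subclass of the
# given admin class, as implemented above.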
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, (ModelBase, BaseAdminView)):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model not in self._registry:
raise NotRegistered(
'The model %s is not registered' % model.__name__)
del self._registry[model]
else:
if model not in self._registry_avs:
raise NotRegistered('The admin_view_class %s is not registered' % model.__name__)
del self._registry_avs[model]
def set_loginview(self, login_view):
self.login_view = login_view
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that LogEntry, ContentType and the
auth context processor are installed.
"""
from django.contrib.contenttypes.models import ContentType
if not ContentType._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
"your INSTALLED_APPS setting in order to use the admin application.")
default_template_engine = Engine.get_default()
if not ('django.contrib.auth.context_processors.auth' in default_template_engine.context_processors or
'django.core.context_processors.auth' in default_template_engine.context_processors):
raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request) and getattr(view, 'need_site_permission', True):
return self.create_admin_view(self.login_view)(request, *args, **kwargs)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
return update_wrapper(inner, view)
def _get_merge_attrs(self, option_class, plugin_class):
return dict([(name, getattr(option_class, name)) for name in dir(option_class)
if name[0] != '_' and not callable(getattr(option_class, name)) and hasattr(plugin_class, name)])
def _get_settings_class(self, admin_view_class):
name = admin_view_class.__name__.lower()
if name in self._registry_settings:
return self._registry_settings[name]
elif name.endswith('admin') and name[0:-5] in self._registry_settings:
return self._registry_settings[name[0:-5]]
elif name.endswith('adminview') and name[0:-9] in self._registry_settings:
return self._registry_settings[name[0:-9]]
return None
def _create_plugin(self, option_classes):
def merge_class(plugin_class):
if option_classes:
attrs = {}
bases = [plugin_class]
for oc in option_classes:
attrs.update(self._get_merge_attrs(oc, plugin_class))
meta_class = getattr(oc, plugin_class.__name__, getattr(oc, plugin_class.__name__.replace('Plugin', ''), None))
if meta_class:
bases.insert(0, meta_class)
if attrs:
plugin_class = MergeAdminMetaclass(
'%s%s' % (''.join([oc.__name__ for oc in option_classes]), plugin_class.__name__),
tuple(bases), attrs)
return plugin_class
return merge_class
def get_plugins(self, admin_view_class, *option_classes):
from xadmin.views import BaseAdminView
plugins = []
opts = [oc for oc in option_classes if oc]
for klass in admin_view_class.mro():
if klass == BaseAdminView or issubclass(klass, BaseAdminView):
merge_opts = []
reg_class = self._registry_avs.get(klass)
if reg_class:
merge_opts.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merge_opts.append(settings_class)
merge_opts.extend(opts)
ps = self._registry_plugins.get(klass, [])
plugins.extend(map(self._create_plugin(
merge_opts), ps) if merge_opts else ps)
return plugins
def get_view_class(self, view_class, option_class=None, **opts):
merges = [option_class] if option_class else []
for klass in view_class.mro():
reg_class = self._registry_avs.get(klass)
if reg_class:
merges.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merges.append(settings_class)
merges.append(klass)
new_class_name = ''.join([c.__name__ for c in merges])
if new_class_name not in self._admin_view_cache:
plugins = self.get_plugins(view_class, option_class)
self._admin_view_cache[new_class_name] = MergeAdminMetaclass(
new_class_name, tuple(merges),
dict({'plugin_classes': plugins, 'admin_site': self}, **opts))
return self._admin_view_cache[new_class_name]
def create_admin_view(self, admin_view_class):
return self.get_view_class(admin_view_class).as_view()
def create_model_admin_view(self, admin_view_class, model, option_class):
return self.get_view_class(admin_view_class, option_class).as_view()
def get_urls(self):
from django.urls import include, path, re_path
from xadmin.views.base import BaseAdminView
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n')
]
# Registered admin views
# inspect.isclass only checks whether the object is a class. This lets a
# registered view be a custom class that inherits from multiple views, or
# a callable returning URL patterns (handled by the else branch below).
urlpatterns += [
re_path(
_path,
wrap(self.create_admin_view(clz_or_func))
if inspect.isclass(clz_or_func) and issubclass(clz_or_func, BaseAdminView)
else include(clz_or_func(self)),
name=name
)
for _path, clz_or_func, name in self._registry_views
]
# Add in each model's views.
for model, admin_class in iteritems(self._registry):
view_urls = [
re_path(
_path,
wrap(self.create_model_admin_view(clz, model, admin_class)),
name=name % (model._meta.app_label, model._meta.model_name)
)
for _path, clz, name in self._registry_modelviews
]
urlpatterns += [
re_path(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(view_urls))
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), self.name, self.app_name
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
from django.views.i18n import JavaScriptCatalog
return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
def register(models, **kwargs):
def _model_admin_wrapper(admin_class):
site.register(models, admin_class)
return admin_class  # return the class so the decorator form does not replace it with None
return _model_admin_wrapper | [
"[email protected]"
] | |
b0da7bdba534730f35505b2301bd30a30bf8b8a2 | 26192962dc2627e7ca5f0e3b249c3fabcf52442c | /Python/AD-HOC/1196 - WERTYU.py | f1d867b9f14a29527c0d7a750ed75bcb36716f79 | [] | no_license | PierreVieira/URI | 77278ccb1724ca206ab2c12afbea1e51fa08ff73 | c1eb211c788d26b5cb9bedf5dda4147a2961fa19 | refs/heads/master | 2023-04-10T07:03:13.954639 | 2023-03-22T00:18:28 | 2023-03-22T00:18:28 | 189,321,748 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | """
Author: Pierre Vieira
Submission date: 02/02/2020 16:48:12
"""
linha = "`1234567890-=QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,./"  # keyboard rows: each typed key decodes to its left neighbor
while True:
s = ''
try:
frase = input()
except EOFError:
break
else:
for c in frase:
if c == ' ':
s += c
else:
s += linha[linha.find(c)-1]
print(s)
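# Example from the problem statement: the input line "O S, GOMR YPFSU/"
# decodes to "I AM FINE TODAY." (spaces are printed unchanged).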
| [
"[email protected]"
] | |
734bba3ac3df513251e2431b420b08c3a0bb20f7 | c2643fdff3185b659c2c7fa807d8b8d345a90343 | /tests/test_basic.py | 4bea68de088fd5206824e30ac834120108554bc5 | [
"BSD-2-Clause"
] | permissive | auxten/fhost | b39ae209a056b301e737d176f8f12dcafd82cfa2 | 6536c4955e13fd67c939a6fc6cc687d29e976d15 | refs/heads/master | 2021-01-16T00:35:43.304418 | 2012-06-25T10:17:52 | 2012-06-25T10:17:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,837 | py | #
## BEGIN LICENSE BLOCK
#
# Copyright (c) <2012>, Raul Perez <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
## END LICENSE BLOCK
#
import context
import unittest
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
def test_absolute_truth_and_meaning(self):
assert True
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
3ce934caaa6e0a49902a84d3e6ce84ac3d1aac37 | 5cb8df4d10cd1a1d77f227ea8e1b311744750d5b | /generate.py | b4ba55cf4e1d1accfe70b88346848e422bbf65cf | [
"CC0-1.0"
] | permissive | YoonGenwu/hearthstonejson | 388d46c5c082cde8389bef1011dded7d46fea7dc | 3d6709f99dc7d0c0b75ccf441cfebec00f48a184 | refs/heads/master | 2021-01-15T11:42:57.006639 | 2016-02-17T01:12:14 | 2016-02-17T01:12:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,707 | py | #!/usr/bin/env python
import os
import json
import sys
from argparse import ArgumentParser
from enum import IntEnum
from hearthstone.dbf import Dbf
from hearthstone.cardxml import load
from hearthstone.enums import CardType, Faction, GameTag, Locale, LOCALIZED_TAGS
MECHANICS_TAGS = [
GameTag.ADJACENT_BUFF,
GameTag.AURA,
GameTag.BATTLECRY,
GameTag.CHARGE,
GameTag.COMBO,
GameTag.DEATHRATTLE,
GameTag.DIVINE_SHIELD,
GameTag.ENRAGED,
GameTag.FORGETFUL,
GameTag.FREEZE,
GameTag.INSPIRE,
GameTag.MORPH,
GameTag.OVERLOAD,
GameTag.POISONOUS,
GameTag.SECRET,
GameTag.SILENCE,
GameTag.STEALTH,
GameTag.SPELLPOWER,
GameTag.TAG_ONE_TURN_EFFECT,
GameTag.TAUNT,
GameTag.TREASURE,
GameTag.WINDFURY,
GameTag.ImmuneToSpellpower,
GameTag.InvisibleDeathrattle,
]
def json_dump(obj, filename, pretty=False):
print("Writing to %r" % (filename))
if pretty:
kwargs = {"sort_keys": True, "indent": "\t", "separators": (",", ": ")}
else:
kwargs = {"separators": (",", ":")}
with open(filename, "w", encoding="utf8") as f:
json.dump(obj, f, ensure_ascii=False, **kwargs)
def show_field(card, k, v):
if k == "cost" and card.type not in (CardType.ENCHANTMENT, CardType.HERO):
return True
if k == "faction" and v == Faction.NEUTRAL:
return False
if k == "attack" and card.type in (CardType.MINION, CardType.WEAPON):
return True
if k == "health" and card.type in (CardType.MINION, CardType.HERO):
return True
if k == "durability" and card.type == CardType.WEAPON:
return True
return bool(v)
def get_mechanics(card):
ret = []
for tag in MECHANICS_TAGS:
value = card.tags.get(tag, 0)
if value:
ret.append(tag.name)
return ret
TAG_NAMES = {
GameTag.CARDNAME: "name",
GameTag.FLAVORTEXT: "flavortext",
GameTag.CARDTEXT_INHAND: "text",
GameTag.CardTextInPlay: "textInPlay",
GameTag.HOW_TO_EARN: "howToEarn",
GameTag.HOW_TO_EARN_GOLDEN: "howToEarnGolden",
GameTag.TARGETING_ARROW_TEXT: "targetingArrowText",
}
def serialize_card(card):
ret = {
"id": card.id,
"name": card.name,
"flavor": card.flavortext,
"text": card.description,
"textInPlay": card.playtext,
"howToEarn": card.how_to_earn,
"howToEarnGolden": card.how_to_earn_golden,
"targetingArrowText": card.targeting_arrow_text,
"artist": card.artist,
"faction": card.faction,
"playerClass": card.card_class,
"race": card.race,
"rarity": card.rarity,
"set": card.card_set,
"type": card.type,
"collectible": card.collectible,
"attack": card.atk,
"cost": card.cost,
"durability": card.durability,
"health": card.health,
}
ret = {k: v for k, v in ret.items() if show_field(card, k, v)}
for k, v in ret.items():
if isinstance(v, IntEnum):
ret[k] = v.name
mechanics = get_mechanics(card)
if mechanics:
ret["mechanics"] = mechanics
if card.entourage:
ret["entourage"] = card.entourage
if card.requirements:
ret["playRequirements"] = {k.name: v for k, v in card.requirements.items()}
if card.craftable:
ret["dust"] = card.crafting_costs + card.disenchant_costs
# if card.choose_cards:
# ret["chooseCards"] = card.choose_cards
return ret
def export_cards_to_file(cards, filename, locale):
ret = []
for card in cards:
card.locale = locale
ret.append(serialize_card(card))
json_dump(ret, filename)
def export_all_locales_cards_to_file(cards, filename):
ret = []
for card in cards:
obj = serialize_card(card)
for tag in LOCALIZED_TAGS:
if tag in TAG_NAMES:
value = card._localized_tags[tag]
if value:
obj[TAG_NAMES[tag]] = value
ret.append(obj)
json_dump(ret, filename)
def write_cardbacks(dbf, filename, locale):
ret = []
for record in dbf.records:
ret.append({
"id": record["ID"],
"note_desc": record["NOTE_DESC"],
"source": record["SOURCE"],
"enabled": record["ENABLED"],
"name": record.get("NAME", {}).get(locale.name, ""),
"prefab_name": record.get("PREFAB_NAME", ""),
"description": record.get("DESCRIPTION", {}).get(locale.name, ""),
"source_description": record.get("SOURCE_DESCRIPTION", {}).get(locale.name, ""),
})
json_dump(ret, filename)
def main():
parser = ArgumentParser()
parser.add_argument(
"-o", "--output-dir",
type=str,
dest="output_dir",
default="out",
help="Output directory"
)
parser.add_argument(
"-i", "--input-dir",
type=str,
dest="input_dir",
default="hs-data",
help="Input hs-data directory"
)
args = parser.parse_args(sys.argv[1:])
db, xml = load(os.path.join(args.input_dir, "CardDefs.xml"))
dbf_path = os.path.join(args.input_dir, "DBF", "CARD_BACK.xml")
if not os.path.exists(dbf_path):
print("Skipping card back generation (%s does not exist)" % (dbf_path))
dbf = None
else:
dbf = Dbf.load(dbf_path)
cards = db.values()
collectible_cards = [card for card in cards if card.collectible]
for locale in Locale:
if locale.unused:
continue
basedir = os.path.join(args.output_dir, locale.name)
if not os.path.exists(basedir):
os.makedirs(basedir)
filename = os.path.join(basedir, "cards.json")
export_cards_to_file(cards, filename, locale.name)
filename = os.path.join(basedir, "cards.collectible.json")
export_cards_to_file(collectible_cards, filename, locale.name)
if dbf is not None:
filename = os.path.join(basedir, "cardbacks.json")
write_cardbacks(dbf, filename, locale)
# Generate merged locales
basedir = os.path.join(args.output_dir, "all")
if not os.path.exists(basedir):
os.makedirs(basedir)
filename = os.path.join(basedir, "cards.json")
export_all_locales_cards_to_file(cards, filename)
filename = os.path.join(basedir, "cards.collectible.json")
export_all_locales_cards_to_file(collectible_cards, filename)
if __name__ == "__main__":
main()
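# Example invocation (directories are the argparse defaults above):
#   python generate.py --input-dir hs-data --output-dir out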
| [
"[email protected]"
] | |
1e1c3159a79488453e4810b9362f7850f72e9c90 | f68eda51246c95597def569224f3b56d4c3700e7 | /top/api/rest/SellercenterUserPermissionsGetRequest.py | a3f561db414e9ebc103b8c2d04ac8c7b445babb9 | [
"MIT",
"BSD-3-Clause"
] | permissive | stoensin/taobao-openapi | 47de8fb29ae2d8ce47d4fce07c0ccaeaee1ef91f | 202a9df2085229838541713bd24433a90d07c7fc | refs/heads/main | 2023-07-17T02:17:51.527455 | 2021-08-25T15:08:49 | 2021-08-25T15:08:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | '''
Created by auto_sdk on 2018.07.25
'''
from top.api.base import RestApi
class SellercenterUserPermissionsGetRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.nick = None
def getapiname(self):
return 'taobao.sellercenter.user.permissions.get'
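# Illustrative usage sketch -- this assumes the usual Taobao TOP Python SDK
# conventions for RestApi subclasses (set_app_info/getResponse); verify the
# exact method names against your SDK version:
#
#   req = SellercenterUserPermissionsGetRequest()
#   req.set_app_info(top.appinfo("your-appkey", "your-secret"))
#   req.nick = "seller_nick"
#   resp = req.getResponse()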
| [
"[email protected]"
] | |
1daefdaaf3cdc9dbbd4d888acd5c05d94d6285dd | 85c337f0364f1452c068b7e93421b3e24af85358 | /MzManage/manage.py | 362fb7bb3d7af3d8d0dfab2d09b3c4fb6b0b78a7 | [] | no_license | hornLK/AuthSystemWeb | 9518f23453f910e17c516db26ea3a00fe0d0c806 | c2c03ff2133151889a2ecc205a753a0eb2bbfd91 | refs/heads/master | 2022-12-14T19:18:00.560077 | 2018-04-19T12:39:14 | 2018-04-19T12:39:14 | 130,317,561 | 0 | 0 | null | 2022-12-08T00:59:04 | 2018-04-20T06:17:08 | JavaScript | UTF-8 | Python | false | false | 540 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MzManage.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
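# Standard Django entry point; typical invocations:
#   python manage.py migrate
#   python manage.py runserver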
| [
"[email protected]"
] | |
347502a5063ca3f7fdbb96e81aadf62f71a48dae | 97e534b26a76bf0d954e166841179979748bcfa2 | /objects/migrations/0046_auto_20180625_0823.py | d6855e81eb891d0362368b4d406690be5fbde2c7 | [] | no_license | mehdi1361/http_server | 3a8bd73ce44307ee2b7761d1211671ca8cb0f3ba | d8a962c55165ef0237bfb26d27d9cfa11a415a5d | refs/heads/develop | 2022-12-11T00:44:11.089407 | 2019-01-20T12:02:48 | 2019-01-20T12:02:48 | 166,656,299 | 0 | 0 | null | 2022-12-07T23:53:22 | 2019-01-20T12:02:05 | HTML | UTF-8 | Python | false | false | 958 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-25 08:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('objects', '0045_auto_20180625_0724'),
]
operations = [
migrations.AddField(
model_name='league',
name='play_off_start_gem_1',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem 1'),
),
migrations.AddField(
model_name='league',
name='play_off_start_gem_2',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem 2'),
),
migrations.AlterField(
model_name='league',
name='play_off_start_gem',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem '),
),
]
| [
"[email protected]"
] | |
3fb2a9f1ae58ad0743c9d750da7afc275cf304bf | 0cc4eb3cb54f8394c127ace62d3108fdb5230c85 | /.spack-env/view/lib/python3.7/test/test_multibytecodec.py | 7384d370794ac8065a971022cc5463d45a9edc2d | [] | no_license | jacobmerson/spack-develop-env | 5b2d76f58c0b64ae97c64f77a3c4d33a770c71c8 | 5fca20ca343b1a76f05fc635c87f94ed25417d94 | refs/heads/master | 2022-07-04T02:22:50.264727 | 2020-05-06T05:13:50 | 2020-05-06T05:13:50 | 261,657,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | /lore/mersoj/spack/spack/opt/spack/linux-rhel7-x86_64/gcc-7.3.0/python-3.7.7-oihhthdoxtgh4krvzpputn5ozwcnq2by/lib/python3.7/test/test_multibytecodec.py | [
"[email protected]"
] | |
914e5a276b7849b267a4458ca7c0afd16ec3f18e | 3f73ce74b6fdfb7966abb71a98f4986edd727c5f | /lib/config.py | 9d9e5784d61265a408685b6fae7a08e8e51d01e0 | [
"MIT"
] | permissive | yuta-komura/amateras | 9c2efd310b18f159b1354864d65f9894ab93737f | cf8cc8fe0b5d8c382090fd1784a3ce96e6953157 | refs/heads/master | 2023-01-21T19:57:18.763894 | 2020-11-25T04:02:28 | 2020-11-25T04:02:28 | 297,432,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | from enum import Enum
PROJECT_DIR = __file__.replace("/lib/config.py", "")
class HistoricalPrice(Enum):
TIME_FRAME = 60 # minutes
CHANNEL_WIDTH = 67
class DATABASE(Enum):
class TRADINGBOT(Enum):
HOST = "*********"
USER = "*********"
PASSWORD = "*********"
DATABASE = "*********"
class Bitflyer(Enum):
class Api(Enum):
KEY = "*********"
SECRET = "*********"
class DirPath(Enum):
PROJECT = PROJECT_DIR
class FilePath(Enum):
WARNING_MP3 = PROJECT_DIR + "/sound/WARNING.mp3"
ERROR_MP3 = PROJECT_DIR + "/sound/ERROR.mp3"
SYSTEM_LOG = PROJECT_DIR + "/log/system.log"
AA = PROJECT_DIR + "/document/AA.txt"
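# Illustrative access sketch: enum members are read through .value, e.g.
#   from lib.config import FilePath
#   log_path = FilePath.SYSTEM_LOG.value   # "<PROJECT_DIR>/log/system.log"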
| [
"[email protected]"
] | |
67fc115da063c9287e6ada76e5e4d1b617f534dd | 1676168244eed1c5610b2c1c38f692f89990b112 | /part3-python/Bigdata/ComStat_v0.15.py | 88aa905773aed8169fd5bcd5f19bd774c6f5136a | [] | no_license | gtpgg1013/AI_docs | 351e83f986d66224c82fff2de944753c98336d03 | 43f8eed8b2732314bd40ed65e1d7eb44dd28fc04 | refs/heads/master | 2022-12-09T17:32:02.992554 | 2019-11-20T09:03:56 | 2019-11-20T09:03:56 | 182,927,565 | 1 | 0 | null | 2022-12-08T06:50:23 | 2019-04-23T03:54:56 | Jupyter Notebook | UTF-8 | Python | false | false | 26,630 | py | import matplotlib
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
from tkinter import *
import matplotlib.animation as animation
import psutil
import threading
import time
import math
import datetime
import platform
from tkinter import font
import cv2
import numpy as np
from PIL import Image, ImageFilter, ImageEnhance, ImageOps
LARGE_FONT = ("Verdana", 12)
class SeaofBTCapp(Tk): # subclass of Tk
def __init__(self, *args, **kwargs):
Tk.__init__(self, *args, **kwargs) # also run the parent Tk initializer
Tk.iconbitmap(self) # window icon
Tk.title(self, "Comstat v0.15") # set the window title
Tk.wm_geometry(self,"1180x590")
Tk.wm_resizable(self, width=False, height=False)
container = Frame(self) # create the container frame
container.pack(side="top", fill="both", expand=True) # attach the container
container.grid_rowconfigure(0, weight=1) # configure the row
container.grid_columnconfigure(0, weight=1) # configure the column
self.frames = {} # declare the frames dictionary field
for F in (StartPage, PageOne, PageTwo, PageThree, PageFour):
frame = F(container, self)
# print(frame)
self.frames[F] = frame # store it in the dictionary
frame.grid(row=0, column=0, sticky="nsew")
self.show_frame(StartPage) # show the start page
self.cpuflag = False
self.tottime = 0
self.limit = 80.
import smtplib
from email.mime.text import MIMEText
from datetime import datetime
def cpuSendEmail():
timer = threading.Timer(1, cpuSendEmail)
tmptime = psutil.cpu_percent()
if tmptime > self.limit:
# print(tmptime)
self.cpuflag = True
if tmptime > self.limit and self.cpuflag == True:
self.tottime += 1
else:
self.tottime = 0
self.cpuflag = False
if self.tottime > 4:
try:
print("over, send a email to the user")
############ 메일 보내기 ###############
s = smtplib.SMTP('smtp.gmail.com',587)
s.starttls()
s.login('[email protected]','')
msg = MIMEText('CPU 수치가 '+str(self.limit)+"을 초과한 지 "+str(self.tottime)+"초 되었습니다."
"컴퓨터 사용량을 확이핸주세요.")
msg['Subject'] = "현재시각: "+str(datetime.now()) + "CPU 사용량 임계점 초과 경고 메일"
s.sendmail("[email protected]","[email protected]",msg.as_string())
s.quit()
############ 메일 송신 완료 ############
self.cpuflag == False
except:
pass
timer.start()
cpuSendEmail()
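# cpuSendEmail re-arms its threading.Timer every second, so CPU usage is
# sampled once per second; five consecutive samples above self.limit
# (80% by default) trigger a single warning mail.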
def show_frame(self, cont): # raise the requested page
frame = self.frames[cont] # look the frame up in the frames dictionary
frame.tkraise() # bring this frame to the front when several are stacked
class StartPage(Frame): # first page; widgets created with self as parent attach to this Frame
def __init__(self, parent, controller): # subclass of Frame
Frame.__init__(self, parent)
bigFont = font.Font(self, family='Courier',size=40,weight='bold')
label = Label(self, text="COM_STAT v0.15", font=bigFont, height=1) # title label
label.pack(pady=50, padx=10)
button = Button(self, text="Static Indications",
command=lambda: controller.show_frame(PageOne))
button.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Stats",
command=lambda: controller.show_frame(PageThree))
button3.pack()
button4 = Button(self, text="CPU & RAM Usage",
command=lambda: controller.show_frame(PageFour))
button4.pack()
mName = Label(self, text=platform.machine(), font=LARGE_FONT)
dName = Label(self, text=platform.node(), font=LARGE_FONT)
pName = Label(self, text=platform.platform(), font=LARGE_FONT)
procName = Label(self, text=platform.processor(), font=LARGE_FONT)
cName = Label(self, text=platform.python_compiler(), font=LARGE_FONT)
pVer = Label(self, text="Python version : "+platform.python_branch(), font=LARGE_FONT)
mName.pack(side=BOTTOM,expand=YES)
dName.pack(side=BOTTOM,expand=YES)
pName.pack(side=BOTTOM,expand=YES)
procName.pack(side=BOTTOM,expand=YES)
cName.pack(side=BOTTOM,expand=YES)
pVer.pack(side=BOTTOM,expand=YES)
class PageOne(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="Static Indications", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Status",
command=lambda: controller.show_frame(PageThree))
button3.pack()
# Label
cpuFreq_c = Label(self, text="CPUFreq - current : "+str(psutil.cpu_freq().current))
cpuFreq_mx = Label(self, text="CPUFreq - max : " + str(psutil.cpu_freq().max))
cpuFreq_min = Label(self, text="CPUFreq - min : " + str(psutil.cpu_freq().min))
hard_readCount = Label(self, text="Hard - readcount : " + str(psutil.disk_io_counters().read_count>>20))
hard_writeCount = Label(self, text="Hard - writecount : " + str(psutil.disk_io_counters().write_count>>20))
hard_readBytes = Label(self, text="Hard - readbytes : " + str(psutil.disk_io_counters().read_bytes>>20))
hard_writeBytes = Label(self, text="Hard - writebytes : " + str(psutil.disk_io_counters().write_bytes>>20))
hard_readTime = Label(self, text="Hard - read_time : " + str(psutil.disk_io_counters().read_time))
hard_writeTime = Label(self, text="Hard - write_time : "+str(psutil.disk_io_counters().write_time))
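# note: '이더넷' is the Korean Windows display name of the Ethernet adapter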
netAddr_fam_MAC = Label(self, text="Network Address - family MAC : " + str(psutil.net_if_addrs()['이더넷'][0][1]))
netAddr_IP = Label(self, text="Network Address - IP : " + str(psutil.net_if_addrs()['이더넷'][1][1]))
netAddr_netmask = Label(self, text="Network Address - netmask : " + str(psutil.net_if_addrs()['이더넷'][1][2]))
memory_total = Label(self, text="Memory - total : "+str(psutil.virtual_memory().total))
memory_available = Label(self, text="Memory - available : "+str(psutil.virtual_memory().available))
dt = datetime.datetime.fromtimestamp(psutil.boot_time()).strftime("%Y-%m-%d %H:%M:%S")
bootTime = Label(self, text="Boot Time : "+str(dt))
UserName = Label(self, text="User name : "+str(psutil.users()[0].name))
# pack
cpuFreq_c.pack()
cpuFreq_mx.pack()
cpuFreq_min.pack()
hard_readCount.pack()
hard_writeCount.pack()
hard_readBytes.pack()
hard_writeBytes.pack()
hard_readTime.pack()
hard_writeTime.pack()
netAddr_fam_MAC.pack()
netAddr_IP.pack()
netAddr_netmask.pack()
# netAddr_broadcast.pack()
# netAddr_ptp.pack()
memory_total.pack()
memory_available.pack()
bootTime.pack()
UserName.pack()
class PageTwo(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU times", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU status",
command=lambda: controller.show_frame(PageThree))
button2.pack()
button3 = Button(self, text="CPU & RAM",
command=lambda: controller.show_frame(PageFour))
button3.pack()
canvasforPic = Canvas(self)
cpuTime1 = Label(canvasforPic, text="CPUTime-user: " + str(psutil.cpu_times().user))
cpuTime2 = Label(canvasforPic, text="CPUTime-system: " + str(psutil.cpu_times().system))
cpuTime3 = Label(canvasforPic, text="CPUTime-idle: " + str(psutil.cpu_times().idle))
cpuTime4 = Label(canvasforPic, text="CPUTime-interrupt: " + str(psutil.cpu_times().interrupt))
ylim = 0
tcpuTimeInd = psutil.cpu_times()
tcpuTimeList = [tcpuTimeInd.user, tcpuTimeInd.system, tcpuTimeInd.idle, tcpuTimeInd.interrupt]
for tcpu in tcpuTimeList:
if ylim < tcpu:
ylim = tcpu
ylim *= 0.1
cpuTime1.pack()
cpuTime2.pack()
cpuTime3.pack()
cpuTime4.pack()
canvasforPic.pack(side=RIGHT)
# current time, used below
nowtime = 0
def refreshHWIndicators(): # refresh the label contents every second
# global cpuUser, cpuSys, cpuI, cpuC
# global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # used below
try:
timer = threading.Timer(1, refreshHWIndicators)
cpuTime1.configure(text="CPUTime-user: " + str(psutil.cpu_times().user))
cpuTime2.configure(text="CPUTime-system: " + str(psutil.cpu_times().system))
cpuTime3.configure(text="CPUTime-idle: " + str(psutil.cpu_times().idle))
cpuTime4.configure(text="CPUTime-interrupt: " + str(psutil.cpu_times().interrupt))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# first goal: plot the four CPU time series dynamically at once
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU time")
ax.set_ylim(0,ylim *1.2)
ax.set_xlim(0,5.0)
ax.grid(True)
ax.set_ylabel("CPU time")
ax.set_xlabel("Time")
# Data Placeholders
cpuUser = np.zeros(0)
cpuSys = np.zeros(0)
cpuI = np.zeros(0)
cpuC = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpuUser, = ax.plot(t, cpuUser, 'b-', label="CPU User")
plotCpuSys, = ax.plot(t, cpuSys, 'g-', label="CPU System")
plotCpuI, = ax.plot(t, cpuI, 'r-', label="CPU Idle")
plotCpuC, = ax.plot(t, cpuC, 'd-', label="CPU Interrupt")
ax.legend([plotCpuUser, plotCpuSys, plotCpuI, plotCpuC],\
[plotCpuUser.get_label(), plotCpuSys.get_label(), plotCpuI.get_label(), plotCpuC.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpuUser, cpuSys, cpuI, cpuC, ylim
nonlocal x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # used below
# print(x)
cpuTimeInd = psutil.cpu_times()
cpuTimeList = [[cpuTimeInd.user], [cpuTimeInd.system], [cpuTimeInd.idle], [cpuTimeInd.interrupt]]
tmpCpuU = cpuTimeList[0][0] * 0.1
tmpCpuSys = cpuTimeList[1][0] * 0.1
tmpCpuI = cpuTimeList[2][0] * 0.1
tmpCpuC = cpuTimeList[3][0] * 0.1
# print(tmpCpuC)
cpuUser = np.append(cpuUser,tmpCpuU)
cpuSys = np.append(cpuSys,tmpCpuSys)
cpuI = np.append(cpuI,tmpCpuI)
cpuC = np.append(cpuC,tmpCpuC)
t = np.append(t,x)
x += 0.05
plotCpuUser.set_data(t, cpuUser)
plotCpuSys.set_data(t, cpuSys)
plotCpuI.set_data(t, cpuI)
plotCpuC.set_data(t, cpuC)
if x >= xmax - 1.00:
plotCpuUser.axes.set_xlim(x - xmax +1.0, x+1.0)
return plotCpuUser
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
self.ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)  # keep a reference so the animation is not garbage-collected
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
class PageThree(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU Stats", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU & RAM",
command=lambda: controller.show_frame(PageFour))
button3.pack()
canvasforPic = Canvas(self)
cpustats1 = Label(canvasforPic, text="Ctx_switches: " + str(psutil.cpu_stats().ctx_switches>>20))
cpustats2 = Label(canvasforPic, text="interrupts: " + str(psutil.cpu_stats().interrupts>>20))
cpustats3 = Label(canvasforPic, text="syscalls: " + str(psutil.cpu_stats().syscalls>>20))
cpustats1.pack()
cpustats2.pack()
cpustats3.pack()
canvasforPic.pack(side=RIGHT)
ylim = 0
tcpuTimeInd = psutil.cpu_stats()
tcpuTimeList = [tcpuTimeInd.ctx_switches>>20, tcpuTimeInd.interrupts>>20, tcpuTimeInd.syscalls>>20]
for tcpu in tcpuTimeList:
if ylim < tcpu:
ylim = tcpu
# current time, used below
nowtime = 0
def refreshHWIndicators(): # refresh the label contents every second
# global cpuUser, cpuSys, cpuI, cpuC
# global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # used below
try:
timer = threading.Timer(1, refreshHWIndicators)
cpustats1.configure(text="Ctx_switches: " + str(psutil.cpu_stats().ctx_switches>>20))
# print(str(psutil.cpu_stats().ctx_switches))
cpustats2.configure(text="interrupts: " + str(psutil.cpu_stats().interrupts>>20))
cpustats3.configure(text="syscalls: " + str(psutil.cpu_stats().syscalls>>20))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# plot the three CPU stat series dynamically at once
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU Stat")
ax.set_ylim(0,ylim*2)
ax.set_xlim(0,5.0)
ax.grid(True)
ax.set_ylabel("CPU Stat")
ax.set_xlabel("Time")
# Data Placeholders
cpuC = np.zeros(0)
cpuI = np.zeros(0)
cpuS = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpuCtx, = ax.plot(t, cpuC, 'b-', label="Ctx switches")
plotCpuint, = ax.plot(t, cpuI, 'g-', label="interrupts")
plotCpuSys, = ax.plot(t, cpuS, 'r-', label="syscalls")
ax.legend([plotCpuCtx, plotCpuSys, plotCpuint],\
[plotCpuCtx.get_label(), plotCpuSys.get_label(), plotCpuint.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpuC, cpuS, cpuI, ylim
nonlocal x, plotCpuCtx, plotCpuSys, plotCpuint, t # used below
# print(x)
cpuTimeInd = psutil.cpu_stats()
cpuTimeList = [[cpuTimeInd.ctx_switches], [cpuTimeInd.interrupts], [cpuTimeInd.syscalls]]
tmpCpuC = cpuTimeList[0][0]>>20
tmpCpuI = cpuTimeList[1][0]>>20
tmpCpuS = cpuTimeList[2][0]>>20
# print(tmpCpuC)
cpuC = np.append(cpuC,tmpCpuC)
cpuI = np.append(cpuI,tmpCpuI)
cpuS = np.append(cpuS,tmpCpuS)
t = np.append(t,x)
x += 0.05
plotCpuCtx.set_data(t, cpuC)
plotCpuint.set_data(t, cpuI)
plotCpuSys.set_data(t, cpuS)
if x >= xmax - 1.00:
plotCpuCtx.axes.set_xlim(x - xmax +1.0, x+1.0)
return plotCpuCtx
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
self.ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)  # keep a reference so the animation is not garbage-collected
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
class PageFour(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU Stats", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Status",
command=lambda: controller.show_frame(PageThree))
button3.pack()
# canvasforPic = Canvas(self)
#########################################################################
############# draw the image onto this canvas ##########################
#########################################################################
inImage, outImage = None, None
inH, inW, outH, outW = [0] * 4
photo, cvPhoto = None, None
paper = None
canvasforPic = None
# canvasforPic = Canvas(self)
def loadImageColor(self,fnameOrCvData):
nonlocal paper, inImage, outImage, inH, inW, outH, outW
nonlocal photo, cvPhoto, canvasforPic
#######################################
### copy: PIL object --> OpenCV object ###
## worth thinking about why this round-trip works!
if type(fnameOrCvData) == str: # a filename was passed in
cvData = cv2.imread(fnameOrCvData) # file --> CV data
else:
cvData = fnameOrCvData # this branch once failed because of a missing indent
cvPhoto = cv2.cvtColor(cvData, cv2.COLOR_BGR2RGB) # the key CV object # a numpy array
# print(cvPhoto)
photo = Image.fromarray(cvPhoto)
# print(type(photo))
inW, inH = photo.size # (photo.width, photo.height)
outW, outH = inW, inH
# build the canvas
# the parent must be self for this to work
canvasforPic = Canvas(self, height=inH, width=inW)
#######################################
inImage = np.array(photo)
outImage = inImage.copy()
# print(outImage)
def displayImageColor():
nonlocal paper, inImage, outImage, inH, inW, outH, outW
nonlocal cvPhoto, canvasforPic
VIEW_X, VIEW_Y = inW, inH
# print(VIEW_X)
## fixed view size
# compute the width/height ratio
paper = PhotoImage(height=outH, width=outW)
# paper = PhotoImage('CPU.PNG')
canvasforPic.create_image((outH // 2, outW // 2), image=paper, state='normal')
# print(outH)
import numpy
rgbStr = '' # accumulates the color string for every pixel
for i in numpy.arange(0, outH):
tmpStr = ''
for k in numpy.arange(0, outW):
i = int(i);
k = int(k)
r, g, b = outImage[i, k, R], outImage[i, k, G], outImage[i, k, B]
tmpStr += ' #%02x%02x%02x' % (r, g, b)
rgbStr += '{' + tmpStr + '} '
# print(rgbStr)
paper.put(rgbStr)
# print(paper)
inImage = outImage.copy()
cvPhoto = outImage.copy()
canvasforPic.pack(expand=1, anchor=CENTER)
# canvasforPic = Canvas(self, height=inH, width=inW)
loadImageColor(self,"CPU.PNG") # inImage, inH, inW, outH, outW 설정
# print(canvasforPic)
# print(inImage)
print(type(outImage))
displayImageColor()
# canvasforPic.pack(expand=1, anchor=CENTER, side=RIGHT)
#########################################################################################
##################### end of the image drawing section ##################################
#########################################################################################
#
cpuI = Label(canvasforPic, text="Cpu Usage percent: " + str(psutil.cpu_percent()))
ramI = Label(canvasforPic, text="Ram Usage percent: " + str(psutil.virtual_memory().percent))
cpuI.pack(side=BOTTOM)
ramI.pack(side=BOTTOM)
canvasforPic.pack(side=RIGHT)
ylim = 100
cpuRamList = [psutil.cpu_percent(), psutil.virtual_memory().percent]
# for cr in cpuRamList:
# if ylim < cr:
# ylim = cr
# current time, used below
nowtime = 0
def refreshHWIndicators(): # refresh the label contents every second
# global cpuUser, cpuSys, cpuI, cpuC
# global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # used below
try:
timer = threading.Timer(1, refreshHWIndicators)
cpuI.configure(text="CPU Usage: " + str(psutil.cpu_percent()))
# print(str(psutil.cpu_stats().ctx_switches))
ramI.configure(text="RAM Usage: " + str(psutil.virtual_memory().percent))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# plot the CPU and RAM usage series dynamically at once
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU & RAM Usage")
ax.set_ylim(0, ylim)
ax.set_xlim(0, 5.0)
ax.grid(True)
ax.set_ylabel("CPU & RAM Usage")
ax.set_xlabel("Time")
# Data Placeholders
cpu = np.zeros(0)
ram = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpu, = ax.plot(t, cpu, 'b-', label="Cpu Usage")
plotRam, = ax.plot(t, ram, 'g-', label="Ram Usage")
ax.legend([plotCpu, plotRam], \
[plotCpu.get_label(), plotRam.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpu, ram
nonlocal x, plotCpu, plotRam, t # used below
# print(x)
cpuRamList = [[psutil.cpu_percent()], [psutil.virtual_memory().percent]]
tmpC = cpuRamList[0][0]
tmpR = cpuRamList[1][0]
# print(tmpCpuC)
cpu = np.append(cpu, tmpC)
ram = np.append(ram, tmpR)
t = np.append(t, x)
x += 0.05
plotCpu.set_data(t, cpu)
plotRam.set_data(t, ram)
if x >= xmax - 1.00:
plotCpu.axes.set_xlim(x - xmax + 1.0, x + 1.0)
return plotCpu
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
self.ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)  # keep a reference so the animation is not garbage-collected
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
# global variables
R, G, B = 0, 1, 2 # global channel-index constants for easy 3-D access
inImage, outImage = None, None # handled as numpy arrays from here on
inH, inW, outH, outW = [0] * 4
window, canvas, paper = None, None, None
filename = ""
panYN = False
sx, sy, ex, ey = [0] * 4
VIEW_X, VIEW_Y = 512, 512 # on-screen view size (for output)
# main code
app = SeaofBTCapp()
app.mainloop() | [
"[email protected]"
] | |
3c41505b6df63f203fef498970b71945aa8ac5d8 | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /Scripts/simulation/situations/complex/looping_interaction_situation.py | a00207c4f468d786c117ad5289444841aeb74d90 | [
"Apache-2.0"
] | permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,478 | py | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\situations\complex\looping_interaction_situation.py
# Compiled at: 2020-10-01 19:10:21
# Size of source mod 2**32: 12943 bytes
from event_testing.test_events import TestEvent
from event_testing.tests import TunableTestSet
from interactions.base.interaction import InteractionFailureOptions
from interactions.context import InteractionContext, QueueInsertStrategy
from interactions.interaction_finisher import FinishingType
from interactions.priority import Priority
from sims4.tuning.tunable import Tunable, TunablePackSafeReference
from singletons import DEFAULT
from situations.situation import Situation
from situations.situation_complex import SituationComplexCommon, SituationStateData, CommonSituationState, TunableSituationJobAndRoleState
import interactions.aop, routing, services, situations, operator
OBJECT_TOKEN = 'object_id'
class RunInteractionState(CommonSituationState):
FACTORY_TUNABLES = {'max_retry_attempts': Tunable(description='\n The number of times the Sim can fail to successfully run the \n tuned interaction before giving up and moving on to the next \n object as a target.\n ',
tunable_type=int,
default=3)}
def __init__(self, *args, targets=None, interaction=None, max_retry_attempts=None, basic_extra=None, previous_si=None, **kwargs):
super().__init__(*args, **kwargs)
self.targets = targets
self.interaction = interaction
self._retry_count = 0
self._target = None
self.max_retry_attempts = max_retry_attempts
self.basic_extra = basic_extra
self._previous_si = previous_si
self._interaction_instance = None
def on_activate(self, reader=None):
if not self.find_target_and_push_interaction():
if not self.targets:
self.owner._self_destruct()
else:
self.retry_interaction()
return
self._test_event_register(TestEvent.InteractionStart)
self._test_event_register(TestEvent.InteractionExitedPipeline)
def handle_event(self, sim_info, event, resolver):
if event == TestEvent.InteractionStart:
if resolver.interaction is self._interaction_instance:
if self._additional_tests(sim_info, event, resolver):
self._on_interaction_of_interest_start()
return
elif event == TestEvent.InteractionExitedPipeline:
if resolver.interaction is self._interaction_instance and self._additional_tests(sim_info, event, resolver):
if resolver.interaction.has_been_user_canceled:
self.cancel_interaction()
return
if not resolver.interaction.is_finishing_naturally:
self._on_interaction_of_interest_failure()
return
def _on_interaction_of_interest_start(self):
self.owner.advance_to_next_object((self.targets), previous_si=(self._interaction_instance))
def _on_interaction_of_interest_failure(self):
self.retry_interaction()
def _additional_tests(self, sim_info, event, resolver):
return self.owner.is_sim_in_situation(sim_info.get_sim_instance())
def cancel_interaction(self):
self.owner._self_destruct()
def timer_expired(self):
self.owner.advance_to_next_object(previous_si=(self._interaction_instance))
def find_target_and_push_interaction(self):
if self.targets is None:
self.owner._self_destruct()
return
sim = self.owner.initiating_sim_info.get_sim_instance()
failed_connectivity_obj = None
for obj in sorted((self.targets), key=(operator.attrgetter('part_group_index'))):
if self._previous_si is not None:
context = self._previous_si.context.clone_for_continuation(self._previous_si)
else:
context = InteractionContext(sim, (InteractionContext.SOURCE_SCRIPT),
(Priority.High),
insert_strategy=(QueueInsertStrategy.FIRST))
resolver = self.interaction.get_resolver(target=obj, context=context)
if not self.owner.tests.run_tests(resolver):
self.targets.remove(obj)
continue
if not obj.is_connected(sim):
failed_connectivity_obj = obj
self.targets.remove(obj)
continue
self.targets.remove(obj)
self._target = obj
return self.push_interaction(context=context)
if failed_connectivity_obj is not None:
route_fail_context = InteractionContext(sim, (InteractionContext.SOURCE_SCRIPT),
(Priority.High),
insert_strategy=(QueueInsertStrategy.NEXT))
result = sim.push_super_affordance((InteractionFailureOptions.ROUTE_FAILURE_AFFORDANCE), failed_connectivity_obj,
route_fail_context, interaction_name=self.interaction.get_name(target=failed_connectivity_obj, context=route_fail_context),
interaction_icon_info=self.interaction.get_icon_info(target=failed_connectivity_obj, context=route_fail_context))
return False
def push_interaction(self, context=DEFAULT):
for sim in self.owner.all_sims_in_situation_gen():
if context is DEFAULT:
context = InteractionContext(sim, (InteractionContext.SOURCE_SCRIPT), (Priority.High),
insert_strategy=(QueueInsertStrategy.NEXT))
aop = interactions.aop.AffordanceObjectPair(self.interaction, self._target, self.interaction, None)
test_result, execute_result = aop.test_and_execute(context)
self._interaction_instance = execute_result[1]
if self.basic_extra:
if self._interaction_instance is not None:
self._interaction_instance.add_additional_instance_basic_extra(self.basic_extra)
return test_result
def retry_interaction(self):
self._retry_count += 1
if self._retry_count < self.max_retry_attempts:
self.push_interaction()
else:
self._retry_count = 0
self.owner.advance_to_next_object((self.targets), previous_si=(self._interaction_instance))
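# Summary of this state machine: on_activate picks the next connected
# object that passes the tuned tests and pushes the tuned interaction on
# it; a successful start advances the situation to the next object, while
# a failure retries up to max_retry_attempts times before moving on. When
# no targets remain, the situation destroys itself.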
class LoopingInteractionSituation(situations.situation_complex.SituationComplexCommon):
INSTANCE_TUNABLES = {'tendor_job_and_role_state':TunableSituationJobAndRoleState(description='\n Job and Role State for the Sim in this situation.\n '),
'interaction':TunablePackSafeReference(description='\n The interaction that the Sim will run in looping succession on\n the object(s) specified if the tests pass.\n ',
manager=services.affordance_manager()),
'tests':TunableTestSet(description='\n The tests that muss pass for the Sim to run the tuned interaction\n with the object as the target.\n '),
'run_interaction_state':RunInteractionState.TunableFactory(description='\n Situation State used to run the tuned interaction on a specific\n object.\n ',
tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP)}
REMOVE_INSTANCE_TUNABLES = Situation.NON_USER_FACING_REMOVE_INSTANCE_TUNABLES
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
reader = self._seed.custom_init_params_reader
self.targets = None
self._retry_count = 0
self.interaction_override = self._seed.extra_kwargs.get('interaction', None)
self.basic_extra = self._seed.extra_kwargs.get('basic_extra', ())
if reader is None:
self._target_id = self._seed.extra_kwargs.get('default_target_id', None)
else:
self._target_id = reader.read_uint64(OBJECT_TOKEN, None)
if self._target_id is not None:
target = services.object_manager().get(self._target_id)
if target.parts:
self.targets = set(target.parts)
else:
self.targets = set((target,))
@classmethod
def default_job(cls):
pass
@classmethod
def _states(cls):
return (SituationStateData(1, RunInteractionState, factory=(cls.run_interaction_state)),)
@classmethod
def _get_tuned_job_and_default_role_state_tuples(cls):
return [(cls.tendor_job_and_role_state.job, cls.tendor_job_and_role_state.role_state)]
def _on_set_sim_job(self, sim, job_type):
super()._on_set_sim_job(sim, job_type)
self._change_state(self.run_interaction_state(targets=(self.targets), interaction=(self.looping_interaction),
basic_extra=(self.basic_extra)))
def advance_to_next_object(self, targets, previous_si=None):
self._change_state(self.run_interaction_state(targets=targets, interaction=(self.looping_interaction),
basic_extra=(self.basic_extra),
previous_si=previous_si))
@property
def looping_interaction(self):
if self.interaction_override is not None:
return self.interaction_override
return self.interaction | [
"[email protected]"
] | |
75434b093211de8bd09ddd5d42a9bf15f06d16c6 | 77116b044adb3f28c5ea53d17fc69c29fd9bee55 | /modules/influxdb_wrapper.py | 683fcb41dd50d91836b1b24a3421205c11cc4a99 | [
"MIT"
] | permissive | manav1403/stopstalk-deployment | 63a5c22f20cf1dbe81024ba63b33c1c986ae8ada | 667f6d89b24ce04595e2c70e02aa44aa3d836c42 | refs/heads/master | 2023-03-22T18:39:37.371341 | 2021-03-20T15:40:20 | 2021-03-20T15:40:20 | 290,265,152 | 0 | 0 | MIT | 2020-08-25T16:22:59 | 2020-08-25T16:22:58 | null | UTF-8 | Python | false | false | 2,960 | py | """
Copyright (c) 2015-2020 Raj Patel([email protected]), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from influxdb import SeriesHelper
from gluon import current
series_helper_classes = {}
# ------------------------------------------------------------------------------
def get_series_helper(measurement_name,
measurement_fields,
measurement_tags):
if measurement_name in series_helper_classes:
return series_helper_classes[measurement_name]
else:
series_helper_classes[measurement_name] = series_helper_class_wrapper(
measurement_name,
measurement_fields,
measurement_tags
)
return series_helper_classes[measurement_name]
# ------------------------------------------------------------------------------
def series_helper_class_wrapper(measurement_name,
measurement_fields,
measurement_tags):
class StopStalkSeriesHelper(SeriesHelper):
"""Instantiate SeriesHelper to write points to the backend."""
class Meta:
"""Meta class stores time series helper configuration."""
# The client should be an instance of InfluxDBClient.
client = current.INFLUXDB_CLIENT
# The series name must be a string. Add dependent fields/tags
# in curly brackets.
series_name = measurement_name
# Defines all the fields in this time series.
fields = measurement_fields
# Defines all the tags for the series.
tags = measurement_tags
# Defines the number of data points to store prior to writing
# on the wire.
bulk_size = 5
# autocommit must be set to True when using bulk_size
autocommit = True
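    # Editor's sketch of intended usage (not part of the original module); assumes a
    # web2py request where current.INFLUXDB_CLIENT is an InfluxDBClient, and a
    # hypothetical "request_timing" measurement:
    #   RequestTiming = get_series_helper("request_timing",
    #                                     ["duration_ms"],   # field names
    #                                     ["endpoint"])      # tag names
    #   RequestTiming(endpoint="/contests", duration_ms=42)  # buffers one point
    #   RequestTiming.commit()  # flushes any points still below bulk_size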
return StopStalkSeriesHelper | [
"[email protected]"
] | |
5901e5381b54af17773dc3e7c1520e28cf0de3f4 | 2cb507ecd6629b9ff457a36e462f987913d94c1a | /webspider/3.数据提取/3.3bs4模块/07-bs4案例.py | e0ba2dc15a95fa18c7b4907a3e13a505e0e05098 | [
"Apache-2.0"
] | permissive | youaresherlock/PythonPractice | 6869e0a5949675198826e5a07552237a636d6f5b | 2e22d3fdcb26353cb0d8215c150e84d11bc9a022 | refs/heads/master | 2021-08-16T03:09:44.203035 | 2021-08-02T07:40:00 | 2021-08-02T07:40:00 | 146,625,560 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | #!/usr/bin/python
# -*- coding:utf8 -*-
"""
https://mil.news.sina.com.cn/roll/index.d.html
"""
import json
import requests
from bs4 import BeautifulSoup
url = 'https://mil.news.sina.com.cn/roll/index.d.html'
headers = {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"}
response = requests.get(url, headers=headers)
soup = BeautifulSoup(response.content.decode(), 'html.parser')
# Hierarchical (descendant) CSS selector
news_list = soup.select('.linkNews li a')
news_results = []
for news in news_list:
new_dict = dict()
new_dict['title'] = news.get_text()
new_dict['url'] = news.get('href')
news_results.append(new_dict)
print(news_results)
with open('news.json', 'w', encoding='utf-8') as f:
content = json.dumps(news_results, ensure_ascii=False, indent=1)
f.write(content)
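# Editor's sketch: the same descendant-selector idea on a tiny inline document,
# independent of the Sina page structure (markup here is illustrative only):
#   demo = BeautifulSoup('<ul class="linkNews"><li><a href="/x">t</a></li></ul>',
#                        'html.parser')
#   assert [a.get_text() for a in demo.select('.linkNews li a')] == ['t']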
| [
"[email protected]"
] | |
3f9c5087daf02fa4d3f63eed410bf3cac7690a7a | 5936b0f025944d265cc64d31ef93bc578d5ae6a2 | /home/migrations/0002_load_initial_data.py | aa10fce4bdfc1b079fe4363502f83665c2758cfe | [] | no_license | crowdbotics-apps/smiley-18358 | b4e91ddeaf525aedf990ec1df65d65fb583f4b7c | 7935dd2fad196a7b573c1126905af5fcf93110b0 | refs/heads/master | 2022-11-06T07:02:33.512245 | 2020-06-23T20:08:09 | 2020-06-23T20:08:09 | 274,497,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,278 | py | from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "smiley"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">smiley</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "smiley-18358.botics.co"
site_params = {
"name": "smiley",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
| [
"[email protected]"
] | |
0c959bb906d3c3b2c695aa535eb404b7f8e52c55 | c795ec7f77219892183a1222fb51b8be2e754944 | /multiverse server/multiverse-server/multiverse/config/mv_fantasy/ability_db.py | e25cedb2bc832ac0df1d59a2590ebf554a5d19c1 | [
"MIT"
] | permissive | radtek/MultiverseClientServer | 89d9a6656953417170e1066ff3bd06782305f071 | b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379 | refs/heads/master | 2023-01-19T04:54:26.163862 | 2020-11-30T04:58:30 | 2020-11-30T04:58:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,874 | py | #
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
from java.lang import *
from java.util import *
from multiverse.mars import *
from multiverse.mars.objects import *
from multiverse.mars.core import *
from multiverse.mars.events import *
from multiverse.mars.util import *
from multiverse.mars.effects import *
from multiverse.mars.abilities import *
from multiverse.server.math import *
from multiverse.server.events import *
from multiverse.server.objects import *
from multiverse.server.engine import *
True=1
False=0
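# Editor's note: True/False are assigned by hand, presumably because the embedded
# Jython interpreter this server script targets predates Python's boolean builtins.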
effect = HealEffect("heal effect")
effect.setMinInstantHeal(100)
effect.setMaxInstantHeal(100)
Mars.EffectManager.register(effect.getName(), effect)
effect = StunEffect("stun effect")
effect.setDuration(7000)
Mars.EffectManager.register(effect.getName(), effect)
effect = StatEffect("armor effect")
effect.setDuration(15000)
effect.setStat("armor", 20)
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach heal effect")
effect.setAbilityName("heal")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach stun effect")
effect.setAbilityName("stun")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach armor effect")
effect.setAbilityName("armor")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach fireball effect")
effect.setAbilityName("fireball")
Mars.EffectManager.register(effect.getName(), effect)
effect = DamageEffect("poison effect")
effect.isPeriodic(1)
effect.isPersistent(1)
effect.setMinPulseDamage(12)
effect.setMaxPulseDamage(12)
effect.setNumPulses(10)
effect.setDuration(50000)
effect.setDamageType("Poison")
Mars.EffectManager.register(effect.getName(), effect)
effect = HealEffect("health regen effect")
effect.setHealProperty("health")
effect.setMinPulseHeal(2)
effect.setMaxPulseHeal(2)
effect.isPersistent(True)
effect.isPeriodic(True)
effect.setDuration(1000000)
effect.setNumPulses(500)
Mars.EffectManager.register(effect.getName(), effect)
effect = HealEffect("mana regen effect")
effect.setHealProperty("mana")
effect.setMinPulseHeal(2)
effect.setMaxPulseHeal(2)
effect.isPersistent(True)
effect.isPeriodic(True)
effect.setDuration(1000000)
effect.setNumPulses(500)
Mars.EffectManager.register(effect.getName(), effect)
effect = DamageEffect("fireball effect")
effect.setDamageProperty("health")
effect.setMinInstantDamage(40)
effect.setMaxInstantDamage(60)
effect.setDamageType("Fire")
Mars.EffectManager.register(effect.getName(), effect)
ability = EffectAbility("stun")
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(10000)
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("stun effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("armor")
ability.setActivationCost(30)
ability.setCostProperty("mana")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("armor effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
healCastingEffect = CoordinatedEffect("SpellCastingEffect")
healCastingEffect.sendSourceOid(True)
healCastingEffect.putArgument("castingTime", Integer(3000))
healCastingEffect.putArgument("decalTexture", "eight-hearts.png")
healTargetEffect = CoordinatedEffect("SpellTargetEffect")
healTargetEffect.sendTargetOid(True)
fireballCastingEffect = CoordinatedEffect("SpellCastingEffect")
fireballCastingEffect.sendSourceOid(True)
fireballCastingEffect.putArgument("castingTime", Integer(1500))
fireballCastingEffect.putArgument("decalTexture", "fire_ring_decal.dds")
fireballTargetEffect = CoordinatedEffect("MvFantasyFireball")
fireballTargetEffect.sendSourceOid(True)
fireballTargetEffect.sendTargetOid(True)
attackEffect = CoordinatedEffect("AttackEffect")
attackEffect.sendSourceOid(True)
attackEffect.sendTargetOid(True)
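# Editor's note: CoordinatedEffects name client-side visual/audio effect scripts;
# the send*Oid flags control which object ids accompany the effect message, and
# putArgument supplies parameters such as casting time or decal texture.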
ability = EffectAbility("heal")
ability.setActivationTime(5000)
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(20000)
ability.setIcon("Interface\FantasyWorldIcons\SPELL_heal_A")
ability.setTargetType(MarsAbility.TargetType.FRIEND)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, healCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("heal potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Healing Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("poison potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("poison effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Poison Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("heal scroll")
ability.setTargetType(MarsAbility.TargetType.FRIEND)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.setMaxRange(20000)
ability.setActivationTime(3000)
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Healing Scroll")
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, healCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self heal ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Tome of Heal")
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self stun ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach stun effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self armor ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach armor effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self fireball ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach fireball effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Tome of Fireball")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("leather tanning")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(3000)
ability.setItem("Finished Leather")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
ability.addReagent("Wolf Skin")
ability.addReagent("Wolf Skin")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("make healing potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(0)
ability.setItem("Healing Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("make healing scroll")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(0)
ability.setItem("Healing Scroll")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("fireball")
ability.setActivationTime(1500)
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(40000)
ability.setIcon("Interface\FantasyWorldIcons\SPELL_fireball_A")
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("fireball effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, fireballCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, fireballTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
effect = HealEffect("restore mana effect")
effect.setHealProperty("mana")
effect.setMinInstantHeal(100)
effect.setMaxInstantHeal(100)
Mars.EffectManager.register(effect.getName(), effect)
ability = EffectAbility("restore mana potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("restore mana effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Mana Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
rand = Random()
class FantasyCombatEffect (MarsEffect):
def apply(self, state):
MarsEffect.apply(self, state)
params = state.getParams()
result = params.get("result")
baseDmg = params.get("damage")
target = state.getObject()
if (result == "miss"):
dmgType = "miss"
elif (result == "hit"):
dmgType = "Physical"
elif (result == "crit"):
dmgType = "critical Physical"
else:
Log.error("FantasyCombatEffect.apply: unknown result")
penetration = params.get("penetration")
armor = target.statGetCurrentValue("armor")
dmgEff = (1.0 * penetration) / (armor + penetration)
if (dmgEff < 0.25):
dmgEff = 0.25
dmg = dmgEff * baseDmg
dmg = int(dmg)
if (dmg == 0 and dmgType != "miss"):
dmg = 1
target.statModifyBaseValue("health", -dmg)
target.sendStatusUpdate()
Log.debug("FantasyCombatEffect.apply: sending damage message target=" + target.toString()
+ " caster=" + state.getCaster().toString() + " dmg=" + str(dmg) + " dmgType=" + dmgType)
Engine.getAgent().sendBroadcast(CombatClient.DamageMessage(target.getOwnerOid(),
state.getCaster().getOwnerOid(),
dmg, dmgType))
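# Worked example (editor's note): penetration 100 vs armor 300 gives
# dmgEff = 100/(300+100) = 0.25 (exactly the floor), so a 40-damage hit lands
# for int(0.25 * 40) = 10; armor can never push mitigation past 75%.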
effect = FantasyCombatEffect("attack effect")
Mars.EffectManager.register(effect.getName(), effect)
class FantasyCombatAbility (CombatAbility):
def resolveHit(self, state):
params = HashMap()
caster = state.getObject()
target = state.getTarget()
defense = target.statGetCurrentValue("defense")
accuracy = caster.statGetCurrentValue("accuracy")
agility = caster.statGetCurrentValue("agility")
atkPower = caster.statGetCurrentValue("attack power")
atkDelay = caster.getAttackDelay()
missChance = 0.2 + (defense-accuracy)/1000.0
if (missChance < 0.05):
missChance = 0.05
critChance = missChance + 0.05 + agility/1000.0
roll = rand.nextFloat()
bonusDmg = (atkPower * atkDelay) / 10000.0
baseWeapDmg = caster.getProperty("weaponBaseDmg")
varWeapDmg = caster.getProperty("weaponVarDmg")
dmg = bonusDmg + baseWeapDmg + rand.nextFloat() * varWeapDmg
if (roll < missChance):
dmg = 0
params.put("result", "miss")
elif (roll < critChance):
dmg *= 1.5
params.put("result", "crit")
else:
params.put("result", "hit")
params.put("damage", int(dmg))
penetration = caster.statGetCurrentValue("offense skill")
params.put("penetration", penetration)
return params
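# Worked example (editor's note): defense 250 vs accuracy 150 gives
# missChance = 0.2 + (250-150)/1000 = 0.3; with agility 100,
# critChance = 0.3 + 0.05 + 0.1 = 0.45, so rolls in [0.3, 0.45) crit for 1.5x
# the rolled weapon damage, and rolls below 0.3 miss outright.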
ability = FantasyCombatAbility("attack ability")
ability.setMaxRange(5000)
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("attack effect"))
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
Mars.AbilityManager.register(ability.getName(), ability)
| [
"[email protected]"
] | |
6d3e6d6192178fdbd567a66120eb0aeb0b1077a1 | a281d09ed91914b134028c3a9f11f0beb69a9089 | /contrib/great_expectations_semantic_types_expectations/great_expectations_semantic_types_expectations/expectations/expect_column_values_to_be_valid_stellar_address.py | e90bce385fe104a9ad05fb1b06683e925a02a2db | [
"Apache-2.0"
] | permissive | CarstenFrommhold/great_expectations | 4e67bbf43d21bc414f56d576704259a4eca283a5 | 23d61c5ed26689d6ff9cec647cc35712ad744559 | refs/heads/develop | 2023-01-08T10:01:12.074165 | 2022-11-29T18:50:18 | 2022-11-29T18:50:18 | 311,708,429 | 0 | 0 | Apache-2.0 | 2020-11-10T15:52:05 | 2020-11-10T15:52:04 | null | UTF-8 | Python | false | false | 6,125 | py | """
This is a template for creating custom ColumnMapExpectations.
For detailed instructions on how to use it, please see:
https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_column_map_expectations
"""
import json
from typing import Optional
import coinaddrvalidator
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.exceptions import InvalidExpectationConfigurationError
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.expectations.expectation import ColumnMapExpectation
from great_expectations.expectations.metrics import (
ColumnMapMetricProvider,
column_condition_partial,
)
def is_valid_stellar_address(addr: str) -> bool:
    try:
        return coinaddrvalidator.validate("xlm", addr).valid
    except Exception:
        return False
# This class defines a Metric to support your Expectation.
# For most ColumnMapExpectations, the main business logic for calculation will live in this class.
class ColumnValuesToBeValidStellarAddress(ColumnMapMetricProvider):
# This is the id string that will be used to reference your metric.
condition_metric_name = "column_values.valid_stellar_address"
# This method implements the core logic for the PandasExecutionEngine
@column_condition_partial(engine=PandasExecutionEngine)
def _pandas(cls, column, **kwargs):
return column.apply(lambda x: is_valid_stellar_address(x))
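    # Editor's note: column_condition_partial turns this into a per-row boolean
    # Series; rows evaluating False count against the user-supplied `mostly` threshold.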
# This method defines the business logic for evaluating your metric when using a SqlAlchemyExecutionEngine
# @column_condition_partial(engine=SqlAlchemyExecutionEngine)
# def _sqlalchemy(cls, column, _dialect, **kwargs):
# raise NotImplementedError
# This method defines the business logic for evaluating your metric when using a SparkDFExecutionEngine
# @column_condition_partial(engine=SparkDFExecutionEngine)
# def _spark(cls, column, **kwargs):
# raise NotImplementedError
# This class defines the Expectation itself
class ExpectColumnValuesToBeValidStellarAddress(ColumnMapExpectation):
"""Expect column values to be valid Stellar address"""
# These examples will be shown in the public gallery.
# They will also be executed as unit tests for your Expectation.
examples = [
{
"data": {
"all_valid": [
"GA7YNBW5CBTJZ3ZZOWX3ZNBKD6OE7A7IHUQVWMY62W2ZBG2SGZVOOPVH",
"GBTA54J4LY5BAQWA4KECII66TPTU3V6DXPBPNVXIPMHN5W6QFATWRXY5",
"GCINDD6LNZSYPND4WRQL6NRFGOAXMAMK7M3QP2JXWC5634BY4DSZ4YG2",
"GDKRCHSD2YUW3X6FXRAVOOZZ2IOMWSGM6SH6I56VCX6V2DTPG7FO626W",
],
"some_other": [
"1BoatSLRHtKNngkdXEeobR76b53LETtpyT",
"n2nzi7xDTrMVK9stGpbK3BtrpBCJfH7LRQ",
"3QJmV3qfvL9SuYo34YihAf3sRCW3qSinyC",
"bc1qxneu85dnhx33asv8da45x55qyeu44ek9h3vngxdsare",
],
},
"tests": [
{
"title": "basic_positive_test",
"exact_match_out": False,
"include_in_gallery": True,
"in": {"column": "all_valid"},
"out": {
"success": True,
},
},
{
"title": "basic_negative_test",
"exact_match_out": False,
"include_in_gallery": True,
"in": {"column": "some_other", "mostly": 1},
"out": {
"success": False,
},
},
],
}
]
# This is the id string of the Metric used by this Expectation.
# For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above.
map_metric = "column_values.valid_stellar_address"
# This is a list of parameter names that can affect whether the Expectation evaluates to True or False
success_keys = ("mostly",)
# This dictionary contains default values for any parameters that should have default values
default_kwarg_values = {}
def validate_configuration(
self, configuration: Optional[ExpectationConfiguration]
) -> None:
"""
Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
necessary configuration arguments have been provided for the validation of the expectation.
Args:
configuration (OPTIONAL[ExpectationConfiguration]): \
An optional Expectation Configuration entry that will be used to configure the expectation
Returns:
None. Raises InvalidExpectationConfigurationError if the config is not validated successfully
"""
super().validate_configuration(configuration)
if configuration is None:
configuration = self.configuration
# # Check other things in configuration.kwargs and raise Exceptions if needed
# try:
# assert (
# ...
# ), "message"
# assert (
# ...
# ), "message"
# except AssertionError as e:
# raise InvalidExpectationConfigurationError(str(e))
        return None
# This object contains metadata for display in the public Gallery
library_metadata = {
"maturity": "experimental",
"tags": [
"hackathon-22",
"experimental",
"typed-entities",
], # Tags for this Expectation in the Gallery
"contributors": [ # Github handles for all contributors to this Expectation.
"@szecsip", # Don't forget to add your github handle here!
],
"requirements": ["coinaddrvalidator"],
}
if __name__ == "__main__":
ExpectColumnValuesToBeValidStellarAddress().print_diagnostic_checklist()
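# Editor's sketch of typical use (assumes this module is registered as a custom
# expectation and a pandas-backed Great Expectations validator named `validator`;
# the column name is illustrative):
#   validator.expect_column_values_to_be_valid_stellar_address(
#       column="wallet_address", mostly=0.95
#   )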
| [
"[email protected]"
] | |
0a84c7d2819c6909abef3aa8cf9c8e577efad451 | 13f4a06cd439f579e34bf38406a9d5647fe7a0f3 | /nn_ns/parsing/IterParser/ParseResultAST.py | 205e7e97ea49432cf019048a7b1198e730ce036c | [] | no_license | edt-yxz-zzd/python3_src | 43d6c2a8ef2a618f750b59e207a2806132076526 | 41f3a506feffb5f33d4559e5b69717d9bb6303c9 | refs/heads/master | 2023-05-12T01:46:28.198286 | 2023-05-01T13:46:32 | 2023-05-01T13:46:32 | 143,530,977 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 346 | py |
from sand.types.NonMathTree import NonMathTree, LeafNode, OrientedNode, UnorientedNode
class ParseResultAST(NonMathTree):
    class __UnboxedTypeID__: pass
class ConRuleNode(OrientedNode, ParseResultAST): pass
class AltRuleNode(UnorientedNode, ParseResultAST): pass
class TerminalNode(LeafNode, ParseResultAST): pass
| [
"[email protected]"
] | |
5793547e4f2688f451442dce1b433dfd365ef5a8 | 9715a7d27f9b146632f964b643ee7243a7e9a38c | /match-sift.py | b7275f8f1c0e357c2af2b24419bc14fbb03ef725 | [] | no_license | uakfdotb/skyquery | 3eb9b2265992127a3c5b3b3612c32ddea0f39195 | dc67b98ee8034711c274408640e3582d20482673 | refs/heads/master | 2020-07-07T03:49:57.856424 | 2019-08-21T19:13:18 | 2019-08-21T19:13:18 | 203,237,682 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,632 | py | from discoverlib import geom, grid_index
import get_db
import cv2
import json
import math
import multiprocessing
import numpy
import os
from PIL import Image
import scipy.ndimage
import sys
video_id = int(sys.argv[1])
db = get_db.get_db()
BASE_PATH = 'ortho-masked.jpg'
FRAME_PATH = 'frames/{}/'.format(video_id)
LK_PARAMETERS = dict(winSize=(21, 21), maxLevel=2, criteria=(cv2.TERM_CRITERIA_COUNT | cv2.TERM_CRITERIA_EPS, 30, 0.01))
# in ortho-imagery resolution units which was 2cm/pixel but resized 4cm/pixel
# and time units is framerate
MAX_SPEED = 75
sift = cv2.xfeatures2d.SIFT_create()
matcher = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE_L1)
base_im = scipy.ndimage.imread(BASE_PATH)
base_keypoints, base_desc = sift.detectAndCompute(base_im, None)
index = grid_index.GridIndex(256)
for i, kp in enumerate(base_keypoints):
p = geom.Point(kp.pt[0], kp.pt[1])
index.insert(p, i)
def points_to_poly_str(points):
strs = ['{},{}'.format(points[j, 0], points[j, 1]) for j in xrange(points.shape[0])]
return ' '.join(strs)
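# Editor's note: homography_from_flow below reuses the previous frame's homography,
# shifted by the median Lucas-Kanade optical-flow translation; it returns None
# (forcing a full SIFT re-match) when fewer than 70% of tracked points agree with
# the median within 4 pixels.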
def homography_from_flow(prev_homography, prev_gray, cur_gray):
positions = []
for i in xrange(0, prev_gray.shape[0]-50, 50):
for j in xrange(0, prev_gray.shape[1]-50, 50):
positions.append((i, j))
positions_np = numpy.array(positions, dtype='float32').reshape(-1, 1, 2)
def flip_pos(positions):
return numpy.stack([positions[:, :, 1], positions[:, :, 0]], axis=2)
next_positions, st, err = cv2.calcOpticalFlowPyrLK(prev_gray, cur_gray, flip_pos(positions_np), None, **LK_PARAMETERS)
if next_positions is None:
return None
next_positions = flip_pos(next_positions)
differences = next_positions[:, 0, :] - positions_np[:, 0, :]
differences_okay = differences[numpy.where(st[:, 0] == 1)]
median = [numpy.median(differences_okay[:, 0]), numpy.median(differences_okay[:, 1])]
good = (numpy.square(differences[:, 0] - median[0]) + numpy.square(differences[:, 1] - median[1])) < 16
if float(numpy.count_nonzero(good)) / differences.shape[0] < 0.7:
return None
# translate previous homography based on the flow result
translation = [numpy.median(differences[:, 0]), numpy.median(differences[:, 1])]
H_translation = numpy.array([[1, 0, -translation[1]], [0, 1, -translation[0]], [0,0,1]], dtype='float32')
return prev_homography.dot(H_translation)
frame_idx_to_fname = {}
for fname in os.listdir(FRAME_PATH):
if '.jpg' not in fname:
continue
frame_idx = int(fname.split('.jpg')[0])
frame_idx_to_fname[frame_idx] = fname
prev_bounds = None
prev_frame, prev_gray = None, None
prev_homography = None
prev_counter = 0
#db.execute("SELECT id, idx FROM video_frames WHERE bounds IS NULL AND video_id = %s ORDER BY idx", [video_id])
db.execute("SELECT id, idx FROM video_frames WHERE video_id = %s ORDER BY idx", [video_id])
for row in db.fetchall():
#while True:
# db.execute("SELECT id, idx FROM video_frames WHERE bounds IS NULL AND video_id = %s ORDER BY RAND() LIMIT 1", [video_id])
# rows = db.fetchall()
# if len(rows) != 1:
# break
# row = rows[0]
frame_id, frame_idx = row
frame_fname = frame_idx_to_fname[frame_idx]
print 'process {}'.format(frame_idx)
frame = scipy.ndimage.imread(FRAME_PATH + frame_fname)
frame = cv2.resize(frame, (frame.shape[1]/2, frame.shape[0]/2))
frame_gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
H = None
if prev_homography is not None and prev_counter < 5:
H = homography_from_flow(prev_homography, prev_gray, frame_gray)
prev_counter += 1
if H is None:
keypoints, desc = sift.detectAndCompute(frame, None)
if prev_bounds is None:
query_keypoints, query_desc = base_keypoints, base_desc
else:
indices = index.search(prev_bounds.add_tol(2*MAX_SPEED))
indices = numpy.array(list(indices), dtype='int32')
query_keypoints = []
for i in indices:
query_keypoints.append(base_keypoints[i])
query_desc = base_desc[indices]
matches = matcher.knnMatch(queryDescriptors=query_desc, trainDescriptors=desc, k=2)
good = []
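        # Lowe's ratio test (editor's comment): keep a match only when the best
        # descriptor distance is < 0.6x the second-best, pruning ambiguous matches.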
for m, n in matches:
if m.distance < 0.6*n.distance:
good.append(m)
src_pts = numpy.float32([keypoints[m.trainIdx].pt for m in good]).reshape(-1,1,2)
dst_pts = numpy.float32([query_keypoints[m.queryIdx].pt for m in good]).reshape(-1,1,2)
try:
H, _ = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
except Exception as e:
print 'warning: exception on frame {}: {}'.format(frame_idx, e)
db.execute("UPDATE video_frames SET bounds = '' WHERE id = %s", [frame_id])
prev_bounds = None
continue
prev_counter = 0
if H is None:
db.execute("UPDATE video_frames SET bounds = '' WHERE id = %s", [frame_id])
prev_bounds = None
continue
bound_points = numpy.array([
[0, 0],
[frame.shape[1], 0],
[frame.shape[1], frame.shape[0]],
[0, frame.shape[0]],
], dtype='float32').reshape(-1, 1, 2)
transformed_points = cv2.perspectiveTransform(bound_points, H)
bounds = None
for p in transformed_points[:, 0, :]:
p = geom.Point(p[0], p[1])
if bounds is None:
bounds = p.bounds()
else:
bounds = bounds.extend(p)
print bounds
if prev_bounds is not None:
intersection_area = float(bounds.intersection(prev_bounds).area())
union_area = float(bounds.area() + prev_bounds.area()) - intersection_area
iou = intersection_area / union_area
if iou < 0.6:
print 'iou failed! ({})'.format(iou)
print bounds, prev_bounds
db.execute("UPDATE video_frames SET bounds = '' WHERE id = %s", [frame_id])
prev_bounds = None
continue
poly_str = points_to_poly_str(transformed_points[:, 0, :])
db.execute("UPDATE video_frames SET bounds = %s WHERE id = %s", [poly_str, frame_id])
prev_bounds, prev_frame, prev_gray, prev_homography = bounds, frame, frame_gray, H
# transform detections
db.execute(
"SELECT id, frame_polygon FROM detections WHERE frame_id = %s AND polygon IS NULL",
[frame_id]
)
points = []
detections = []
for row in db.fetchall():
poly_parts = row[1].split(' ')
poly_points = []
for part in poly_parts:
point_parts = part.split(',')
poly_points.append((int(point_parts[0])/2, int(point_parts[1])/2))
detections.append((int(row[0]), len(poly_points)))
points.extend(poly_points)
if len(points) > 0:
points = numpy.array(points, dtype='float32').reshape(-1, 1, 2)
transformed_points = cv2.perspectiveTransform(points, H)
i = 0
for detection_id, num_points in detections:
poly_str = points_to_poly_str(transformed_points[i:i+num_points, 0, :])
db.execute("UPDATE detections SET polygon = %s WHERE id = %s", [poly_str, detection_id])
print poly_str, detection_id
i += num_points
assert i == transformed_points.shape[0]
| [
"[email protected]"
] | |
f5557d5ff6492966343a1b46c76dde955a03f5a7 | b15a9d9c7374c4a1fa5ec3ef63603a8c57e8681f | /Design-Patterns-Python/memento/caretaker.py | 6a143d567f4390a284f8bff63c8f3a579f175f00 | [] | no_license | gohils/zemr_notebook | 3f7490ef7a2559655746c3e2e0dbfb835a83891e | 00d53cea9970df44160c51e6ad2bdeadfae2c91f | refs/heads/master | 2023-08-04T14:32:35.428016 | 2023-07-20T11:51:08 | 2023-07-20T11:51:08 | 222,027,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 761 | py | "The Save/Restore Game functionality"
class CareTaker:
"Guardian. Provides a narrow interface to the mementos"
def __init__(self, originator):
self._originator = originator
self._mementos = []
def save(self):
"Store a new Memento of the Characters current state"
print("CareTaker: Game Save")
memento = self._originator.memento
self._mementos.append(memento)
def restore(self, index):
"""
Replace the Characters current attributes with the state
stored in the saved Memento
"""
print("CareTaker: Restoring Characters attributes from Memento")
memento = self._mementos[index]
self._originator.memento = memento
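# Editor's sketch of intended wiring (assumes an originator class exposing a
# `memento` property, as in the classic pattern; names are illustrative):
#   caretaker = CareTaker(character)
#   caretaker.save()          # snapshot current attributes
#   ...mutate character...
#   caretaker.restore(0)      # roll back to the first snapshot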
| [
"[email protected]"
] | |
b1850bb9f42ce970e1af828ad7e8073288267aa4 | 1929443c8e4ec6ccd79777f18d161546867e17ef | /methods/transformers/tests/test_pipelines_fill_mask.py | 2777bee42548e1da90a9efb1537e16c2cba4f6ce | [
"MIT",
"Apache-2.0"
] | permissive | INK-USC/RiddleSense | 6f4b00546d7f4d5ada12db50929c1f0d7713d541 | a3d57eaf084da9cf6b77692c608e2cd2870fbd97 | refs/heads/main | 2023-08-14T19:01:01.478946 | 2021-07-05T04:06:01 | 2021-07-05T04:06:01 | 376,487,870 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 9,295 | py | import unittest
import pytest
from transformers import pipeline
from transformers.testing_utils import require_tf, require_torch, slow
from .test_pipelines_common import MonoInputPipelineCommonMixin
EXPECTED_FILL_MASK_RESULT = [
[
{"sequence": "<s>My name is John</s>", "score": 0.00782308354973793, "token": 610, "token_str": "ĠJohn"},
{"sequence": "<s>My name is Chris</s>", "score": 0.007475061342120171, "token": 1573, "token_str": "ĠChris"},
],
[
{"sequence": "<s>The largest city in France is Paris</s>", "score": 0.3185044229030609, "token": 2201},
{"sequence": "<s>The largest city in France is Lyon</s>", "score": 0.21112334728240967, "token": 12790},
],
]
EXPECTED_FILL_MASK_TARGET_RESULT = [
[
{
"sequence": "<s>My name is Patrick</s>",
"score": 0.004992353264242411,
"token": 3499,
"token_str": "ĠPatrick",
},
{
"sequence": "<s>My name is Clara</s>",
"score": 0.00019297805556561798,
"token": 13606,
"token_str": "ĠClara",
},
]
]
class FillMaskPipelineTests(MonoInputPipelineCommonMixin, unittest.TestCase):
pipeline_task = "fill-mask"
pipeline_loading_kwargs = {"top_k": 2}
small_models = ["sshleifer/tiny-distilroberta-base"] # Models tested without the @slow decorator
large_models = ["distilroberta-base"] # Models tested with the @slow decorator
mandatory_keys = {"sequence", "score", "token"}
valid_inputs = [
"My name is <mask>",
"The largest city in France is <mask>",
]
    invalid_inputs = [
        "This is <mask> <mask>",  # More than 1 mask_token in the input is not supported
        "This is",  # No mask_token is not supported
    ]
expected_check_keys = ["sequence"]
@require_torch
def test_torch_topk_deprecation(self):
        # The deprecated `topk` argument was only ever accepted at pipeline
        # initialization, never at call time; it must now raise a FutureWarning
with pytest.warns(FutureWarning, match=r".*use `top_k`.*"):
pipeline(task="fill-mask", model=self.small_models[0], topk=1)
@require_torch
def test_torch_fill_mask(self):
valid_inputs = "My name is <mask>"
nlp = pipeline(task="fill-mask", model=self.small_models[0])
outputs = nlp(valid_inputs)
self.assertIsInstance(outputs, list)
# This passes
outputs = nlp(valid_inputs, targets=[" Patrick", " Clara"])
self.assertIsInstance(outputs, list)
# This used to fail with `cannot mix args and kwargs`
outputs = nlp(valid_inputs, something=False)
self.assertIsInstance(outputs, list)
@require_torch
def test_torch_fill_mask_with_targets(self):
valid_inputs = ["My name is <mask>"]
valid_targets = [[" Teven", " Patrick", " Clara"], [" Sam"]]
invalid_targets = [[], [""], ""]
for model_name in self.small_models:
nlp = pipeline(task="fill-mask", model=model_name, tokenizer=model_name, framework="pt")
for targets in valid_targets:
outputs = nlp(valid_inputs, targets=targets)
self.assertIsInstance(outputs, list)
self.assertEqual(len(outputs), len(targets))
for targets in invalid_targets:
self.assertRaises(ValueError, nlp, valid_inputs, targets=targets)
@require_tf
def test_tf_fill_mask_with_targets(self):
valid_inputs = ["My name is <mask>"]
valid_targets = [[" Teven", " Patrick", " Clara"], [" Sam"]]
invalid_targets = [[], [""], ""]
for model_name in self.small_models:
nlp = pipeline(task="fill-mask", model=model_name, tokenizer=model_name, framework="tf")
for targets in valid_targets:
outputs = nlp(valid_inputs, targets=targets)
self.assertIsInstance(outputs, list)
self.assertEqual(len(outputs), len(targets))
for targets in invalid_targets:
self.assertRaises(ValueError, nlp, valid_inputs, targets=targets)
@require_torch
@slow
def test_torch_fill_mask_results(self):
mandatory_keys = {"sequence", "score", "token"}
valid_inputs = [
"My name is <mask>",
"The largest city in France is <mask>",
]
valid_targets = [" Patrick", " Clara"]
for model_name in self.large_models:
nlp = pipeline(
task="fill-mask",
model=model_name,
tokenizer=model_name,
framework="pt",
top_k=2,
)
mono_result = nlp(valid_inputs[0], targets=valid_targets)
self.assertIsInstance(mono_result, list)
self.assertIsInstance(mono_result[0], dict)
for mandatory_key in mandatory_keys:
self.assertIn(mandatory_key, mono_result[0])
multi_result = [nlp(valid_input) for valid_input in valid_inputs]
self.assertIsInstance(multi_result, list)
self.assertIsInstance(multi_result[0], (dict, list))
for result, expected in zip(multi_result, EXPECTED_FILL_MASK_RESULT):
self.assertEqual(set([o["sequence"] for o in result]), set([o["sequence"] for o in result]))
if isinstance(multi_result[0], list):
multi_result = multi_result[0]
for result in multi_result:
for key in mandatory_keys:
self.assertIn(key, result)
self.assertRaises(Exception, nlp, [None])
valid_inputs = valid_inputs[:1]
mono_result = nlp(valid_inputs[0], targets=valid_targets)
self.assertIsInstance(mono_result, list)
self.assertIsInstance(mono_result[0], dict)
for mandatory_key in mandatory_keys:
self.assertIn(mandatory_key, mono_result[0])
multi_result = [nlp(valid_input) for valid_input in valid_inputs]
self.assertIsInstance(multi_result, list)
self.assertIsInstance(multi_result[0], (dict, list))
for result, expected in zip(multi_result, EXPECTED_FILL_MASK_TARGET_RESULT):
self.assertEqual(set([o["sequence"] for o in result]), set([o["sequence"] for o in result]))
if isinstance(multi_result[0], list):
multi_result = multi_result[0]
for result in multi_result:
for key in mandatory_keys:
self.assertIn(key, result)
self.assertRaises(Exception, nlp, [None])
@require_tf
@slow
def test_tf_fill_mask_results(self):
mandatory_keys = {"sequence", "score", "token"}
valid_inputs = [
"My name is <mask>",
"The largest city in France is <mask>",
]
valid_targets = [" Patrick", " Clara"]
for model_name in self.large_models:
nlp = pipeline(task="fill-mask", model=model_name, tokenizer=model_name, framework="tf", topk=2)
mono_result = nlp(valid_inputs[0], targets=valid_targets)
self.assertIsInstance(mono_result, list)
self.assertIsInstance(mono_result[0], dict)
for mandatory_key in mandatory_keys:
self.assertIn(mandatory_key, mono_result[0])
multi_result = [nlp(valid_input) for valid_input in valid_inputs]
self.assertIsInstance(multi_result, list)
self.assertIsInstance(multi_result[0], (dict, list))
for result, expected in zip(multi_result, EXPECTED_FILL_MASK_RESULT):
self.assertEqual(set([o["sequence"] for o in result]), set([o["sequence"] for o in result]))
if isinstance(multi_result[0], list):
multi_result = multi_result[0]
for result in multi_result:
for key in mandatory_keys:
self.assertIn(key, result)
self.assertRaises(Exception, nlp, [None])
valid_inputs = valid_inputs[:1]
mono_result = nlp(valid_inputs[0], targets=valid_targets)
self.assertIsInstance(mono_result, list)
self.assertIsInstance(mono_result[0], dict)
for mandatory_key in mandatory_keys:
self.assertIn(mandatory_key, mono_result[0])
multi_result = [nlp(valid_input) for valid_input in valid_inputs]
self.assertIsInstance(multi_result, list)
self.assertIsInstance(multi_result[0], (dict, list))
for result, expected in zip(multi_result, EXPECTED_FILL_MASK_TARGET_RESULT):
self.assertEqual(set([o["sequence"] for o in result]), set([o["sequence"] for o in result]))
if isinstance(multi_result[0], list):
multi_result = multi_result[0]
for result in multi_result:
for key in mandatory_keys:
self.assertIn(key, result)
self.assertRaises(Exception, nlp, [None])
| [
"[email protected]"
] | |
4b94ea0efb14d60e69e0110fd84977c9ba7a7611 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-compute/azure/mgmt/compute/v2018_04_01/models/virtual_machine_scale_set_public_ip_address_configuration_py3.py | 76a82b78db8773b9a74688ddbdadeac51ed6ec07 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 2,197 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetPublicIPAddressConfiguration(Model):
"""Describes a virtual machines scale set IP Configuration's PublicIPAddress
configuration.
All required parameters must be populated in order to send to Azure.
:param name: Required. The publicIP address configuration name.
:type name: str
:param idle_timeout_in_minutes: The idle timeout of the public IP address.
:type idle_timeout_in_minutes: int
:param dns_settings: The dns settings to be applied on the publicIP
     addresses.
:type dns_settings:
~azure.mgmt.compute.v2018_04_01.models.VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings
:param ip_tags: The list of IP tags associated with the public IP address.
:type ip_tags:
list[~azure.mgmt.compute.v2018_04_01.models.VirtualMachineScaleSetIpTag]
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings'},
'ip_tags': {'key': 'properties.ipTags', 'type': '[VirtualMachineScaleSetIpTag]'},
}
def __init__(self, *, name: str, idle_timeout_in_minutes: int=None, dns_settings=None, ip_tags=None, **kwargs) -> None:
super(VirtualMachineScaleSetPublicIPAddressConfiguration, self).__init__(**kwargs)
self.name = name
self.idle_timeout_in_minutes = idle_timeout_in_minutes
self.dns_settings = dns_settings
self.ip_tags = ip_tags
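    # Editor's sketch: constructing this model (only `name` is required):
    #   ip_config = VirtualMachineScaleSetPublicIPAddressConfiguration(
    #       name="pub1", idle_timeout_in_minutes=15)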
| [
"[email protected]"
] | |
4673777d1c1a994069de18c0acda79831f581168 | 611055f18da392e5a63b2d80ce102701201981eb | /src/apps/comentarios/admin.py | 52f74fce4df52fd09dd0fe7013e06fc2089b1463 | [] | no_license | danielhuamani/django-backbone | facf6f2ced78991577957bd2f8bb8c42255cd795 | 6523e19d8599753ccf28b6a2d4f511ec0fe0f1c7 | refs/heads/master | 2021-01-10T12:47:26.514543 | 2015-11-18T17:12:02 | 2015-11-18T17:12:02 | 45,657,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | from django.contrib import admin
from .models import Comentario
# Register your models here.
admin.site.register(Comentario)
| [
"[email protected]"
] | |
56f8397cd80c31bf0258a6c8726c43dfa3581ba0 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5688567749672960_1/Python/Jakube/A.py | 4b619d8aaec440fa7678ace44a08aae319de1d8e | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,162 | py | def splitter(number):
s = str(number)
return int(s[:len(s)//2] or "0"), int(s[len(s)//2:]), len(s[len(s)//2:])
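    # Editor's example: splitter(1234) -> (12, 34, 2); splitter(7) -> (0, 7, 1)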
def compute(number):
steps = 0
while number:
# get second part of the number
half1, half2, l = splitter(number)
if half2 == 0:
steps += 1
number -= 1
half1, half2, l = splitter(number)
steps += half2 - 1
        number -= half2 - 1
number = half1 * 10**l + 1
if number == 1:
return steps + 1
# switch
if str(number) != str(number)[::-1]:
number = int(str(number)[::-1])
steps += 1
mi = int(str(number)[1:] or str(number))
number -= mi
steps += mi
def read_number(f):
return int(f.readline().strip())
def main():
with open('A-large.in', 'r') as f:
test_cases = read_number(f)
for test_case in range(test_cases):
number = read_number(f)
#print(number)
print('Case #{}: {}'.format(test_case + 1, compute(number)))
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
76347a0bc807d2e3b00e30fef2748954370b3171 | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-2/dn5 - tviti/M-17135-2263.py | 9deedfe8d85e527c7a5c6e89ba8391269f3c8492 | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,569 | py | def unikati(s):
sez = []
for x in s:
if x not in sez:
sez.append(x)
return sez
def avtor(tvit):
a = ""
for x in range(len(tvit)):
if tvit[x] == ":":
break
else:
a += tvit[x]
return a
def izloci_besedo(beseda):
beseda_1 = ""
for x in range(len(beseda)):
if beseda[x].isalnum() == True:
beseda_1 += beseda[x]
elif beseda[x] == "-" and beseda[x-1].isalnum() == True and beseda[x+1].isalnum() == True:
beseda_1 += beseda[x]
return beseda_1
def vsi_avtorji(tviti):
sez = []
for x in tviti:
avtor_ime = avtor(x)
if avtor_ime not in sez:
sez.append(avtor_ime)
return sez
def se_zacne_z(tvit, c):
sez = tvit.split()
sez_besed = []
for x in sez:
if x[0] == c:
if x[-1].isalnum() == True:
sez_besed.append(x[1:])
else:
sez_besed.append(x[1:-1])
return sez_besed
def vse_afne(tviti):
sez_imen = []
for x in tviti:
besede = x.split()
for x in besede:
if x[0] == "@":
if x[-1].isalnum() == True:
if x[1:] not in sez_imen:
sez_imen.append(x[1:])
else:
if x[1:-1] not in sez_imen:
sez_imen.append(x[1:-1])
return sez_imen
def vse_osebe(tviti):
sez = vse_afne(tviti)
sez_imen = vsi_avtorji(tviti)
n = 0
for x in range(len(sez)):
if sez[n] not in sez_imen:
sez_imen.append(sez[n])
n += 1
sez = sorted(sez_imen)
return sez
def vsi_hashtagi(tviti):
sez = []
for x in tviti:
besede = x.split()
for x in besede:
if x[0] == "#":
if x[-1].isalnum() == True:
if x[1:] not in sez:
sez.append(x[1:])
else:
if x[1:-1] not in sez:
sez.append(x[1:-1])
return sez
def zberi_se_zacne_z(tviti, c):
sez_besed = []
for x in tviti:
sez = x.split()
for x in sez:
if x[0] == c:
if x[-1].isalnum() == True:
if x[1:] not in sez_besed:
sez_besed.append(x[1:])
else:
if x[1:-1] not in sez_besed:
sez_besed.append(x[1:-1])
return sez_besed
def custva(tviti, hashtagi):
sez_imen = []
for x in tviti:
sez = x.split()
avtor = sez[0][:-1]
for x in sez:
if x[0] == "#":
if x[1:] in hashtagi and avtor not in sez_imen:
sez_imen.append(avtor)
return sorted(sez_imen)
def se_poznata(tviti, oseba1, oseba2):
zakljucek = False
sez = [oseba1, oseba2]
for x in sez:
for y in tviti:
besede = y.split()
for s in besede:
sez_besed = []
if s[0] == "@":
if besede[0][:-1] == x:
if s[-1].isalnum() == True:
if s[1:] not in sez_besed:
sez_besed.append(s[1:])
else:
if s[1:-1] not in sez_besed:
sez_besed.append(s[1:-1])
for d in sez_besed:
if x == oseba1:
if oseba2 in sez_besed:
zakljucek = True
else:
if oseba1 in sez_besed:
zakljucek = True
return zakljucek
import unittest
class TestTviti(unittest.TestCase):
tviti = [
"sandra: Spet ta dež. #dougcajt",
"berta: @sandra Delaj domačo za #programiranje1",
"sandra: @berta Ne maram #programiranje1 #krneki",
"ana: kdo so te @berta, @cilka, @dani? #krneki",
"cilka: jst sm pa #luft",
"benjamin: pogrešam ano #zalosten",
"ema: @benjamin @ana #split? po dvopičju, za začetek?",
]
def test_unikat(self):
self.assertEqual(unikati([1, 2, 1, 1, 3, 2]), [1, 2, 3])
self.assertEqual(unikati([1, 3, 2, 1, 1, 3, 2]), [1, 3, 2])
self.assertEqual(unikati([1, 5, 4, 3, 2]), [1, 5, 4, 3, 2])
self.assertEqual(unikati([1, 1, 1, 1, 1]), [1])
self.assertEqual(unikati([1]), [1])
self.assertEqual(unikati([]), [])
self.assertEqual(unikati(["Ana", "Berta", "Cilka", "Berta"]), ["Ana", "Berta", "Cilka"])
def test_avtor(self):
self.assertEqual(avtor("janez: pred dvopičjem avtor, potem besedilo"), "janez")
self.assertEqual(avtor("ana: malo krajse ime"), "ana")
self.assertEqual(avtor("benjamin: pomembne so tri stvari: prva, druga in tretja"), "benjamin")
def test_vsi_avtorji(self):
self.assertEqual(vsi_avtorji(self.tviti), ["sandra", "berta", "ana", "cilka", "benjamin", "ema"])
self.assertEqual(vsi_avtorji(self.tviti[:3]), ["sandra", "berta"])
def test_izloci_besedo(self):
self.assertEqual(izloci_besedo("@ana"), "ana")
self.assertEqual(izloci_besedo("@@ana!!!"), "ana")
self.assertEqual(izloci_besedo("ana"), "ana")
self.assertEqual(izloci_besedo("!#$%\"=%/%()/Ben-jamin'"), "Ben-jamin")
def test_vse_na_crko(self):
self.assertEqual(se_zacne_z("Benjamin $je $skocil! Visoko!", "$"), ["je", "skocil"])
self.assertEqual(se_zacne_z("Benjamin $je $skocil! #Visoko!", "$"), ["je", "skocil"])
self.assertEqual(se_zacne_z("ana: kdo so te @berta, @cilka, @dani? #krneki", "@"), ["berta", "cilka", "dani"])
def test_zberi_na_crko(self):
self.assertEqual(zberi_se_zacne_z(self.tviti, "@"), ['sandra', 'berta', 'cilka', 'dani', 'benjamin', 'ana'])
self.assertEqual(zberi_se_zacne_z(self.tviti, "#"), ['dougcajt', 'programiranje1', 'krneki', 'luft', 'zalosten', 'split'])
def test_vse_afne(self):
self.assertEqual(vse_afne(self.tviti), ['sandra', 'berta', 'cilka', 'dani', 'benjamin', 'ana'])
def test_vsi_hashtagi(self):
self.assertEqual(vsi_hashtagi(self.tviti), ['dougcajt', 'programiranje1', 'krneki', 'luft', 'zalosten', 'split'])
def test_vse_osebe(self):
self.assertEqual(vse_osebe(self.tviti), ['ana', 'benjamin', 'berta', 'cilka', 'dani', 'ema', 'sandra'])
class TestDodatna(unittest.TestCase):
tviti = [
"sandra: Spet ta dež. #dougcajt",
"berta: @sandra Delaj domačo za #programiranje1",
"sandra: @berta Ne maram #programiranje1 #krneki",
"ana: kdo so te @berta, @cilka, @dani? #krneki",
"cilka: jst sm pa #luft",
"benjamin: pogrešam ano #zalosten",
"ema: @benjamin @ana #split? po dvopičju, za začetek?",
]
def test_custva(self):
self.assertEqual(custva(self.tviti, ["dougcajt", "krneki"]), ["ana", "sandra"])
self.assertEqual(custva(self.tviti, ["luft"]), ["cilka"])
self.assertEqual(custva(self.tviti, ["meh"]), [])
def test_se_poznata(self):
self.assertTrue(se_poznata(self.tviti, "ana", "berta"))
self.assertTrue(se_poznata(self.tviti, "ema", "ana"))
self.assertFalse(se_poznata(self.tviti, "sandra", "ana"))
self.assertFalse(se_poznata(self.tviti, "cilka", "luft"))
self.assertFalse(se_poznata(self.tviti, "cilka", "balon"))
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
d0ac595a122ecd472ef080d0b8bd510635b637ea | 6fab6422c26e00cde21f51f8f10eb88ff5c458af | /api/serializers.py | 51f7198a3741d3245a04b600aeef1d4bc543c61a | [] | no_license | nicksonlangat/alzy-api | ffa3f43198fa0a6e8f58b88ae3f206e4c69f6cfb | d4a95da469d0895eb0c8a2897f3927e61da89aa9 | refs/heads/master | 2023-02-28T08:36:15.600769 | 2021-02-09T22:37:03 | 2021-02-09T22:37:03 | 337,120,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 930 | py | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import *
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
extra_kwargs = {'password' : {'write_only': True, 'required': True}}
def create(self, validated_data):
user = User.objects.create_user(**validated_data)
return user
class ReminderSerializer(serializers.ModelSerializer):
class Meta:
model = Reminder
fields = ('id', 'title','details','deadline',)
def create(self, validated_data): #overwrite built in create fn.
# create new instance of the model
reminder=Reminder.objects.create(**validated_data)
return reminder
class FileSerializer(serializers.ModelSerializer):
class Meta:
model = File
fields = "__all__" | [
"[email protected]"
] | |
ee5d9088a648e83c220c2dc7e4f83db84f9ab93e | f02e654d5590a861804e3220ed76ba2192e1699b | /aslam/deprecated/ASLAM/deprecated/old2/test.py | bdc9562460aa075503b52776c3db9d3ae345080c | [
"MIT",
"BSD-3-Clause"
] | permissive | AmarNathH/software | 73e2afd3affaf2c1595b406480edac8b8fb2fcac | e225810c7501250f48add43349a64f49450cc79f | refs/heads/master | 2020-12-02T20:50:18.439874 | 2017-07-03T16:51:07 | 2017-07-03T16:51:07 | 96,219,939 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | #!/usr/bin/env python2.7
from classes import *
import numpy as n
S = State(5, 1, 5, 1)
for x in range(5): S.objects[str(x)] = Object()
S.update()
S.hObs('0', 45, 5)
S.dObs('0', 10**(1./2), 0.5)
S.update()
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(111, projection = '3d')
pmap = S.objects['0'].pmap
xv, yv, zv = [], [], []
for x in range(len(pmap)):
for y in range(len(pmap[0])):
xv += [x / GRIDSCALE]
yv += [y / GRIDSCALE]
zv += [pmap[x][y]]
ax.scatter(xv, yv, zv)
plt.show()
#for i in range(len(x)):
# for j in range(len(y)):
| [
"[email protected]"
] | |
1197d22b4092f0070ba99d63e0074573c7e860f4 | 6045f8519065f17b9d832a8e051723a520b58e3c | /ex Basic Sytax/2. Drink Something.py | bc6082c6982ee35b8a65971bc335d24452e1b965 | [] | no_license | a-angeliev/Python-Fundamentals-SoftUni | a308a6c94eb705a3319f6e081543c1cad0b1b37d | a9a5eba0376ebc7395daeda527408d1e59d58316 | refs/heads/master | 2023-07-19T05:55:28.104160 | 2021-09-11T18:25:58 | 2021-09-11T18:25:58 | 399,575,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | n = int(input())
if n<=14:
print("drink toddy")
elif n<=18:
print("drink coke")
elif n<=21:
print("drink beer")
else:
print("drink whisky") | [
"[email protected]"
] | |
f3c46d47d4582718dfb6dd5b01fc9693777fc6bd | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/network/azure-mgmt-dns/azure/mgmt/dns/v2023_07_01_preview/aio/_dns_management_client.py | 27d21876b1846ae591194de288047cefd6a1b680 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 5,306 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from ..._serialization import Deserializer, Serializer
from ._configuration import DnsManagementClientConfiguration
from .operations import DnsResourceReferenceOperations, DnssecConfigsOperations, RecordSetsOperations, ZonesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class DnsManagementClient: # pylint: disable=client-accepts-api-version-keyword
"""The DNS Management Client.
:ivar dnssec_configs: DnssecConfigsOperations operations
:vartype dnssec_configs:
azure.mgmt.dns.v2023_07_01_preview.aio.operations.DnssecConfigsOperations
:ivar record_sets: RecordSetsOperations operations
:vartype record_sets: azure.mgmt.dns.v2023_07_01_preview.aio.operations.RecordSetsOperations
:ivar zones: ZonesOperations operations
:vartype zones: azure.mgmt.dns.v2023_07_01_preview.aio.operations.ZonesOperations
:ivar dns_resource_reference: DnsResourceReferenceOperations operations
:vartype dns_resource_reference:
azure.mgmt.dns.v2023_07_01_preview.aio.operations.DnsResourceReferenceOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2023-07-01-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = DnsManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.dnssec_configs = DnssecConfigsOperations(
self._client, self._config, self._serialize, self._deserialize, "2023-07-01-preview"
)
self.record_sets = RecordSetsOperations(
self._client, self._config, self._serialize, self._deserialize, "2023-07-01-preview"
)
self.zones = ZonesOperations(
self._client, self._config, self._serialize, self._deserialize, "2023-07-01-preview"
)
self.dns_resource_reference = DnsResourceReferenceOperations(
self._client, self._config, self._serialize, self._deserialize, "2023-07-01-preview"
)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "DnsManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
| [
"[email protected]"
] | |
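For context, a minimal construction sketch for the async client defined above — assuming `azure-identity` is installed and ambient credentials resolve; the subscription ID is a placeholder:

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.dns.v2023_07_01_preview.aio import DnsManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DnsManagementClient(credential, "<subscription-id>") as client:
            # zones.list() returns an async paged iterable; use `async for`.
            async for zone in client.zones.list():
                print(zone.name)

asyncio.run(main())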
e8bad14d95e08fc8e990e74f3bdf81de17ebc718 | 23b7fa714698be444d82ac649314616495c66235 | /petl/transform/__init__.py | 5dac1f6cd765844d320a78b291999fef24a54ef6 | [
"MIT"
] | permissive | mbelmadani/petl | a38ed1e595157fb556fe86ae32e796f6eff60a7a | b6867f056bf44d699f8f7b8432769e4b5127e937 | refs/heads/master | 2021-04-03T09:04:56.785188 | 2019-08-06T15:09:40 | 2019-08-06T15:09:40 | 124,597,339 | 0 | 0 | MIT | 2018-03-09T21:53:44 | 2018-03-09T21:53:44 | null | UTF-8 | Python | false | false | 2,444 | py | from __future__ import absolute_import, print_function, division
from petl.transform.basics import cut, cutout, movefield, cat, annex, \
addfield, addfieldusingcontext, addrownumbers, addcolumn, rowslice, head, \
tail, skipcomments, stack
from petl.transform.headers import rename, setheader, extendheader, \
pushheader, skip, prefixheader, suffixheader, sortheader
from petl.transform.conversions import convert, convertall, replace, \
replaceall, update, convertnumbers, format, formatall, interpolate, \
interpolateall
from petl.transform.sorts import sort, mergesort, issorted
from petl.transform.selects import select, selectop, selectcontains, \
selecteq, selectfalse, selectge, selectgt, selectin, selectis, \
selectisinstance, selectisnot, selectle, selectlt, selectne, selectnone, \
selectnotin, selectnotnone, selectrangeclosed, selectrangeopen, \
selectrangeopenleft, selectrangeopenright, selecttrue, \
selectusingcontext, rowlenselect, facet, biselect
from petl.transform.joins import join, leftjoin, rightjoin, outerjoin, \
crossjoin, antijoin, lookupjoin, unjoin
from petl.transform.hashjoins import hashjoin, hashleftjoin, hashrightjoin, \
hashantijoin, hashlookupjoin
from petl.transform.reductions import rowreduce, mergeduplicates,\
aggregate, groupcountdistinctvalues, groupselectfirst, groupselectmax, \
groupselectmin, merge, fold, Conflict, groupselectlast
from petl.transform.fills import filldown, fillright, fillleft
from petl.transform.regex import capture, split, search, searchcomplement, \
sub
from petl.transform.reshape import melt, recast, transpose, pivot, flatten, \
unflatten
from petl.transform.maps import fieldmap, rowmap, rowmapmany, rowgroupmap
from petl.transform.unpacks import unpack, unpackdict
from petl.transform.dedup import duplicates, unique, distinct, conflicts, \
isunique
from petl.transform.setops import complement, intersection, \
recordcomplement, diff, recorddiff, hashintersection, hashcomplement
from petl.transform.intervals import intervaljoin, intervalleftjoin, \
intervaljoinvalues, intervalantijoin, intervallookup, intervallookupone, \
intervalrecordlookup, intervalrecordlookupone, intervalsubtract, \
facetintervallookup, facetintervallookupone, facetintervalrecordlookup, \
facetintervalrecordlookupone, collapsedintervals
from petl.transform.validation import validate
| [
"[email protected]"
] |
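Since this module only re-exports petl's transform API, here is a tiny self-contained sketch exercising a few of those names — petl accepts plain lists of lists as tables, and transforms stay lazy until the table is rendered:

import petl as etl

table = [['name', 'qty'], ['apples', 3], ['pears', 1], ['kiwis', 7]]
t1 = etl.selectgt(table, 'qty', 2)      # keep rows where qty > 2
t2 = etl.sort(t1, 'qty', reverse=True)  # largest quantity first
t3 = etl.cut(t2, 'name')                # project down to one column
print(etl.look(t3))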