hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses sequencelengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses sequencelengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses sequencelengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
16a329d42e5fe4d870ae6840dac571c4c4bd741b | 221 | py | Python | ImageSearcher/admin.py | carpensa/dicom-harpooner | 2d998c22c51e372fb9b5f3508c900af6f4405cd3 | ["BSD-3-Clause"] | 1 | 2021-05-24T21:45:05.000Z | 2021-05-24T21:45:05.000Z | ImageSearcher/admin.py | carpensa/dicom-harpooner | 2d998c22c51e372fb9b5f3508c900af6f4405cd3 | ["BSD-3-Clause"] | null | null | null | ImageSearcher/admin.py | carpensa/dicom-harpooner | 2d998c22c51e372fb9b5f3508c900af6f4405cd3 | ["BSD-3-Clause"] | null | null | null |
from django.contrib import admin
from dicoms.models import Subject
from dicoms.models import Session
from dicoms.models import Series
admin.site.register(Session)
admin.site.register(Subject)
admin.site.register(Series)
| 24.555556 | 33 | 0.837104 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
16a335de057546c0e95c5699aa9470bc30a7f928 | 334 | py | Python | src/djangoreactredux/wsgi.py | noscripter/django-react-redux-jwt-base | 078fb86005db106365df51fa11d8602fa432e3c3 | ["MIT"] | 4 | 2016-07-03T08:18:45.000Z | 2018-12-25T07:47:41.000Z | src/djangoreactredux/wsgi.py | noscripter/django-react-redux-jwt-base | 078fb86005db106365df51fa11d8602fa432e3c3 | ["MIT"] | 2 | 2021-03-20T00:02:08.000Z | 2021-06-10T23:34:26.000Z | src/djangoreactredux/wsgi.py | noscripter/django-react-redux-jwt-base | 078fb86005db106365df51fa11d8602fa432e3c3 | ["MIT"] | 1 | 2019-08-02T14:51:41.000Z | 2019-08-02T14:51:41.000Z |
"""
WSGI config for django-react-redux-jwt-base project.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangoreactredux.settings.dev")
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
| 23.857143 | 80 | 0.820359 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 116 | 0.347305 |
16a40296272a4a2617c7e2666b6828a4cb958030 | 1,414 | py | Python | simple_settings/dynamic_settings/base.py | matthewh/simple-settings | dbddf8d5be7096ee7c4c3cc6d82824befa9b714f | ["MIT"] | null | null | null | simple_settings/dynamic_settings/base.py | matthewh/simple-settings | dbddf8d5be7096ee7c4c3cc6d82824befa9b714f | ["MIT"] | null | null | null | simple_settings/dynamic_settings/base.py | matthewh/simple-settings | dbddf8d5be7096ee7c4c3cc6d82824befa9b714f | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import re
from copy import deepcopy
import jsonpickle
class BaseReader(object):
"""
Base class for dynamic readers
"""
_default_conf = {}
def __init__(self, conf):
self.conf = deepcopy(self._default_conf)
self.conf.update(conf)
self.key_pattern = self.conf.get('pattern')
self.auto_casting = self.conf.get('auto_casting')
self.key_prefix = self.conf.get('prefix')
def get(self, key):
if not self._is_valid_key(key):
return
result = self._get(self._qualified_key(key))
if self.auto_casting and (result is not None):
result = jsonpickle.decode(result)
return result
def set(self, key, value):
if not self._is_valid_key(key):
return
if self.auto_casting:
value = jsonpickle.encode(value)
self._set(self._qualified_key(key), value)
def _is_valid_key(self, key):
if not self.key_pattern:
return True
return bool(re.match(self.key_pattern, key))
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key)
| 28.28 | 68 | 0.609618 | 1,332 | 0.942008 | 0 | 0 | 0 | 0 | 0 | 0 | 294 | 0.207921 |
16a410bbf9dbba9b62a772c35376b67270885de8 | 3,981 | py | Python | scripts/map_frame_to_utm_tf_publisher.py | coincar-sim/lanelet2_interface_ros | f1738766dd323ed64a4ebcc8254438920a587b80 | ["BSD-3-Clause"] | 7 | 2019-03-27T03:59:50.000Z | 2021-10-17T10:46:29.000Z | scripts/map_frame_to_utm_tf_publisher.py | coincar-sim/lanelet2_interface_ros | f1738766dd323ed64a4ebcc8254438920a587b80 | ["BSD-3-Clause"] | 6 | 2019-04-13T15:55:55.000Z | 2021-06-01T21:08:18.000Z | scripts/map_frame_to_utm_tf_publisher.py | coincar-sim/lanelet2_interface_ros | f1738766dd323ed64a4ebcc8254438920a587b80 | ["BSD-3-Clause"] | 4 | 2021-03-25T09:22:55.000Z | 2022-03-22T05:40:49.000Z |
#!/usr/bin/env python
#
# Copyright (c) 2018
# FZI Forschungszentrum Informatik, Karlsruhe, Germany (www.fzi.de)
# KIT, Institute of Measurement and Control, Karlsruhe, Germany (www.mrt.kit.edu)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import roslib
import rospy
import tf
import tf2_ros
import geometry_msgs.msg
import lanelet2
stb = None
static_transform = None
lat_origin = None
lon_origin = None
map_frame_id = None
actual_utm_with_no_offset_frame_id = None
def timer_callback(event):
global stb, static_transform
static_transform.header.stamp = rospy.Time.now()
stb.sendTransform(static_transform)
def wait_for_params_successful():
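    # Poll the parameter server for up to 3000 attempts (10 ms between retries) until the
    # lanelet2_interface_ros parameters are available; returns False if they never appear.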
global lat_origin, lon_origin, map_frame_id, actual_utm_with_no_offset_frame_id
for i in range(3000):
try:
lat_origin = float(rospy.get_param("/lanelet2_interface_ros/lat_origin"))
lon_origin = float(rospy.get_param("/lanelet2_interface_ros/lon_origin"))
map_frame_id = rospy.get_param("/lanelet2_interface_ros/map_frame_id")
actual_utm_with_no_offset_frame_id = rospy.get_param(
"/lanelet2_interface_ros/actual_utm_with_no_offset_frame_id")
except Exception:
rospy.sleep(0.01)
continue
return True
return False
if __name__ == '__main__':
rospy.init_node('map_frame_to_utm_tf_publisher')
if not wait_for_params_successful():
rospy.logerr("map_frame_to_utm_tf_publisher: Could not initialize")
exit()
origin_latlon = lanelet2.core.GPSPoint(lat_origin, lon_origin)
projector = lanelet2.projection.UtmProjector(
lanelet2.io.Origin(origin_latlon), False, False)
origin_xy = projector.forward(origin_latlon)
stb = tf2_ros.TransformBroadcaster()
static_transform = geometry_msgs.msg.TransformStamped()
static_transform.header.stamp = rospy.Time.now()
static_transform.header.frame_id = map_frame_id
static_transform.child_frame_id = actual_utm_with_no_offset_frame_id
static_transform.transform.translation.x = -origin_xy.x
static_transform.transform.translation.y = -origin_xy.y
static_transform.transform.translation.z = 0.0
q = tf.transformations.quaternion_from_euler(0, 0, 0)
static_transform.transform.rotation.x = q[0]
static_transform.transform.rotation.y = q[1]
static_transform.transform.rotation.z = q[2]
static_transform.transform.rotation.w = q[3]
rospy.Timer(rospy.Duration(1.), timer_callback)
rospy.spin()
| 38.278846 | 85 | 0.757096 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,939 | 0.487064 |
16a5b8fdf510e7bdeb3c6bd8d9c144db7f897552 | 52 | py | Python | lectures/05-python-intro/examples/argv.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | ["MIT"] | 14 | 2019-07-14T08:29:04.000Z | 2022-03-07T06:33:26.000Z | lectures/05-python-intro/examples/argv.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | ["MIT"] | 4 | 2020-03-24T18:25:26.000Z | 2021-08-23T20:44:07.000Z | lectures/05-python-intro/examples/argv.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | ["MIT"] | 33 | 2019-01-05T17:03:47.000Z | 2019-11-11T20:48:24.000Z |
#!/usr/bin/env python3
import sys
print(sys.argv)
| 8.666667 | 22 | 0.711538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 22 | 0.423077 |
16a6cc579db685a8a411c51c09771255b3e6c2c9 | 366 | py | Python | tests/fixtures.py | easyas314159/cnftools | 67896cf3d17587accfc5ad7e30730fea2394f558 | ["MIT"] | null | null | null | tests/fixtures.py | easyas314159/cnftools | 67896cf3d17587accfc5ad7e30730fea2394f558 | ["MIT"] | null | null | null | tests/fixtures.py | easyas314159/cnftools | 67896cf3d17587accfc5ad7e30730fea2394f558 | ["MIT"] | null | null | null |
from itertools import chain
def make_comparable(*clauses):
return set((frozenset(c) for c in chain(*clauses)))
def count_clauses(*clauses):
total = 0
for subclauses in clauses:
total += len(subclauses)
return total
def unique_literals(*clauses):
literals = set()
for clause in chain(*clauses):
literals.update((abs(l) for l in clause))
return literals
| 21.529412 | 52 | 0.734973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
16a762cb2b4ddc4c0f253e56da58680346091ea8 | 7,879 | py | Python | applications/FluidDynamicsApplication/tests/sod_shock_tube_test.py | Rodrigo-Flo/Kratos | f718cae5d1618e9c0e7ed1da9e95b7a853e62b1b | ["BSD-4-Clause"] | null | null | null | applications/FluidDynamicsApplication/tests/sod_shock_tube_test.py | Rodrigo-Flo/Kratos | f718cae5d1618e9c0e7ed1da9e95b7a853e62b1b | ["BSD-4-Clause"] | null | null | null | applications/FluidDynamicsApplication/tests/sod_shock_tube_test.py | Rodrigo-Flo/Kratos | f718cae5d1618e9c0e7ed1da9e95b7a853e62b1b | ["BSD-4-Clause"] | 67 | 2017-07-27T14:32:47.000Z | 2021-12-27T13:10:37.000Z |
# --------------------------------------------------------
import KratosMultiphysics
import KratosMultiphysics.KratosUnittest as KratosUnittest
import KratosMultiphysics.kratos_utilities as KratosUtilities
from KratosMultiphysics.FluidDynamicsApplication.fluid_dynamics_analysis import FluidDynamicsAnalysis
class SodShockTubeTest(KratosUnittest.TestCase):
def testSodShockTubeExplicitASGS(self):
self.solver_type = "CompressibleExplicit"
self.use_oss = False
self.shock_capturing = False
self._CustomizeSimulationSettings()
def testSodShockTubeExplicitASGSShockCapturing(self):
self.solver_type = "CompressibleExplicit"
self.use_oss = False
self.shock_capturing = True
self._CustomizeSimulationSettings()
def testSodShockTubeExplicitOSS(self):
self.solver_type = "CompressibleExplicit"
self.use_oss = True
self.shock_capturing = False
self._CustomizeSimulationSettings()
def testSodShockTubeExplicitOSSShockCapturing(self):
self.solver_type = "CompressibleExplicit"
self.use_oss = True
self.shock_capturing = True
self._CustomizeSimulationSettings()
def setUp(self):
self.print_output = False
self.print_reference_values = False
self.check_absolute_tolerance = 1.0e-8
self.check_relative_tolerance = 1.0e-10
self.work_folder = "sod_shock_tube_test"
settings_filename = "ProjectParameters.json"
# Read the simulation settings
with KratosUnittest.WorkFolderScope(self.work_folder,__file__):
with open(settings_filename,'r') as parameter_file:
self.parameters = KratosMultiphysics.Parameters(parameter_file.read())
def runTest(self):
# If required, add the output process to the test settings
if self.print_output:
self._AddOutput()
# If required, add the reference values output process to the test settings
if self.print_reference_values:
self._AddReferenceValuesOutput()
else:
self._AddReferenceValuesCheck()
# Create the test simulation
with KratosUnittest.WorkFolderScope(self.work_folder,__file__):
self.model = KratosMultiphysics.Model()
simulation = FluidDynamicsAnalysis(self.model, self.parameters)
simulation.Run()
def tearDown(self):
with KratosUnittest.WorkFolderScope(self.work_folder, __file__):
KratosUtilities.DeleteFileIfExisting('sod_shock_tube_geom_coarse.time')
def _CustomizeSimulationSettings(self):
# Customize simulation settings
self.parameters["solver_settings"]["solver_type"].SetString(self.solver_type)
self.parameters["solver_settings"]["use_oss"].SetBool(self.use_oss)
self.parameters["solver_settings"]["shock_capturing"].SetBool(self.shock_capturing)
def _AddOutput(self):
gid_output_settings = KratosMultiphysics.Parameters("""{
"python_module" : "gid_output_process",
"kratos_module" : "KratosMultiphysics",
"process_name" : "GiDOutputProcess",
"help" : "This process writes postprocessing files for GiD",
"Parameters" : {
"model_part_name" : "FluidModelPart",
"output_name" : "TO_BE_DEFINED",
"postprocess_parameters" : {
"result_file_configuration" : {
"gidpost_flags" : {
"GiDPostMode" : "GiD_PostBinary",
"WriteDeformedMeshFlag" : "WriteDeformed",
"WriteConditionsFlag" : "WriteConditions",
"MultiFileFlag" : "SingleFile"
},
"file_label" : "step",
"output_control_type" : "step",
"output_frequency" : 1.0,
"body_output" : true,
"node_output" : false,
"skin_output" : false,
"plane_output" : [],
"nodal_results" : ["DENSITY","MOMENTUM","TOTAL_ENERGY"],
"gauss_point_results" : ["SHOCK_SENSOR","THERMAL_SENSOR","SHEAR_SENSOR"],
"nodal_nonhistorical_results" : ["ARTIFICIAL_BULK_VISCOSITY","ARTIFICIAL_CONDUCTIVITY","ARTIFICIAL_DYNAMIC_VISCOSITY"]
},
"point_data_configuration" : []
}
}
}""")
output_name = "sod_shock_tube{0}{1}{2}".format(
"_explicit" if self.solver_type == "CompressibleExplicit" else "_implicit",
"_ASGS" if self.use_oss == False else "_OSS",
"_SC" if self.shock_capturing else "")
gid_output_settings["Parameters"]["output_name"].SetString(output_name)
self.parameters["output_processes"]["gid_output"].Append(gid_output_settings)
def _AddReferenceValuesOutput(self):
json_output_settings = KratosMultiphysics.Parameters("""{
"python_module" : "json_output_process",
"kratos_module" : "KratosMultiphysics",
"process_name" : "JsonOutputProcess",
"Parameters" : {
"output_variables" : ["DENSITY","MOMENTUM_X","MOMENTUM_Y","TOTAL_ENERGY"],
"output_file_name" : "TO_BE_DEFINED",
"model_part_name" : "FluidModelPart.FluidParts_Fluid",
"time_frequency" : 0.025
}
}""")
output_file_name = "sod_shock_tube{0}{1}{2}_results.json".format(
"_explicit" if self.solver_type == "CompressibleExplicit" else "_implicit",
"_ASGS" if self.use_oss == False else "_OSS",
"_SC" if self.shock_capturing else "")
json_output_settings["Parameters"]["output_file_name"].SetString(output_file_name)
self.parameters["processes"]["json_check_process_list"].Append(json_output_settings)
def _AddReferenceValuesCheck(self):
json_check_settings = KratosMultiphysics.Parameters("""{
"python_module" : "from_json_check_result_process",
"kratos_module" : "KratosMultiphysics",
"process_name" : "FromJsonCheckResultProcess",
"Parameters" : {
"check_variables" : ["DENSITY","MOMENTUM_X","MOMENTUM_Y","TOTAL_ENERGY"],
"input_file_name" : "TO_BE_DEFINED",
"model_part_name" : "FluidModelPart.FluidParts_Fluid",
"tolerance" : 0.0,
"relative_tolerance" : 0.0,
"time_frequency" : 0.025
}
}""")
input_file_name = "sod_shock_tube{0}{1}{2}_results.json".format(
"_explicit" if self.solver_type == "CompressibleExplicit" else "_implicit",
"_ASGS" if self.use_oss == False else "_OSS",
"_SC" if self.shock_capturing else "")
json_check_settings["Parameters"]["input_file_name"].SetString(input_file_name)
json_check_settings["Parameters"]["tolerance"].SetDouble(self.check_absolute_tolerance)
json_check_settings["Parameters"]["relative_tolerance"].SetDouble(self.check_relative_tolerance)
self.parameters["processes"]["json_check_process_list"].Append(json_check_settings)
if __name__ == '__main__':
test = SodShockTubeTest()
test.setUp()
# test.testSodShockTubeExplicitASGS()
test.testSodShockTubeExplicitASGSShockCapturing()
# test.testSodShockTubeExplicitOSS()
# test.testSodShockTubeExplicitOSSShockCapturing()
test.runTest()
test.tearDown()
| 48.635802 | 142 | 0.615053 | 7,284 | 0.924483 | 0 | 0 | 0 | 0 | 0 | 0 | 4,031 | 0.511613 |
16a7758cb5092239aa048ae598f5849367159b11 | 647 | py | Python | src/controllers/__init__.py | TonghanWang/NDQ | 575f2e243bac1a567c072dbea8e093aaa4959511 | ["Apache-2.0"] | 63 | 2020-02-23T09:37:15.000Z | 2022-01-17T01:30:50.000Z | src/controllers/__init__.py | fringsoo/NDQ | e243ba917e331065e82c6634cb1d756873747be5 | ["Apache-2.0"] | 14 | 2020-04-20T02:20:11.000Z | 2022-03-12T00:16:33.000Z | src/controllers/__init__.py | mig-zh/NDQ | 5720e3e8b529724e8d96a9a24c73bca24a11e7f9 | ["Apache-2.0"] | 16 | 2020-03-12T02:57:52.000Z | 2021-11-27T13:07:08.000Z |
from .basic_controller import BasicMAC
from .cate_broadcast_comm_controller import CateBCommMAC
from .cate_broadcast_comm_controller_full import CateBCommFMAC
from .cate_broadcast_comm_controller_not_IB import CateBCommNIBMAC
from .tar_comm_controller import TarCommMAC
from .cate_pruned_broadcast_comm_controller import CatePBCommMAC
REGISTRY = {"basic_mac": BasicMAC,
"cate_broadcast_comm_mac": CateBCommMAC,
"cate_broadcast_comm_mac_full": CateBCommFMAC,
"cate_broadcast_comm_mac_not_IB": CateBCommNIBMAC,
"tar_comm_mac": TarCommMAC,
"cate_pruned_broadcast_comm_mac": CatePBCommMAC}
| 46.214286 | 66 | 0.797527 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 144 | 0.222566 |
16a89cacbc82dd93659b9a841883e22a139d8576 | 447 | py | Python | main.py | 1999foxes/run-cmd-from-websocket | 0e2a080fe92b93c6cba63dfe5649ac2a3e745009 | ["Apache-2.0"] | null | null | null | main.py | 1999foxes/run-cmd-from-websocket | 0e2a080fe92b93c6cba63dfe5649ac2a3e745009 | ["Apache-2.0"] | null | null | null | main.py | 1999foxes/run-cmd-from-websocket | 0e2a080fe92b93c6cba63dfe5649ac2a3e745009 | ["Apache-2.0"] | null | null | null |
import asyncio
import json
import logging
import websockets
logging.basicConfig()
USERS = set()  # connected clients; counter() registers each socket and removes it on disconnect
async def counter(websocket, path):
    try:
        print("connect")
        USERS.add(websocket)
        async for message in websocket:
            print(message)
    finally:
        USERS.remove(websocket)
async def main():
async with websockets.serve(counter, "localhost", 5000):
await asyncio.Future() # run forever
if __name__ == "__main__":
asyncio.run(main())
| 17.88 | 60 | 0.657718 | 0 | 0 | 0 | 0 | 0 | 0 | 305 | 0.682327 | 43 | 0.096197 |
16a8a652721deb01765dac84306cf8e790d8b09a | 3,998 | py | Python | 3d_Vnet/3dvnet.py | GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks | f627ce48e44bcc7d295ee1cf4086bfdfd7705d44 | ["MIT"] | 100 | 2020-05-21T10:23:31.000Z | 2022-03-26T18:26:38.000Z | 3d_Vnet/3dvnet.py | GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks | f627ce48e44bcc7d295ee1cf4086bfdfd7705d44 | ["MIT"] | 3 | 2020-08-19T18:14:01.000Z | 2021-01-04T09:53:07.000Z | 3d_Vnet/3dvnet.py | GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks | f627ce48e44bcc7d295ee1cf4086bfdfd7705d44 | ["MIT"] | 25 | 2020-09-05T04:19:22.000Z | 2022-02-09T19:30:29.000Z |
import random
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
import tensorflow as tf
import keras.backend as K
from keras.utils import to_categorical
from keras import metrics
from keras.models import Model, load_model
from keras.layers import Input, BatchNormalization, Activation, Dense, Dropout,Maximum
from keras.layers.core import Lambda, RepeatVector, Reshape
from keras.layers.convolutional import Conv2D, Conv2DTranspose,Conv3D,Conv3DTranspose
from keras.layers.pooling import MaxPooling2D, GlobalMaxPool2D,MaxPooling3D
from keras.layers.merge import concatenate, add
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from skimage.io import imread, imshow, concatenate_images
from skimage.transform import resize
from sklearn.utils import class_weight
from keras.callbacks import ModelCheckpoint
from keras.callbacks import CSVLogger
from keras.callbacks import EarlyStopping
from keras.layers.advanced_activations import PReLU
import os
from skimage.io import imread, imshow, concatenate_images
from skimage.transform import resize
# from medpy.io import load
import numpy as np
#import cv2
import nibabel as nib
from PIL import Image
def conv_block(input_mat,num_filters,kernel_size,batch_norm):
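  # Residual block: two Conv3D(+optional BatchNorm)+ReLU stages whose output is added back to the block input.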
X = Conv3D(num_filters,kernel_size=(kernel_size,kernel_size,kernel_size),strides=(1,1,1),padding='same')(input_mat)
if batch_norm:
X = BatchNormalization()(X)
X = Activation('relu')(X)
X = Conv3D(num_filters,kernel_size=(kernel_size,kernel_size,kernel_size),strides=(1,1,1),padding='same')(X)
if batch_norm:
X = BatchNormalization()(X)
X = Activation('relu')(X)
X = add([input_mat,X]);
return X
def Vnet_3d(input_img, n_filters = 8, dropout = 0.2, batch_norm = True):
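  # V-Net style encoder-decoder: strided 2x2x2 convolutions downsample, Conv3DTranspose layers upsample,
  # and encoder feature maps are concatenated back in as skip connections before the final softmax.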
#c1 = conv_block(input_img,n_filters,3,batch_norm)
c1 = Conv3D(n_filters,kernel_size = (5,5,5) , strides = (1,1,1) , padding='same')(input_img)
#c1 = add([c1,input_img])
c2 = Conv3D(n_filters*2,kernel_size = (2,2,2) , strides = (2,2,2) , padding = 'same' )(c1)
c3 = conv_block(c2 , n_filters*2,5,True)
p3 = Conv3D(n_filters*4,kernel_size = (2,2,2) , strides = (2,2,2), padding = 'same')(c3)
p3 = Dropout(dropout)(p3)
c4 = conv_block(p3, n_filters*4,5,True)
p4 = Conv3D(n_filters*8,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c4)
p4 = Dropout(dropout)(p4)
c5 = conv_block(p4, n_filters*8,5,True)
p6 = Conv3D(n_filters*16,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c5)
p6 = Dropout(dropout)(p6)
#c6 = conv_block(p5, n_filters*8,5,True)
#p6 = Conv3D(n_filters*16,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c6)
p7 = conv_block(p6,n_filters*16,5,True)
u6 = Conv3DTranspose(n_filters*8, (2,2,2), strides=(2, 2, 2), padding='same')(p7);
u6 = concatenate([u6,c5]);
c7 = conv_block(u6,n_filters*16,5,True)
c7 = Dropout(dropout)(c7)
u7 = Conv3DTranspose(n_filters*4,(2,2,2),strides = (2,2,2) , padding= 'same')(c7);
u8 = concatenate([u7,c4]);
c8 = conv_block(u8,n_filters*8,5,True)
c8 = Dropout(dropout)(c8)
u9 = Conv3DTranspose(n_filters*2,(2,2,2),strides = (2,2,2) , padding= 'same')(c8);
u9 = concatenate([u9,c3]);
c9 = conv_block(u9,n_filters*4,5,True)
c9 = Dropout(dropout)(c9)
u10 = Conv3DTranspose(n_filters,(2,2,2),strides = (2,2,2) , padding= 'same')(c9);
u10 = concatenate([u10,c1]);
c10 = Conv3D(n_filters*2,kernel_size = (5,5,5),strides = (1,1,1) , padding = 'same')(u10);
c10 = Dropout(dropout)(c10)
c10 = add([c10,u10]);
#c9 = conv_block(u9,n_filters,3,batch_norm)
outputs = Conv3D(4, (1,1,1), activation='softmax')(c10)
model = Model(inputs=input_img, outputs=outputs)
return model
| 34.465517 | 118 | 0.693847 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 405 | 0.101301 |
16a92c971c54838ec2fe27ba303cf0b8622f86ad | 347 | py | Python | vk/types/additional/active_offer.py | Inzilkin/vk.py | 969f01e666c877c1761c3629a100768f93de27eb | ["MIT"] | 24 | 2019-09-13T15:30:09.000Z | 2022-03-09T06:35:59.000Z | vk/types/additional/active_offer.py | Inzilkin/vk.py | 969f01e666c877c1761c3629a100768f93de27eb | ["MIT"] | null | null | null | vk/types/additional/active_offer.py | Inzilkin/vk.py | 969f01e666c877c1761c3629a100768f93de27eb | ["MIT"] | 12 | 2019-09-13T15:30:31.000Z | 2022-03-01T10:13:32.000Z |
from ..base import BaseModel
# returned from https://vk.com/dev/account.getActiveOffers
class ActiveOffer(BaseModel):
id: str = None
title: str = None
instruction: str = None
instruction_html: str = None
short_description: str = None
description: str = None
img: str = None
tag: str = None
price: int = None
| 21.6875 | 58 | 0.665706 | 255 | 0.73487 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.167147 |
16a9cd5f8c3947e5f770014cb07528f411173928 | 18,818 | py | Python | lib/networks/Resnet50_train.py | yangxue0827/TF_Deformable_Net | 00c86380fd2725ebe7ae22f41d460ffc0bca378d | ["MIT"] | 193 | 2017-07-19T14:29:38.000Z | 2021-10-20T07:35:42.000Z | lib/networks/Resnet50_train.py | yangxue0827/TF_Deformable_Net | 00c86380fd2725ebe7ae22f41d460ffc0bca378d | ["MIT"] | 29 | 2017-07-24T10:07:22.000Z | 2020-01-03T20:38:36.000Z | lib/networks/Resnet50_train.py | Zardinality/TF_Deformable_Net | 00c86380fd2725ebe7ae22f41d460ffc0bca378d | ["MIT"] | 67 | 2017-07-27T14:32:47.000Z | 2021-12-27T13:10:37.000Z |
# --------------------------------------------------------
# TFFRCNN - Resnet50
# Copyright (c) 2016
# Licensed under The MIT License [see LICENSE for details]
# Written by miraclebiu
# --------------------------------------------------------
import tensorflow as tf
from .network import Network
from ..fast_rcnn.config import cfg
class Resnet50_train(Network):
def __init__(self, trainable=True):
self.inputs = []
self.data = tf.placeholder(tf.float32, shape=[None, None, None, 3], name='data')
self.im_info = tf.placeholder(tf.float32, shape=[None, 3], name='im_info')
self.gt_boxes = tf.placeholder(tf.float32, shape=[None, 5], name='gt_boxes')
self.gt_ishard = tf.placeholder(tf.int32, shape=[None], name='gt_ishard')
self.dontcare_areas = tf.placeholder(tf.float32, shape=[None, 4], name='dontcare_areas')
self.keep_prob = tf.placeholder(tf.float32)
self.layers = dict({'data':self.data, 'im_info':self.im_info, 'gt_boxes':self.gt_boxes,\
'gt_ishard': self.gt_ishard, 'dontcare_areas': self.dontcare_areas})
self.trainable = trainable
self.setup()
def setup(self):
n_classes = cfg.NCLASSES
# anchor_scales = [8, 16, 32]
anchor_scales = cfg.ANCHOR_SCALES
_feat_stride = [16, ]
(self.feed('data')
.conv(7, 7, 64, 2, 2, relu=False, name='conv1')
.batch_normalization(relu=True, name='bn_conv1', is_training=False)
.max_pool(3, 3, 2, 2, padding='VALID',name='pool1')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2a_branch1')
.batch_normalization(name='bn2a_branch1',is_training=False,relu=False))
(self.feed('pool1')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2a_branch2a')
.batch_normalization(relu=True, name='bn2a_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2a_branch2b')
.batch_normalization(relu=True, name='bn2a_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2a_branch2c')
.batch_normalization(name='bn2a_branch2c',is_training=False,relu=False))
(self.feed('bn2a_branch1',
'bn2a_branch2c')
.add(name='res2a')
.relu(name='res2a_relu')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2b_branch2a')
.batch_normalization(relu=True, name='bn2b_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2b_branch2b')
.batch_normalization(relu=True, name='bn2b_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2b_branch2c')
.batch_normalization(name='bn2b_branch2c',is_training=False,relu=False))
(self.feed('res2a_relu',
'bn2b_branch2c')
.add(name='res2b')
.relu(name='res2b_relu')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2c_branch2a')
.batch_normalization(relu=True, name='bn2c_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2c_branch2b')
.batch_normalization(relu=True, name='bn2c_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2c_branch2c')
.batch_normalization(name='bn2c_branch2c',is_training=False,relu=False))
(self.feed('res2b_relu',
'bn2c_branch2c')
.add(name='res2c')
.relu(name='res2c_relu')
.conv(1, 1, 512, 2, 2, biased=False, relu=False, name='res3a_branch1', padding='VALID')
.batch_normalization(name='bn3a_branch1',is_training=False,relu=False))
(self.feed('res2c_relu')
.conv(1, 1, 128, 2, 2, biased=False, relu=False, name='res3a_branch2a', padding='VALID')
.batch_normalization(relu=True, name='bn3a_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3a_branch2b')
.batch_normalization(relu=True, name='bn3a_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3a_branch2c')
.batch_normalization(name='bn3a_branch2c',is_training=False,relu=False))
(self.feed('bn3a_branch1',
'bn3a_branch2c')
.add(name='res3a')
.relu(name='res3a_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3b_branch2a')
.batch_normalization(relu=True, name='bn3b_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3b_branch2b')
.batch_normalization(relu=True, name='bn3b_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3b_branch2c')
.batch_normalization(name='bn3b_branch2c',is_training=False,relu=False))
(self.feed('res3a_relu',
'bn3b_branch2c')
.add(name='res3b')
.relu(name='res3b_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3c_branch2a')
.batch_normalization(relu=True, name='bn3c_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3c_branch2b')
.batch_normalization(relu=True, name='bn3c_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3c_branch2c')
.batch_normalization(name='bn3c_branch2c',is_training=False,relu=False))
(self.feed('res3b_relu',
'bn3c_branch2c')
.add(name='res3c')
.relu(name='res3c_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3d_branch2a')
.batch_normalization(relu=True, name='bn3d_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3d_branch2b')
.batch_normalization(relu=True, name='bn3d_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3d_branch2c')
.batch_normalization(name='bn3d_branch2c',is_training=False,relu=False))
(self.feed('res3c_relu',
'bn3d_branch2c')
.add(name='res3d')
.relu(name='res3d_relu')
.conv(1, 1, 1024, 2, 2, biased=False, relu=False, name='res4a_branch1', padding='VALID')
.batch_normalization(name='bn4a_branch1',is_training=False,relu=False))
(self.feed('res3d_relu')
.conv(1, 1, 256, 2, 2, biased=False, relu=False, name='res4a_branch2a', padding='VALID')
.batch_normalization(relu=True, name='bn4a_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4a_branch2b')
.batch_normalization(relu=True, name='bn4a_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4a_branch2c')
.batch_normalization(name='bn4a_branch2c',is_training=False,relu=False))
(self.feed('bn4a_branch1',
'bn4a_branch2c')
.add(name='res4a')
.relu(name='res4a_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4b_branch2a')
.batch_normalization(relu=True, name='bn4b_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4b_branch2b')
.batch_normalization(relu=True, name='bn4b_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4b_branch2c')
.batch_normalization(name='bn4b_branch2c',is_training=False,relu=False))
(self.feed('res4a_relu',
'bn4b_branch2c')
.add(name='res4b')
.relu(name='res4b_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4c_branch2a')
.batch_normalization(relu=True, name='bn4c_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4c_branch2b')
.batch_normalization(relu=True, name='bn4c_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4c_branch2c')
.batch_normalization(name='bn4c_branch2c',is_training=False,relu=False))
(self.feed('res4b_relu',
'bn4c_branch2c')
.add(name='res4c')
.relu(name='res4c_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4d_branch2a')
.batch_normalization(relu=True, name='bn4d_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4d_branch2b')
.batch_normalization(relu=True, name='bn4d_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4d_branch2c')
.batch_normalization(name='bn4d_branch2c',is_training=False,relu=False))
(self.feed('res4c_relu',
'bn4d_branch2c')
.add(name='res4d')
.relu(name='res4d_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4e_branch2a')
.batch_normalization(relu=True, name='bn4e_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4e_branch2b')
.batch_normalization(relu=True, name='bn4e_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4e_branch2c')
.batch_normalization(name='bn4e_branch2c',is_training=False,relu=False))
(self.feed('res4d_relu',
'bn4e_branch2c')
.add(name='res4e')
.relu(name='res4e_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4f_branch2a')
.batch_normalization(relu=True, name='bn4f_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4f_branch2b')
.batch_normalization(relu=True, name='bn4f_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4f_branch2c')
.batch_normalization(name='bn4f_branch2c',is_training=False,relu=False))
(self.feed('res4e_relu',
'bn4f_branch2c')
.add(name='res4f')
.relu(name='res4f_relu'))
#========= RPN ============
(self.feed('res4f_relu')
.conv(3,3,512,1,1,name='rpn_conv/3x3')
.conv(1,1,len(anchor_scales)*3*2 ,1 , 1, padding='VALID', relu = False, name='rpn_cls_score'))
(self.feed('rpn_cls_score', 'gt_boxes', 'gt_ishard', 'dontcare_areas', 'im_info')
.anchor_target_layer(_feat_stride, anchor_scales, name = 'rpn-data' ))
# Loss of rpn_cls & rpn_boxes
(self.feed('rpn_conv/3x3')
.conv(1,1,len(anchor_scales)*3*4, 1, 1, padding='VALID', relu = False, name='rpn_bbox_pred'))
#========= RoI Proposal ============
(self.feed('rpn_cls_score')
.spatial_reshape_layer(2, name = 'rpn_cls_score_reshape')
.spatial_softmax(name='rpn_cls_prob'))
(self.feed('rpn_cls_prob')
.spatial_reshape_layer(len(anchor_scales)*3*2, name = 'rpn_cls_prob_reshape'))
(self.feed('rpn_cls_prob_reshape','rpn_bbox_pred','im_info')
.proposal_layer(_feat_stride, anchor_scales, 'TRAIN',name = 'rpn_rois'))
(self.feed('rpn_rois','gt_boxes', 'gt_ishard', 'dontcare_areas')
.proposal_target_layer(n_classes,name = 'roi-data'))
#========= RCNN ============
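        # The res5 stage uses deformable convolutions: each *_offset conv predicts per-position
        # sampling offsets that the matching deform_conv layer consumes.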
(self.feed('res4f_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch1', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch1'))
(self.feed('res4f_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5a_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch2a')
.relu(name='res5a_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5a_branch2b_offset', padding='SAME', initializer='zeros'))
(self.feed('res5a_branch2a_relu', 'res5a_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5a_branch2b')
.batch_normalization(relu=False, name='bn5a_branch2b')
.relu(name='res5a_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch2c'))
(self.feed('bn5a_branch1', 'bn5a_branch2c')
.add(name='res5a')
.relu(name='res5a_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5b_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5b_branch2a')
.relu(name='res5b_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5b_branch2b_offset', padding='SAME', initializer='zeros'))
(self.feed('res5b_branch2a_relu', 'res5b_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5b_branch2b')
.batch_normalization(relu=False, name='bn5b_branch2b')
.relu(name='res5b_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5b_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5b_branch2c'))
(self.feed('res5a_relu', 'bn5b_branch2c')
.add(name='res5b')
.relu(name='res5b_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5c_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5c_branch2a')
.relu(name='res5c_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5c_branch2b_offset', padding='SAME', initializer='zeros') )
(self.feed('res5c_branch2a_relu', 'res5c_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5c_branch2b')
.batch_normalization(relu=False, name='bn5c_branch2b')
.relu(name='res5c_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5c_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5c_branch2c'))
(self.feed('res5b_relu', 'bn5c_branch2c')
.add(name='res5c')
.relu(name='res5c_relu')
.conv(1, 1, 256, 1, 1, relu=False, name='conv_new_1')
.relu(name='conv_new_1_relu'))
(self.feed('conv_new_1_relu', 'roi-data')
.deform_psroi_pool(group_size=1, pooled_size=7, sample_per_part=4, no_trans=True, part_size=7, output_dim=256, trans_std=1e-1, spatial_scale=0.0625, name='offset_t')
# .flatten_data(name='offset_flatten')
.fc(num_out=7 * 7 * 2, name='offset', relu=False)
.reshape(shape=(-1,2,7,7), name='offset_reshape'))
(self.feed('conv_new_1_relu', 'roi-data', 'offset_reshape')
.deform_psroi_pool(group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7, output_dim=256, trans_std=1e-1, spatial_scale=0.0625, name='deformable_roi_pool')
.fc(num_out=1024, name='fc_new_1')
.fc(num_out=1024, name='fc_new_2'))
(self.feed('fc_new_2')
.fc(num_out=n_classes, name='cls_score', relu=False)
.softmax(name='cls_prob'))
(self.feed('fc_new_2')
.fc(num_out=4*n_classes, name='bbox_pred', relu=False))
# (self.feed('res4f_relu','roi-data')
# .roi_pool(7,7,1.0/16,name='res5a_branch2a_roipooling')
# .conv(1, 1, 512, 2, 2, biased=False, relu=False, name='res5a_branch2a', padding='VALID')
# .batch_normalization(relu=True, name='bn5a_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5a_branch2b')
# .batch_normalization(relu=True, name='bn5a_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch2c')
# .batch_normalization(name='bn5a_branch2c',is_training=False,relu=False))
# (self.feed('res5a_branch2a_roipooling')
# .conv(1,1,2048,2,2,biased=False, relu=False, name='res5a_branch1', padding='VALID')
# .batch_normalization(name='bn5a_branch1',is_training=False,relu=False))
# (self.feed('bn5a_branch2c','bn5a_branch1')
# .add(name='res5a')
# .relu(name='res5a_relu')
# .conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5b_branch2a')
# .batch_normalization(relu=True, name='bn5b_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5b_branch2b')
# .batch_normalization(relu=True, name='bn5b_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5b_branch2c')
# .batch_normalization(name='bn5b_branch2c',is_training=False,relu=False))
# #pdb.set_trace()
# (self.feed('res5a_relu',
# 'bn5b_branch2c')
# .add(name='res5b')
# .relu(name='res5b_relu')
# .conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5c_branch2a')
# .batch_normalization(relu=True, name='bn5c_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5c_branch2b')
# .batch_normalization(relu=True, name='bn5c_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5c_branch2c')
# .batch_normalization(name='bn5c_branch2c',is_training=False,relu=False))
# #pdb.set_trace()
# (self.feed('res5b_relu',
# 'bn5c_branch2c')
# .add(name='res5c')
# .relu(name='res5c_relu')
# .fc(n_classes, relu=False, name='cls_score')
# .softmax(name='cls_prob'))
# (self.feed('res5c_relu')
# .fc(n_classes*4, relu=False, name='bbox_pred'))
| 58.080247 | 189 | 0.597619 | 18,483 | 0.982198 | 0 | 0 | 0 | 0 | 0 | 0 | 6,342 | 0.337018 |
16aafc257a8e2aae93d3cae037dc8cf239e63a42 | 20,180 | py | Python | lib/aws_sso_lib/assignments.py | vdesjardins/aws-sso-util | bf092a21674e8286c4445df7f4aae8ad061444ca | ["Apache-2.0"] | 330 | 2020-11-11T15:53:22.000Z | 2022-03-30T06:45:57.000Z | lib/aws_sso_lib/assignments.py | vdesjardins/aws-sso-util | bf092a21674e8286c4445df7f4aae8ad061444ca | ["Apache-2.0"] | 47 | 2020-11-11T01:32:29.000Z | 2022-03-30T01:33:28.000Z | lib/aws_sso_lib/assignments.py | vdesjardins/aws-sso-util | bf092a21674e8286c4445df7f4aae8ad061444ca | ["Apache-2.0"] | 23 | 2020-11-25T14:12:37.000Z | 2022-03-30T02:16:26.000Z |
import re
import numbers
import collections
import logging
from collections.abc import Iterable
import itertools
import aws_error_utils
from .lookup import Ids, lookup_accounts_for_ou
from .format import format_account_id
LOGGER = logging.getLogger(__name__)
_Context = collections.namedtuple("_Context", [
"session",
"ids",
"principal",
"principal_filter",
"permission_set",
"permission_set_filter",
"target",
"target_filter",
"get_principal_names",
"get_permission_set_names",
"get_target_names",
"ou_recursive",
"cache",
"filter_cache"
])
def _filter(filter_cache, key, func, args):
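    # Memoized filter: evaluate func for each key once and reuse the cached verdict; no filter means accept everything.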
if not func:
return True
if key not in filter_cache:
filter_cache[key] = func(*args)
return filter_cache[key]
def _flatten(list_of_lists):
return list(itertools.chain(*list_of_lists))
def _is_principal_tuple(principal):
try:
return all([
len(principal) == 2,
isinstance(principal[0], str),
principal[0] in ["GROUP", "USER"],
isinstance(principal[1], str),
])
except:
return False
def _process_principal(principal):
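    # Normalize a principal spec into a list of (type, id) tuples; a bare id string becomes (None, id), i.e. any principal type.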
if not principal:
return None
if isinstance(principal, str):
return [(None, principal)]
if _is_principal_tuple(principal):
return [tuple(principal)]
else:
return _flatten(_process_principal(p) for p in principal)
def _process_permission_set(ids, permission_set):
if not permission_set:
return None
if not isinstance(permission_set, str) and isinstance(permission_set, Iterable):
return _flatten(_process_permission_set(ids, ps) for ps in permission_set)
if permission_set.startswith("arn"):
permission_set_arn = permission_set
elif permission_set.startswith("ssoins-") or permission_set.startswith("ins-"):
permission_set_arn = f"arn:aws:sso:::permissionSet/{permission_set}"
elif permission_set.startswith("ps-"):
permission_set_arn = f"arn:aws:sso:::permissionSet/{ids.instance_id}/{permission_set}"
else:
raise TypeError(f"Invalid permission set id {permission_set}")
return [permission_set_arn]
def _is_target_tuple(target):
try:
return all([
len(target) == 2,
isinstance(target[0], str),
target[0] in ["AWS_OU", "AWS_ACCOUNT"],
isinstance(target[1], str),
])
except:
return False
def _process_target(target):
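    # Normalize targets into (type, id) tuples: numbers and digit strings become AWS_ACCOUNT, r-/ou- ids become AWS_OU.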
if not target:
return None
if isinstance(target, numbers.Number):
return [("AWS_ACCOUNT", format_account_id(target))]
if isinstance(target, str):
if re.match(r"^\d+$", target):
return [("AWS_ACCOUNT", format_account_id(target))]
elif re.match(r"^r-[a-z0-9]{4,32}$", target) or re.match(r"^ou-[a-z0-9]{4,32}-[a-z0-9]{8,32}$", target):
return [("AWS_OU", target)]
else:
raise TypeError(f"Invalid target {target}")
elif _is_target_tuple(target):
target_type, target_id = target
if target_type not in ["AWS_ACCOUNT", "AWS_OU"]:
raise TypeError(f"Invalid target type {target_type}")
return [(target_type, target_id)]
else:
value = _flatten(_process_target(t) for t in target)
return value
def _get_account_iterator(target, context: _Context):
def target_iterator():
target_name = None
if context.get_target_names:
organizations_client = context.session.client("organizations")
account = organizations_client.describe_account(AccountId=target[1])["Account"]
if account.get("Name"):
target_name = account["Name"]
value = (*target, target_name)
if not _filter(context.filter_cache, value[1], context.target_filter, value):
LOGGER.debug(f"Account is filtered: {value}")
else:
LOGGER.debug(f"Visiting single account: {value}")
yield value
return target_iterator
def _get_ou_iterator(target, context: _Context):
def target_iterator():
target_name = None
# if context.get_target_names:
# organizations_client = context.session.client("organizations")
# ou = organizations_client.describe_organizational_unit(OrganizationalUnitId=target[1])["OrganizationalUnit"]
# if ou.get("Name"):
# target_name = ou("Name")
value = (*target, target_name)
accounts = lookup_accounts_for_ou(context.session, value[1], recursive=context.ou_recursive)
for account in accounts:
yield "AWS_ACCOUNT", account["Id"], account["Name"]
return target_iterator
def _get_single_target_iterator(target, context: _Context):
target_type = target[0]
if target_type == "AWS_ACCOUNT":
return _get_account_iterator(target, context)
elif target_type == "AWS_OU":
return _get_ou_iterator(target, context)
else:
raise TypeError(f"Invalid target type {target_type}")
def _get_all_accounts_iterator(context: _Context):
def target_iterator():
organizations_client = context.session.client("organizations")
accounts_paginator = organizations_client.get_paginator("list_accounts")
for response in accounts_paginator.paginate():
LOGGER.debug(f"ListAccounts page: {response}")
for account in response["Accounts"]:
account_id = account["Id"]
account_name = account["Name"]
value = ("AWS_ACCOUNT", account_id, account_name)
if not _filter(context.filter_cache, account_id, context.target_filter, value):
LOGGER.debug(f"Account is filtered: {value}")
continue
LOGGER.debug(f"Visiting account: {value}")
yield value
return target_iterator
def _get_target_iterator(context: _Context):
if context.target:
iterables = [_get_single_target_iterator(t, context) for t in context.target]
def target_iterator():
return itertools.chain(*[it() for it in iterables])
return target_iterator
else:
LOGGER.debug(f"Iterating for all accounts")
return _get_all_accounts_iterator(context)
def _get_single_permission_set_iterator(permission_set, context: _Context):
permission_set_arn = permission_set
permission_set_id = permission_set_arn.split("/")[-1]
def permission_set_iterator(target_type, target_id, target_name):
if not context.get_permission_set_names:
permission_set_name = None
else:
sso_admin_client = context.session.client("sso-admin")
response = sso_admin_client.describe_permission_set(
InstanceArn=context.ids.instance_arn,
PermissionSetArn=permission_set_arn
)
LOGGER.debug(f"DescribePermissionSet response: {response}")
permission_set_name = response["PermissionSet"]["Name"]
if not _filter(context.filter_cache, permission_set_arn, context.permission_set_filter, (permission_set_arn, permission_set_name)):
LOGGER.debug(f"Single permission set is filtered: {(permission_set_id, permission_set_name)}")
else:
LOGGER.debug(f"Visiting single permission set {(permission_set_id, permission_set_name)}")
yield permission_set_arn, permission_set_id, permission_set_name
return permission_set_iterator
def _get_all_permission_sets_iterator(context: _Context):
def permission_set_iterator(target_type, target_id, target_name):
if target_type != "AWS_ACCOUNT":
raise TypeError(f"Unsupported target type {target_type}")
sso_admin_client = context.session.client("sso-admin")
permission_sets_paginator = sso_admin_client.get_paginator("list_permission_sets_provisioned_to_account")
for response in permission_sets_paginator.paginate(
InstanceArn=context.ids.instance_arn,
AccountId=target_id):
LOGGER.debug(f"ListPermissionSetsProvisionedToAccount {target_id} page: {response}")
if "PermissionSets" not in response:
continue
for permission_set_arn in response["PermissionSets"]:
permission_set_id = permission_set_arn.split("/", 2)[-1]
if not context.get_permission_set_names:
permission_set_name = None
else:
if permission_set_arn not in context.cache:
response = sso_admin_client.describe_permission_set(
InstanceArn=context.ids.instance_arn,
PermissionSetArn=permission_set_arn
)
LOGGER.debug(f"DescribePermissionSet response: {response}")
context.cache[permission_set_arn] = response["PermissionSet"]["Name"]
permission_set_name = context.cache[permission_set_arn]
if not _filter(context.filter_cache, permission_set_arn, context.permission_set_filter, (permission_set_arn, permission_set_name)):
LOGGER.debug(f"Permission set is filtered: {(permission_set_id, permission_set_name)}")
continue
LOGGER.debug(f"Visiting permission set: {(permission_set_id, permission_set_name)}")
yield permission_set_arn, permission_set_id, permission_set_name
return permission_set_iterator
def _get_permission_set_iterator(context: _Context):
if context.permission_set:
iterables = [_get_single_permission_set_iterator(ps, context) for ps in context.permission_set]
def permission_set_iterator(target_type, target_id, target_name):
return itertools.chain(*[it(target_type, target_id, target_name) for it in iterables])
return permission_set_iterator
else:
LOGGER.debug("Iterating for all permission sets")
return _get_all_permission_sets_iterator(context)
def _get_principal_iterator(context: _Context):
def principal_iterator(
target_type, target_id, target_name,
permission_set_arn, permission_set_id, permission_set_name):
if target_type != "AWS_ACCOUNT":
raise TypeError(f"Unsupported target type {target_type}")
sso_admin_client = context.session.client("sso-admin")
identity_store_client = context.session.client("identitystore")
assignments_paginator = sso_admin_client.get_paginator("list_account_assignments")
for response in assignments_paginator.paginate(
InstanceArn=context.ids.instance_arn,
AccountId=target_id,
PermissionSetArn=permission_set_arn):
LOGGER.debug(f"ListAccountAssignments for {target_id} {permission_set_arn.split('/')[-1]} page: {response}")
if not response["AccountAssignments"] and not "NextToken" in response:
LOGGER.debug(f"No assignments for {target_id} {permission_set_arn.split('/')[-1]}")
for assignment in response["AccountAssignments"]:
principal_type = assignment["PrincipalType"]
principal_id = assignment["PrincipalId"]
LOGGER.debug(f"Visiting principal {principal_type}:{principal_id}")
if context.principal:
for principal in context.principal:
                        # A principal spec matches when its type is unspecified (None) or equal to the assignment's type.
                        type_matches = (principal[0] is None or principal[0] == principal_type)
if type_matches and principal[1] == principal_id:
LOGGER.debug(f"Found principal {principal_type}:{principal_id}")
break
else:
LOGGER.debug(f"Principal {principal_type}:{principal_id} does not match principals")
continue
principal_key = (principal_type, principal_id)
if not context.get_principal_names:
principal_name = None
else:
if principal_key not in context.cache:
if principal_type == "GROUP":
try:
response = identity_store_client.describe_group(
IdentityStoreId=context.ids.identity_store_id,
GroupId=principal_id
)
LOGGER.debug(f"DescribeGroup response: {response}")
context.cache[principal_key] = response["DisplayName"]
except aws_error_utils.catch_aws_error("ResourceNotFoundException"):
context.cache[principal_key] = None
elif principal_type == "USER":
try:
response = identity_store_client.describe_user(
IdentityStoreId=context.ids.identity_store_id,
UserId=principal_id
)
LOGGER.debug(f"DescribeUser response: {response}")
context.cache[principal_key] = response["UserName"]
except aws_error_utils.catch_aws_error("ResourceNotFoundException"):
context.cache[principal_key] = None
else:
raise ValueError(f"Unknown principal type {principal_type}")
principal_name = context.cache[principal_key]
if not _filter(context.filter_cache, principal_key, context.principal_filter, (principal_type, principal_id, principal_name)):
if context.principal:
LOGGER.debug(f"Principal is filtered: {principal_type}:{principal_id}")
else:
LOGGER.debug(f"Principal is filtered: {principal_type}:{principal_id}")
continue
LOGGER.debug(f"Visiting principal: {principal_type}:{principal_id}")
yield principal_type, principal_id, principal_name
return principal_iterator
Assignment = collections.namedtuple("Assignment", [
"instance_arn",
"principal_type",
"principal_id",
"principal_name",
"permission_set_arn",
"permission_set_name",
"target_type",
"target_id",
"target_name",
])
def list_assignments(
session,
instance_arn=None,
identity_store_id=None,
principal=None,
principal_filter=None,
permission_set=None,
permission_set_filter=None,
target=None,
target_filter=None,
get_principal_names=False,
get_permission_set_names=False,
get_target_names=False,
ou_recursive=False):
"""Iterate over AWS SSO assignments.
Args:
session (boto3.Session): boto3 session to use
instance_arn (str): The SSO instance to use, or it will be looked up using ListInstances
identity_store_id (str): The identity store to use if principal names are being retrieved
or it will be looked up using ListInstances
principal: A principal specification or list of principal specifications.
A principal specification is a principal id or a 2-tuple of principal type and id.
principal_filter: A callable taking principal type, principal id, and principal name
(which may be None), and returning True if the principal should be included.
permission_set: A permission set arn or id, or a list of the same.
permission_set_filter: A callable taking permission set arn and name (name may be None),
returning True if the permission set should be included.
target: A target specification or list of target specifications.
A target specification is an account or OU id, or a 2-tuple of target type, which
is either AWS_ACCOUNT or AWS_OU, and target id.
target_filter: A callable taking target type, target id, and target name
(which may be None), and returning True if the target should be included.
get_principal_names (bool): Retrieve names for principals in assignments.
get_permission_set_names (bool): Retrieve names for permission sets in assignments.
get_target_names (bool): Retrieve names for targets in assignments.
ou_recursive (bool): Set to True if an OU is provided as a target to get all accounts
including those in child OUs.
Returns:
An iterator over Assignment namedtuples
"""
ids = Ids(lambda: session, instance_arn, identity_store_id)
return _list_assignments(
session,
ids,
principal=principal,
principal_filter=principal_filter,
permission_set=permission_set,
permission_set_filter=permission_set_filter,
target=target,
target_filter=target_filter,
get_principal_names=get_principal_names,
get_permission_set_names=get_permission_set_names,
get_target_names=get_target_names,
ou_recursive=ou_recursive,
)
def _list_assignments(
session,
ids,
principal=None,
principal_filter=None,
permission_set=None,
permission_set_filter=None,
target=None,
target_filter=None,
get_principal_names=False,
get_permission_set_names=False,
get_target_names=False,
ou_recursive=False):
principal = _process_principal(principal)
permission_set = _process_permission_set(ids, permission_set)
target = _process_target(target)
cache = {}
filter_cache = {}
context = _Context(
session = session,
ids=ids,
principal=principal,
principal_filter=principal_filter,
permission_set=permission_set,
permission_set_filter=permission_set_filter,
target=target,
target_filter=target_filter,
get_principal_names=get_principal_names,
get_permission_set_names=get_permission_set_names,
get_target_names=get_target_names,
ou_recursive=ou_recursive,
cache=cache,
filter_cache=filter_cache,
)
target_iterator = _get_target_iterator(context)
permission_set_iterator = _get_permission_set_iterator(context)
principal_iterator = _get_principal_iterator(context)
for target_type, target_id, target_name in target_iterator():
        for permission_set_arn, permission_set_id, permission_set_name in permission_set_iterator(target_type, target_id, target_name):
for principal_type, principal_id, principal_name in principal_iterator(
target_type, target_id, target_name,
permission_set_arn, permission_set_id, permission_set_name):
assignment = Assignment(
ids.instance_arn,
principal_type,
principal_id,
principal_name,
permission_set_arn,
permission_set_name,
target_type,
target_id,
target_name,
)
LOGGER.debug(f"Visiting assignment: {assignment}")
yield assignment
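# A minimal usage sketch (hedged: the profile name and OU id below are
# hypothetical; everything else follows the list_assignments signature above):
#
#   import boto3
#   session = boto3.Session(profile_name="sso-admin")
#   for assignment in list_assignments(
#           session,
#           target="ou-abcd-12345678",
#           get_principal_names=True,
#           ou_recursive=True):
#       print(assignment)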
if __name__ == "__main__":
import boto3
import sys
import json
logging.basicConfig(level=logging.INFO)
kwargs = {}
for v in sys.argv[1:]:
if hasattr(logging, v):
LOGGER.setLevel(getattr(logging, v))
else:
kwargs = json.loads(v)
def fil(*args):
print(args)
return True
kwargs["target_filter"] = fil
try:
session = boto3.Session()
print(",".join(Assignment._fields))
for value in list_assignments(session, **kwargs):
print(",".join(v or "" for v in value))
except KeyboardInterrupt:
pass
| 41.608247 | 147 | 0.637413 | 0 | 0 | 11,947 | 0.592022 | 0 | 0 | 0 | 0 | 4,857 | 0.240684 |
16abab9c314c051765ffd991fb6c764e6cf24cb5 | 235 | py | Python | solutions/pic_search/webserver/src/service/theardpool.py | naetimus/bootcamp | 0182992df7c54012944b51fe9b70532ab6a0059b | [
"Apache-2.0"
] | 1 | 2020-03-10T07:43:08.000Z | 2020-03-10T07:43:08.000Z | solutions/pic_search/webserver/src/service/theardpool.py | naetimus/bootcamp | 0182992df7c54012944b51fe9b70532ab6a0059b | [
"Apache-2.0"
] | null | null | null | solutions/pic_search/webserver/src/service/theardpool.py | naetimus/bootcamp | 0182992df7c54012944b51fe9b70532ab6a0059b | [
"Apache-2.0"
] | 1 | 2020-04-03T05:24:47.000Z | 2020-04-03T05:24:47.000Z | import threading
from concurrent.futures import ThreadPoolExecutor
from service.train import do_train
def thread_runner(thread_num, func, *args):
executor = ThreadPoolExecutor(thread_num)
f = executor.submit(do_train, *args)
| 26.111111 | 49 | 0.795745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
16ac3137138a7e3b002c9c9337af2623d4ef26d0 | 2,600 | py | Python | buildutil/main.py | TediCreations/buildutils | 49a35e0926baf65f7688f89e53f525812540101c | [
"MIT"
] | null | null | null | buildutil/main.py | TediCreations/buildutils | 49a35e0926baf65f7688f89e53f525812540101c | [
"MIT"
] | null | null | null | buildutil/main.py | TediCreations/buildutils | 49a35e0926baf65f7688f89e53f525812540101c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import os
import argparse
import subprocess
if __name__ == '__main__':
from version import __version__
from configParser import ConfigParser
else:
from .version import __version__
from .configParser import ConfigParser
def command(cmd):
"""Run a shell command"""
subprocess.call(cmd, shell=True)
"""
cmd_split = cmd.split()
process = subprocess.Popen(cmd_split,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
stdout, stderr = process.communicate()
return stdout, stderr
"""
def main():
absFilePath = os.path.dirname(os.path.abspath(__file__))
cwdPath = os.path.abspath(os.getcwd())
parser = argparse.ArgumentParser(
prog="buildutil",
description="Assembly/C/C++ utility to build embedded systems",
epilog="Author: Kanelis Elias",
fromfile_prefix_chars='@')
# parser.add_argument('-v', '--verbose',
# action='store_true',
# help='an optional argument')
"""
parser.add_argument('Path',
metavar='path',
type=str,
default=cwdPath,
help='the config filepath')
"""
parser.add_argument(
'-d', '--directory',
type=str,
default=cwdPath,
help='the config filepath')
parser.add_argument(
'-v', '--version',
action='store_true',
help='get the version of the build system')
# parser.add_argument(
# '-f',
# '--file',
# help='A readable file',
# metavar='FILE',
# type=argparse.FileType('r'),
# default=None)
cmd_parser = parser.add_subparsers(dest='cmd', description="")
parser_build = cmd_parser.add_parser(
'build',
help="build the project")
parser_get_version = cmd_parser.add_parser(
'get_version',
help="try to get the version from git")
# parser_get_version.add_argument(
# '-a', '--alpha',
# dest='alpha',
# help='try to get the version')
# Execute parse_args()
args = parser.parse_args()
    subcommand = args.cmd
if args.version is True:
print(f"version: {__version__}")
exit(0)
    # The makefile path is needed both by the "build" branch and by the debug
    # output below, so resolve it up front.
    makefilePath = os.path.join(absFilePath, "conf/make/Makefile")

    # if subcommand is None or subcommand == "build":
    if subcommand == "build":
        command(f"make -f {makefilePath}")
elif subcommand == "get_version":
print("version")
else:
ConfigParser()
print("fuck")
return
# Working directory
wd = os.path.abspath(args.directory)
print(f"File: {absFilePath}")
print(F"CWD: {cwdPath}")
print(F"Working directory: {wd}")
print(F"makefile path: {makefilePath}")
print()
command(f"make -f {makefilePath}")
if __name__ == '__main__':
main()
| 20.967742 | 65 | 0.672692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,395 | 0.536538 |
16ad65c0a3b3c48d1d5528a704a36242b69e1b30 | 590 | py | Python | python/get_links.py | quiddity-wp/mediawiki-api-demos | 98910dbd9c2cbbb13db790f3e8979419aeab34d4 | [
"MIT"
] | 63 | 2019-05-19T13:22:37.000Z | 2022-03-30T13:21:40.000Z | python/get_links.py | quiddity-wp/mediawiki-api-demos | 98910dbd9c2cbbb13db790f3e8979419aeab34d4 | [
"MIT"
] | 67 | 2019-05-03T17:17:19.000Z | 2021-06-21T11:02:10.000Z | python/get_links.py | quiddity-wp/mediawiki-api-demos | 98910dbd9c2cbbb13db790f3e8979419aeab34d4 | [
"MIT"
] | 49 | 2019-02-19T09:28:33.000Z | 2019-03-24T04:36:53.000Z | #This file is auto-generated. See modules.json and autogenerator.py for details
#!/usr/bin/python3
"""
get_links.py
MediaWiki API Demos
Demo of `Links` module: Get all links on the given page(s)
MIT License
"""
import requests
S = requests.Session()
URL = "https://en.wikipedia.org/w/api.php"
PARAMS = {
"action": "query",
"format": "json",
"titles": "Albert Einstein",
"prop": "links"
}
R = S.get(url=URL, params=PARAMS)
DATA = R.json()
PAGES = DATA["query"]["pages"]
for k, v in PAGES.items():
for l in v["links"]:
print(l["title"])
| 16.857143 | 79 | 0.618644 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 357 | 0.605085 |
16adc3c8486e2f9e557cbef70e8a437e66aeb740 | 19,267 | py | Python | gautools/submit_gaussian.py | thompcinnamon/QM-calc-scripts | 60b06e14b2efd307d419201079bb24152ab0bd3c | [
"Apache-2.0"
] | null | null | null | gautools/submit_gaussian.py | thompcinnamon/QM-calc-scripts | 60b06e14b2efd307d419201079bb24152ab0bd3c | [
"Apache-2.0"
] | 2 | 2018-07-18T19:53:08.000Z | 2019-02-25T23:25:51.000Z | gautools/submit_gaussian.py | theavey/QM-calc-scripts | 60b06e14b2efd307d419201079bb24152ab0bd3c | [
"Apache-2.0"
] | 1 | 2017-01-04T20:50:21.000Z | 2017-01-04T20:50:21.000Z | #! /usr/bin/env python3
########################################################################
# #
# This script was written by Thomas Heavey in 2015. #
# [email protected] [email protected] #
# #
# Copyright 2015 Thomas J. Heavey IV #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or #
# implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
########################################################################
# This is written to work with python 3 because it should be good to
# be working on the newest version of python.
from __future__ import print_function
import argparse # For parsing commandline arguments
import datetime
import glob # Allows referencing file system/file names
import os
import re
import readline # Allows easier file input (with tab completion?)
import subprocess # Allows for submitting commands to the shell
from warnings import warn
from thtools import cd, make_obj_dir, save_obj, resolve_path
yes = ['y', 'yes', '1']
# An input function that can prefill in the text entry
# Not sure if this works in 3.5+ because raw_input is gone
def rlinput(prompt, prefill=''):
readline.set_startup_hook(lambda: readline.insert_text(prefill))
try:
return input(prompt)
finally:
readline.set_startup_hook()
def _dir_and_file(path):
warn('_dir_and_file is deprecated. Use os.path.split instead',
DeprecationWarning)
if '/' in path:
rel_dir, f_name = path.rsplit('/', 1)
rel_dir = rel_dir + '/'
else:
rel_dir = ''
f_name = path
return rel_dir, f_name
def create_gau_input(coord_name, template, verbose=True):
"""
make gaussian input file by combining header and coordinates files
This function takes as input a file with a set of molecular
coordinates (the form should not matter, it will just be copied
into the next file) and a template file that should be the header
for the desired calculation (including charge and multiplicity),
returns the name of the file, and creates a Gaussian input file ending
with '.com'
:param str coord_name: name of file with coordinates in a format
Gaussian can read
:param str template: name of file with header for Gaussian calculation
(up to and including the charge and multiplicity)
:param bool verbose: If True, some status messages will be printed
(including file names)
:return: name of the written file
:rtype: str
"""
if verbose:
print('Creating Gaussian input file...')
_out_name = coord_name.rsplit('.', 1)[0] + '.com'
with open(_out_name, 'w') as out_file:
with open(template, 'r') as templ_file:
if verbose:
print('opened {}'.format(template))
for line in templ_file:
out_file.write(line)
if '\n' not in line:
out_file.write('\n')
with open(coord_name, 'r') as in_file:
if verbose:
print('opened {}'.format(coord_name))
for i, line in enumerate(in_file):
if i < 2:
# ignore first two lines
# number of atoms and the title/comment
continue
# if line.strip().isdigit():
# # the first line is the number of atoms
# continue
# # XYZ files created by mathematica have a comment
# # as the second line saying something like:
# # "Created by mathematica". Obv. want to ignore that
# if line.strip().startswith('Create') or
# line.strip().startswith('generated'):
# continue
# else:
out_file.write(line)
out_file.write('\n\n\n')
if verbose:
print('created Gaussian input file {}'.format(_out_name))
return _out_name
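# Usage sketch for create_gau_input (the file names are hypothetical):
#
#   com_file = create_gau_input('water.xyz', 'b3lyp_opt_header.txt')
#   # -> writes 'water.com' (header followed by coordinates) and returns its name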
def get_input_files(base_name, batch):
_in_name_list = glob.glob(base_name + '*')
_in_name_list.sort() # sort files alphanumerically
_in_name_list.sort(key=len) # sort by length (because otherwise would
# put 1,10,11,... as opposed to 1,...,9,10,...
# if number 01,02,... They should all be the same length and the
# second sort won't do anything.
if not batch:
num_files = len(_in_name_list)
if num_files > 1:
print('Multiple files starting with {}'.format(base_name))
if input('Did you mean to execute a batch job? ') in yes:
batch = True
else:
print('What file name shall I use?')
_in_name_list = [rlinput('file name: ', base_name)]
return _in_name_list, batch
def use_template(template, in_names, verbose):
made_name_list = []
for in_name in in_names:
out_name = create_gau_input(in_name, template, verbose=verbose)
made_name_list.append(out_name)
if verbose:
print('Added {} to files to possibly submit.'.format(out_name))
_in_name_list = made_name_list
_in_name_list.sort()
_in_name_list.sort(key=len)
return _in_name_list
def write_sub_script(input_name, num_cores=16, time='12:00:00', verbose=False,
mem='125', executable='g09',
chk_file=None, copy_chk=False,
ln_running=None,
hold_jid=None, xyz=None, make_xyz=None, make_input=False,
ugt_dict=None):
"""
Write submission script for (Gaussian) jobs for submission to queue
If make_xyz is not None, the file make_xyz will be checked to exist
first to make sure to not waste time when missing a necessary input file.
:param str input_name: Name of the file to use as input
:param int num_cores: Number of cores to request
:param str time: Amount of time to request in the format 'hh:mm:ss'
:param bool verbose: If True, print out some status messages and such
:type mem: int or str
:param mem: Minimum amount of memory to request
:param str executable: Executable file to use for the job
Example, 'g09', 'g16'
:param str chk_file: If not None, this file will be copied back after the
job has completed. If this is not None and make_input is True,
this will also be passed to use_gen_template.
:param bool copy_chk: If this is True, the script will attempt to copy
what should be an existing checkpoint file to the scratch directory
before running the job. `chk_file` must be not None as well.
:param str ln_running: If not None, this will be the base name for
linking the output file to the current directory. If chk_file is not
None, it will also be linked with the same base name.
:param str hold_jid: Job on which this job should depend.
This should be the name of another job in the queuing system.
:param str xyz: Name of an xyz file to use as input to use_gen_template
(if make_input is True).
:param str make_xyz: The name of a file to pass to obabel to be used to
create an xyz file to pass to use_gen_template.
:param bool make_input: If True, use_gen_template will be used to create
input for the Gaussian calculation.
:param dict ugt_dict: dict of arguments to pass to use_gen_template.
This should not include out_file, xyz, nproc, mem, or checkpoint
because those will all be used from other arguments to this function.
out_file will be input_name; xyz will be xyz or a time-based name if
make_xyz is not None; nproc will be $NSLOTS (useful if this gets
changed after job submission); mem will be mem; and checkpoint will
be chk_file.
:return: The name of the script file
:rtype: str
"""
rel_dir, file_name = os.path.split(input_name)
if file_name.endswith('.com'):
short_name = os.path.splitext(file_name)[0]
if not short_name + '.com' == file_name:
raise SyntaxError('problem interpreting file name. ' +
'Period in file name?')
out_name = short_name + '.out'
elif '.' in file_name:
        short_name, input_extension = os.path.splitext(file_name)
        # os.path.splitext keeps the leading dot in the extension
        if not short_name + input_extension == file_name:
raise SyntaxError('problem interpreting file name. ' +
'Period in file name?')
out_name = short_name + '.out'
else:
short_name = file_name
file_name = short_name + '.com'
print('Assuming input file is {}'.format(file_name))
out_name = short_name + '.out'
job_name = re.match(r'.*?([a-zA-Z].*)', short_name).group(1)
if len(job_name) == 0:
job_name = 'default'
_script_name = os.path.join(rel_dir, 'submit'+short_name+'.sh')
temp_xyz = os.path.abspath('.temp' +
datetime.datetime.now().strftime('%H%M%S%f') +
'.xyz')
if xyz is None or make_xyz is not None:
n_xyz = temp_xyz
else:
n_xyz = resolve_path(xyz)
temp_pkl = temp_xyz[:-4]
if ugt_dict is not None:
make_obj_dir()
pkl_path = save_obj(ugt_dict, temp_pkl)
if chk_file is not None:
chk_line = 'checkpoint=\'{}\','.format(chk_file)
else:
chk_line = ''
with open(_script_name, 'w') as script_file:
sfw = script_file.write
sfw('#!/bin/bash -l\n\n')
sfw('#$ -pe omp {}\n'.format(num_cores))
sfw('#$ -M [email protected]\n')
sfw('#$ -m eas\n')
sfw('#$ -l h_rt={}\n'.format(time))
sfw('#$ -l mem_total={}G\n'.format(mem))
sfw('#$ -N {}\n'.format(job_name))
sfw('#$ -j y\n')
sfw('#$ -o {}.log\n\n'.format(short_name))
if hold_jid is not None:
sfw('#$ -hold_jid {}\n\n'.format(hold_jid))
if make_xyz is not None:
sfw('if [ ! -f {} ]; then\n'.format(
os.path.abspath(make_xyz)) +
' exit 17\n'
'fi\n\n')
sfw('module load wxwidgets/3.0.2\n')
sfw('module load openbabel/2.4.1\n\n')
sfw('obabel {} -O {}\n\n'.format(os.path.abspath(
make_xyz), os.path.abspath(n_xyz)))
if make_input:
sfw('python -c "from gautools.tools import '
'use_gen_template as ugt;\n'
'from thtools import load_obj, get_node_mem;\n'
'm = get_node_mem();\n'
'd = load_obj(\'{}\');\n'.format(
os.path.abspath(pkl_path)) +
'ugt(\'{}\',\'{}\','.format(
file_name, os.path.abspath(n_xyz)) +
'nproc=$NSLOTS,mem=m,{}'.format(chk_line) +
'**d)"\n\n')
sfw('INPUTFILE={}\n'.format(file_name))
sfw('OUTPUTFILE={}\n'.format(out_name))
if chk_file is not None:
sfw('CHECKFILE={}\n\n'.format(chk_file))
else:
sfw('\n')
if ln_running is not None:
sfw('WORKINGOUT={}.out\n'.format(ln_running))
if chk_file is not None:
sfw('WORKINGCHK={}.chk\n\n'.format(ln_running))
else:
sfw('\n')
sfw('CURRENTDIR=`pwd`\n')
sfw('SCRATCHDIR=/scratch/$USER\n')
sfw('mkdir -p $SCRATCHDIR\n\n')
sfw('cd $SCRATCHDIR\n\n')
sfw('cp $CURRENTDIR/$INPUTFILE .\n')
if chk_file is not None:
sfw('# ') if not copy_chk else None
sfw('cp $CURRENTDIR/$CHECKFILE .\n\n')
else:
sfw('\n')
if ln_running is not None:
sfw('ln -s -b /net/`hostname -s`$PWD/$OUTPUTFILE '
'$CURRENTDIR/$WORKINGOUT\n')
if chk_file is not None:
sfw('ln -s -b /net/`hostname -s`$PWD/$CHECKFILE '
'$CURRENTDIR/$WORKINGCHK\n\n')
else:
sfw('\n')
sfw('echo About to run {} in /net/`'.format(executable) +
'hostname -s`$SCRATCHDIR\n\n')
sfw('{} <$INPUTFILE > $OUTPUTFILE'.format(executable))
sfw('\n\n')
if ln_running is not None:
sfw('rm $CURRENTDIR/$WORKINGOUT')
if chk_file is not None:
sfw(' $CURRENTDIR/$WORKINGCHK\n\n')
else:
sfw('\n\n')
sfw('cp $OUTPUTFILE $CURRENTDIR/.\n')
if chk_file is not None:
sfw('cp $CHECKFILE $CURRENTDIR/.\n\n')
else:
sfw('\n')
sfw('echo ran in /net/`hostname -s`$SCRATCHDIR\n')
sfw('echo output was copied to $CURRENTDIR\n\n')
if verbose:
print('script written to {}'.format(_script_name))
return _script_name
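# Usage sketch for write_sub_script (values are hypothetical; only input_name
# is required):
#
#   script = write_sub_script('benzene.com', num_cores=8, time='04:00:00',
#                             executable='g16', chk_file='benzene.chk')
#   # -> writes submitbenzene.sh next to the input file and returns its path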
def submit_scripts(scripts, batch=False, submit=False, verbose=False):
outputs = []
if batch:
if submit or input('submit all jobs? ') in yes:
for script in scripts:
rd, f = _dir_and_file(script)
with cd(rd, ignore_blank=True):
cl = ['qsub', f]
# Don't really know how this works. Copied from
# http://stackoverflow.com/questions/4256107/
# running-bash-commands-in-python
process = subprocess.Popen(cl,
stdout=subprocess.PIPE,
universal_newlines=True)
output = process.communicate()[0]
if verbose:
print(output)
outputs.append(output)
else:
if verbose:
print('No jobs submitted, but scripts created')
else:
if submit or input('submit job {}? '.format(scripts[0])) in yes:
rd, f = _dir_and_file(scripts[0])
with cd(rd, ignore_blank=True):
cl = ['qsub', f]
# Don't really know how this works. Copied from
# http://stackoverflow.com/questions/4256107/
# running-bash-commands-in-python
process = subprocess.Popen(cl,
stdout=subprocess.PIPE,
universal_newlines=True)
output = process.communicate()[0]
if verbose:
print(output)
outputs.append(output)
else:
if verbose:
print('{} not submitted'.format(scripts))
_job_info = [' '.join(output.split(' ')[2:4]) for output in outputs]
return _job_info
if __name__ == '__main__':
description = 'Create and submit a script to run a Gaussian job on SCC'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('in_name',
help='Name of Gaussian input file')
parser.add_argument('-c', '--numcores', type=int, default=16,
help='Number of cores for job')
# I should probably check validity of this time request
# Maybe it doesn't matter so much because it just won't
# submit the job and it will give quick feedback about that?
parser.add_argument('-t', '--time',
help='Time required as "hh:mm:ss"',
default='12:00:00')
parser.add_argument('-e', '--executable', type=str, default='g09',
help='name of executable to run')
parser.add_argument('-b', '--batch', action='store_true',
help='create multiple scripts (batch job)')
parser.add_argument('-x', '--template', default=None,
help='template file for creating input from coords')
parser.add_argument('-s', '--submit', action='store_true',
help='Automatically submit jobs?')
parser.add_argument('-v', '--verbose', action='store_true',
help='make program more verbose')
parser.add_argument('-j', '--nojobinfo', action='store_false',
help='Do not return the submitted job information')
parser.add_argument('-k', '--chk_file', default=None,
help='checkpoint file to be written and copied back')
parser.add_argument('--copy_chk', action='store_true',
help='Copy check file to the scratch directory')
parser.add_argument('-l', '--ln_running', type=str, default=None,
help='base name for linking output to cwd while '
'running')
parser.add_argument('-d', '--hold_jid', default=None,
help='job on which this job should depend')
args = parser.parse_args()
in_name_list, args.batch = get_input_files(args.in_name, args.batch)
if args.template:
in_name_list = use_template(args.template, in_name_list, args.verbose)
script_list = []
for in_name in in_name_list:
script_name = write_sub_script(input_name=in_name,
num_cores=args.numcores,
time=args.time,
verbose=args.verbose,
executable=args.executable,
chk_file=args.chk_file,
copy_chk=args.copy_chk,
ln_running=args.ln_running,
hold_jid=args.hold_jid)
script_list.append(script_name)
if not len(script_list) == len(in_name_list):
# This should never be the case as far as I know, but I would
# like to make sure everything input gets a script and all the
# script names are there to be submitted.
raise IOError('num scripts dif. from num names given')
job_info = submit_scripts(script_list, args.batch, args.submit,
args.verbose)
if job_info and args.nojobinfo:
for job in job_info:
print(job)
if args.verbose:
print('Done. Completed normally.')
| 44.496536 | 78 | 0.553537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9,207 | 0.477864 |
16aff0c4c406b2f10dac6cda72a39c612f61400e | 2,036 | py | Python | experiments/recorder.py | WeiChengTseng/maddpg | f2813ab8bc43e2acbcc69818672e2e2fd305a007 | [
"MIT"
] | 3 | 2022-01-04T13:32:11.000Z | 2022-01-11T05:59:22.000Z | experiments/recorder.py | WeiChengTseng/maddpg | f2813ab8bc43e2acbcc69818672e2e2fd305a007 | [
"MIT"
] | null | null | null | experiments/recorder.py | WeiChengTseng/maddpg | f2813ab8bc43e2acbcc69818672e2e2fd305a007 | [
"MIT"
] | null | null | null | import json
import copy
import pdb
import numpy as np
import pickle
def listify_mat(matrix):
matrix = np.array(matrix).astype(str)
if len(matrix.shape) > 1:
matrix_list = []
for row in matrix:
try:
matrix_list.append(list(row))
except:
pdb.set_trace()
return matrix_list
else:
return list(matrix)
class Recorder():
def __init__(self):
self._traj, self._cur_traj = [], []
return
def pack_traj(self):
self._traj.append(copy.deepcopy(self._cur_traj))
self._cur_traj = []
return
def add(self, o, a, r, d):
# self._cur_traj.append((o, a, r, d))
self._cur_traj.append(
(listify_mat(o), listify_mat(a), listify_mat(r), d))
return
def export_pickle(self, filename='traj'):
if filename == '':
raise ValueError('incorrect file name')
traj = []
for t in self._traj:
obs = np.array([tt[0] for tt in t]).astype(np.float32)
act = np.array([tt[1] for tt in t]).astype(np.float32)
rwd = np.array([tt[2] for tt in t]).astype(np.float32)
done = np.array([tt[3] for tt in t])
# pdb.set_trace()
traj.append({
'observations': obs[:-1],
'next_observations': obs[1:],
'actions': act[:-1],
'rewards': rwd[:-1],
'terminals': done[:-1]
})
with open('{}.pkl'.format(filename), 'wb') as outfile:
pickle.dump(traj, outfile)
return
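    # Typical flow (a sketch): call add() once per environment step,
    # pack_traj() at the end of each episode, and export_pickle()/export()
    # once recording is finished to write the trajectories to disk.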
def export(self, filename='traj'):
if filename == '':
raise ValueError('incorrect file name')
traj = {'traj': []}
for t in self._traj:
traj['traj'].append(t)
# json.dumps(traj, sort_keys=True, indent=4)
with open('{}.json'.format(filename), 'w') as outfile:
json.dump(traj, outfile)
return | 27.513514 | 66 | 0.515717 | 1,634 | 0.802554 | 0 | 0 | 0 | 0 | 0 | 0 | 254 | 0.124754 |
16b0c13e303ebbec34fd3a80391f02025c584689 | 589 | py | Python | generate/dummy_data/mvp/gen_csv.py | ifekxp/data | f3571223f51b3fcc3a708d9ac82e76e3cc1ee068 | [
"MIT"
] | null | null | null | generate/dummy_data/mvp/gen_csv.py | ifekxp/data | f3571223f51b3fcc3a708d9ac82e76e3cc1ee068 | [
"MIT"
] | null | null | null | generate/dummy_data/mvp/gen_csv.py | ifekxp/data | f3571223f51b3fcc3a708d9ac82e76e3cc1ee068 | [
"MIT"
] | null | null | null | from faker import Faker
import csv
# Reference: https://pypi.org/project/Faker/
output = open('data.CSV', 'w', newline='')
fake = Faker()
header = ['name', 'age', 'street', 'city', 'state', 'zip', 'lng', 'lat']
mywriter=csv.writer(output)
mywriter.writerow(header)
for r in range(1000):
mywriter.writerow([
fake.name(),
fake.random_int(min=18, max=80, step=1),
fake.street_address(),
fake.city(),
fake.state(),
fake.zipcode(),
fake.longitude(),
fake.latitude()
])
output.close() | 21.814815 | 73 | 0.556876 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 107 | 0.181664 |
16b0eceb3e8aafd2e9b6e9e274abab88018c34aa | 495 | py | Python | subir/ingreso/migrations/0004_auto_20191003_1509.py | Brandon1625/subir | b827a30e64219fdc9de07689d2fb32e2c4bd02b7 | [
"bzip2-1.0.6"
] | null | null | null | subir/ingreso/migrations/0004_auto_20191003_1509.py | Brandon1625/subir | b827a30e64219fdc9de07689d2fb32e2c4bd02b7 | [
"bzip2-1.0.6"
] | null | null | null | subir/ingreso/migrations/0004_auto_20191003_1509.py | Brandon1625/subir | b827a30e64219fdc9de07689d2fb32e2c4bd02b7 | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 2.2.4 on 2019-10-03 21:09
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ingreso', '0003_auto_20190907_2152'),
]
operations = [
migrations.AlterField(
model_name='detalle_ingreso',
name='id_prod',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='producto.Producto'),
),
]
| 24.75 | 116 | 0.650505 | 369 | 0.745455 | 0 | 0 | 0 | 0 | 0 | 0 | 126 | 0.254545 |
16b1afada94a1ed1f6f7ce90f2dda1d6203c70b0 | 1,302 | py | Python | pyscf/nao/test/test_0017_tddft_iter_nao.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | [
"Apache-2.0"
] | 3 | 2021-02-28T00:52:53.000Z | 2021-03-01T06:23:33.000Z | pyscf/nao/test/test_0017_tddft_iter_nao.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | [
"Apache-2.0"
] | 36 | 2018-08-22T19:44:03.000Z | 2020-05-09T10:02:36.000Z | pyscf/nao/test/test_0017_tddft_iter_nao.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | [
"Apache-2.0"
] | 4 | 2018-02-14T16:28:28.000Z | 2019-08-12T16:40:30.000Z | from __future__ import print_function, division
import os,unittest
from pyscf.nao import tddft_iter
dname = os.path.dirname(os.path.abspath(__file__))
td = tddft_iter(label='water', cd=dname)
try:
from pyscf.lib import misc
libnao_gpu = misc.load_library("libnao_gpu")
td_gpu = tddft_iter(label='water', cd=dname, GPU=True)
except:
td_gpu = None
class KnowValues(unittest.TestCase):
def test_tddft_iter(self):
""" This is iterative TDDFT with SIESTA starting point """
self.assertTrue(hasattr(td, 'xocc'))
self.assertTrue(hasattr(td, 'xvrt'))
self.assertTrue(td.ksn2f.sum()==8.0) # water: O -- 6 electrons in the valence + H2 -- 2 electrons
self.assertEqual(td.xocc[0].shape[0], 4)
self.assertEqual(td.xvrt[0].shape[0], 19)
dn0 = td.apply_rf0(td.moms1[:,0])
def test_tddft_iter_gpu(self):
""" Test GPU version """
if td_gpu is not None:
self.assertTrue(hasattr(td_gpu, 'xocc'))
self.assertTrue(hasattr(td_gpu, 'xvrt'))
self.assertTrue(td_gpu.ksn2f.sum()==8.0) # water: O -- 6 electrons in the valence + H2 -- 2 electrons
self.assertEqual(td_gpu.xocc[0].shape[0], 4)
self.assertEqual(td_gpu.xvrt[0].shape[0], 19)
dn0 = td_gpu.apply_rf0(td_gpu.moms1[:,0])
if __name__ == "__main__": unittest.main()
| 33.384615 | 107 | 0.678955 | 887 | 0.68126 | 0 | 0 | 0 | 0 | 0 | 0 | 262 | 0.201229 |
16b1e0777507d0977f5c8842b27867dc734bcc90 | 898 | py | Python | setup.py | dimasciput/osm2geojson | 7b5ba25e39d80838d41f342237161e0fdc5e64b6 | [
"MIT"
] | null | null | null | setup.py | dimasciput/osm2geojson | 7b5ba25e39d80838d41f342237161e0fdc5e64b6 | [
"MIT"
] | null | null | null | setup.py | dimasciput/osm2geojson | 7b5ba25e39d80838d41f342237161e0fdc5e64b6 | [
"MIT"
] | null | null | null | import io
from os import path
from setuptools import setup
dirname = path.abspath(path.dirname(__file__))
with io.open(path.join(dirname, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def parse_requirements(filename):
lines = (line.strip() for line in open(path.join(dirname, filename)))
return [line for line in lines if line and not line.startswith("#")]
setup(
name='osm2geojson',
version='0.1.27',
license='MIT',
description='Parse OSM and Overpass JSON',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='geometry gis osm parsing',
author='Parfeniuk Mykola',
author_email='[email protected]',
url='https://github.com/aspectumapp/osm2geojson',
packages=['osm2geojson'],
include_package_data=True,
install_requires=parse_requirements("requirements.txt")
)
| 32.071429 | 73 | 0.722717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 239 | 0.266147 |
16b268fae933e4415a5583a098a6d7daa28d2e18 | 849 | py | Python | Cap_11/ex11.6.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | 4 | 2020-04-07T00:38:46.000Z | 2022-03-10T03:34:42.000Z | Cap_11/ex11.6.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | null | null | null | Cap_11/ex11.6.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | 1 | 2021-04-22T02:45:38.000Z | 2021-04-22T02:45:38.000Z | import sqlite3
from contextlib import closing
nome = input('Nome do produto: ').lower().capitalize()
with sqlite3.connect('precos.db') as conexao:
with closing(conexao.cursor()) as cursor:
cursor.execute('SELECT * FROM Precos WHERE nome_produto = ?', (nome,))
registro = cursor.fetchone()
if not(registro is None):
print(f'Nome: {registro[0]} | Preço: R${registro[1]:.2f}')
valor = float(input('Novo valor: R$'))
cursor.execute('UPDATE Precos SET preco = ? WHERE nome_produto = ?', (valor, registro[0]))
if cursor.rowcount == 1:
conexao.commit()
print('Alteração gravada.')
else:
conexao.rollback()
print('Alteração abortada.')
else:
print(f'Produto {nome} não encontrado.') | 38.590909 | 102 | 0.572438 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 274 | 0.320468 |
16b631fdc9b05e860febb665678ebc3703e11591 | 4,882 | py | Python | jet20/backend/solver.py | JTJL/jet20 | 2dc01ebf937f8501bcfb15c6641c569f8097ccf5 | [
"MIT"
] | 1 | 2020-07-13T19:02:26.000Z | 2020-07-13T19:02:26.000Z | jet20/backend/solver.py | JTJL/jet20 | 2dc01ebf937f8501bcfb15c6641c569f8097ccf5 | [
"MIT"
] | null | null | null | jet20/backend/solver.py | JTJL/jet20 | 2dc01ebf937f8501bcfb15c6641c569f8097ccf5 | [
"MIT"
] | null | null | null |
import torch
import time
import copy
from jet20.backend.constraints import *
from jet20.backend.obj import *
from jet20.backend.config import *
from jet20.backend.core import solve,OPTIMAL,SUB_OPTIMAL,USER_STOPPED
import logging
logger = logging.getLogger(__name__)
class Solution(object):
def __init__(self,x,_vars,obj_value,status,duals):
self.status = status
self.obj_value = obj_value
self.vars = _vars
self.x = x
        self.duals = duals
def __str__(self):
return "obj_value: %s vars:%s" % (self.obj_value,self.vars)
__repr__ = __str__
class Problem(object):
def __init__(self,_vars,obj,le_cons=None,eq_cons=None):
self.obj = obj
self.le = le_cons
self.eq = eq_cons
self.vars = _vars
self.n = len(_vars)
@classmethod
def from_numpy(cls,_vars,obj=None,le=None,eq=None,device=torch.device("cpu"),dtype=torch.float64):
def convert(x):
if x is not None:
if isinstance(x,torch.Tensor):
return x.type(dtype).to(device)
else:
return torch.tensor(x,dtype=dtype,device=device)
else:
return None
if obj is not None:
obj_Q,obj_b,obj_c = [convert(x) for x in obj]
if obj_Q is not None:
obj = QuadraticObjective(obj_Q,obj_b,obj_c)
elif obj_b is not None:
obj = LinearObjective(obj_b,obj_c)
if le is not None:
le_A,le_b = [convert(x) for x in le]
if le_b.ndim == 2 and le_b.size(0) == 1:
le_b = le_b.squeeze(0)
le = LinearLeConstraints(le_A,le_b)
if eq is not None:
eq_A,eq_b = [convert(x) for x in eq]
if eq_b.ndim == 2 and eq_b.size(0) == 1:
eq_b = eq_b.squeeze(0)
eq = LinearEqConstraints(eq_A,eq_b)
return cls(_vars,obj,le,eq)
def float(self):
if self.le is not None:
le = self.le.float()
else:
le = None
if self.eq is not None:
eq = self.eq.float()
else:
eq = None
obj = self.obj.float()
return self.__class__(self.vars,obj,le,eq)
def double(self):
if self.le is not None:
le = self.le.double()
else:
le = None
if self.eq is not None:
eq = self.eq.double()
else:
eq = None
obj = self.obj.double()
return self.__class__(self.vars,obj,le,eq)
def to(self,device):
        if self.le is not None:
            le = self.le.to(device)
        else:
            le = None
        if self.eq is not None:
            eq = self.eq.to(device)
        else:
            eq = None
obj = self.obj.to(device)
return self.__class__(self.vars,obj,le,eq)
def build_solution(self,x,obj_value,status,duals):
_vars = { var: v.item() for var,v in zip(self.vars,x)}
return Solution(x.cpu().numpy(),_vars,obj_value.item(),status,duals)
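# Usage sketch (hedged: the data is made up and `config` stands for whatever
# configuration object jet20.backend.config provides, e.g. one carrying the
# `device` attribute used by Solver.solve below):
#
#   import numpy as np
#   p = Problem.from_numpy(["x1", "x2"],
#                          obj=(None, np.array([1.0, 2.0]), 0.0),           # linear objective
#                          le=(np.array([[1.0, 1.0]]), np.array([4.0])))    # x1 + x2 <= 4
#   solution = Solver().solve(p, config)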
class Solver(object):
def __init__(self):
self.pres = []
self.posts = []
def solve(self,p,config,x=None):
for pre in self.pres:
start = time.time()
p,x = pre.preprocess(p,x,config)
logger.debug("preprocessing name:%s, time used:%s",pre.name(),time.time()-start)
if x is None:
x = torch.zeros(p.n).float().to(config.device)
start = time.time()
p_f32 = p.float()
x = x.float()
x,_,status,duals = solve(p_f32,x,config,fast=True)
logger.debug("fast mode, time used:%s",time.time()-start)
x = x.double()
if isinstance(duals,(tuple,list)):
duals = [d.double() for d in duals]
else:
duals = duals.double()
if status == SUB_OPTIMAL:
start = time.time()
# p = p.double()
x,_,status,duals = solve(p,x,config,fast=True,duals=duals)
logger.debug("fast-precision mode, time used:%s",time.time()-start)
if status == SUB_OPTIMAL:
start = time.time()
x,_,status,duals = solve(p,x,config,fast=False,duals=duals)
logger.debug("precision mode, time used:%s",time.time()-start)
if status != OPTIMAL:
logger.warning("optimal not found, status:%s",status)
for post in self.posts:
start = time.time()
p,x = post.postprocess(p,x,config)
logger.debug("postprocessing name:%s, time used:%s",post.name(),time.time()-start)
return p.build_solution(x,p.obj(x),status,duals)
def register_pres(self,*pres):
self.pres.extend(pres)
def register_posts(self,*posts):
self.posts.extend(posts)
| 26.247312 | 102 | 0.546907 | 4,575 | 0.937116 | 0 | 0 | 1,153 | 0.236174 | 0 | 0 | 239 | 0.048955 |
16b8038b17e6b43264d1acbee80a12ded5b8d440 | 1,077 | py | Python | tests/test_transforms.py | mengfu188/mmdetection.bak | 0bc0ea591b5725468f83f9f48630a1e3ad599303 | [
"Apache-2.0"
] | 2 | 2020-07-14T13:55:17.000Z | 2021-05-07T11:25:31.000Z | tests/test_transforms.py | mengfu188/mmdetection.bak | 0bc0ea591b5725468f83f9f48630a1e3ad599303 | [
"Apache-2.0"
] | null | null | null | tests/test_transforms.py | mengfu188/mmdetection.bak | 0bc0ea591b5725468f83f9f48630a1e3ad599303 | [
"Apache-2.0"
] | null | null | null | import torch
from mmdet.datasets.pipelines.transforms import Pad
from mmdet.datasets.pipelines.transforms import FilterBox
import numpy as np
import cv2
def test_pad():
raw = dict(
img=np.zeros((200, 401, 3), dtype=np.uint8)
)
cv2.imshow('raw', raw['img'])
pad = Pad(square=True, pad_val=255)
r = pad(raw)
print(r['img'].shape)
cv2.imshow('draw', r['img'])
cv2.waitKey()
raw = dict(
img=np.zeros((402, 401, 3), dtype=np.uint8)
)
cv2.imshow('raw', raw['img'])
pad = Pad(square=True, pad_val=255)
r = pad(raw)
print(r['img'].shape)
cv2.imshow('draw', r['img'])
cv2.waitKey()
def test_filter_box():
bboxes = np.array([[0, 0, 10, 10],
[10, 10, 20, 20],
[10, 10, 19, 20],
[10, 10, 20, 19],
[10, 10, 19, 19]])
gt_bboxes = np.array([[0, 0, 10, 9]])
result = dict(gt_bboxes=bboxes)
fb = FilterBox((10, 10))
fb(result)
if __name__ == '__main__':
# test_pad()
test_filter_box()
| 22.914894 | 57 | 0.535747 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.068709 |
16b8947aeb5e92484b74a59f50dce7a8d1075f22 | 23,601 | py | Python | dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py | akulamartin/lumberyard | 2d4be458a02845179be098e40cdc0c48f28f3b5a | [
"AML"
] | 8 | 2019-10-07T16:33:47.000Z | 2020-12-07T03:59:58.000Z | dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py | 29e7e280-0d1c-4bba-98fe-f7cd3ca7500a/lumberyard | 1c52b941dcb7d94341fcf21275fe71ff67173ada | [
"AML"
] | null | null | null | dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py | 29e7e280-0d1c-4bba-98fe-f7cd3ca7500a/lumberyard | 1c52b941dcb7d94341fcf21275fe71ff67173ada | [
"AML"
] | 4 | 2019-08-05T07:25:46.000Z | 2020-12-07T05:12:55.000Z | #
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
from waflib import Errors
import lumberyard_modules
import unittest
import pytest
import utils
class FakeContext(object):
pass
class FakeIncludeSettings(object):
pass
class FakePlatformSettings(object):
def __init__(self, platform_name, aliases=set()):
self.platform = platform_name
self.aliases = aliases
class FakeConfigurationSettings(object):
def __init__(self, settings_name, base_config=None):
self.base_config = base_config
self.name = settings_name
class FakeConfiguration(object):
def __init__(self, settings, is_test=False, is_server=False):
self.settings = settings
self.is_test = is_test
self.is_server = is_server
@pytest.fixture()
def mock_parse_json(mock_json_map):
if not mock_json_map:
mock_json_map = {'path': {}}
def _mock_parse_json(path, _):
return mock_json_map[path]
old_parse_json_file = utils.parse_json_file
utils.parse_json_file = _mock_parse_json
yield
utils.parse_json_file = old_parse_json_file
@pytest.fixture()
def fake_context():
return FakeContext()
def test_SanitizeKWInput_SimpleKwDictionary_Success():
kw = dict(
libpath='mylib'
)
lumberyard_modules.sanitize_kw_input(kw)
assert isinstance(kw['libpath'], list)
assert kw['libpath'][0] == 'mylib'
def test_SanitizeKWInput_SimpleKwDictionaryInAdditionalSettings_Success():
kw = dict(
libpath='mylib',
additional_settings=dict(stlibpath='mystlib')
)
lumberyard_modules.sanitize_kw_input(kw)
assert isinstance(kw['libpath'], list)
assert kw['libpath'][0] == 'mylib'
assert isinstance(kw['additional_settings'], list)
assert isinstance(kw['additional_settings'][0], dict)
assert isinstance(kw['additional_settings'][0]['stlibpath'], list)
assert kw['additional_settings'][0]['stlibpath'][0] == 'mystlib'
@pytest.mark.parametrize(
"target, kw_key, source_section, additional_aliases, merge_dict, expected", [
pytest.param('test_target', 'fake_key', {}, {}, {}, {}, id='MissingKeyInSourceNoChange'),
pytest.param('test_target', 'fake_key', {'fake_key': 'fake_value'}, {}, {}, {'fake_key': 'fake_value'}, id='MissingKeyInTargetKeyAdded'),
pytest.param('test_target', 'copyright_org', {'copyright_org': False}, {}, {'copyright_org': 'AMZN'}, type(Errors.WafError), id='InvalidStringKwInSourceError'),
pytest.param('test_target', 'copyright_org', {'copyright_org': 'AMZN'}, {}, {'copyright_org': False}, type(Errors.WafError), id='InvalidStringKwInTargetError'),
pytest.param('test_target', 'copyright_org', {'copyright_org': 'AMZN'}, {}, {'copyright_org': 'A2Z'}, {'copyright_org': 'AMZN'}, id='MergeStringReplaceSuccess'),
pytest.param('test_target', 'client_only', {'client_only': 'False'}, {}, {'client_only': True}, type(Errors.WafError), id='InvalidBoolKwInSourceError'),
pytest.param('test_target', 'client_only', {'client_only': False}, {}, {'client_only': 'True'}, type(Errors.WafError), id='InvalidBoolKwInTargetError'),
pytest.param('test_target', 'client_only', {'client_only': False}, {}, {'client_only': True}, {'client_only': False}, id='MergeBoolReplaceKwSuccess'),
])
def test_ProjectSettingsFileMergeKwKey_ValidInputs(mock_parse_json, target, kw_key, source_section, additional_aliases, merge_dict, expected):
fake_context = FakeContext()
test_settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
if isinstance(expected,dict):
test_settings.merge_kw_key(target=target,
kw_key=kw_key,
source_section=source_section,
merge_kw=merge_dict)
assert merge_dict == expected
elif isinstance(expected, type(Errors.WafError)):
with pytest.raises(Errors.WafError):
test_settings.merge_kw_key(target=target,
kw_key=kw_key,
source_section=source_section,
merge_kw=merge_dict)
@pytest.mark.parametrize(
"test_dict, fake_include_settings, mock_json_map, additional_aliases, expected", [
pytest.param({}, None, None, {}, {}, id='BasicNoAdditionalAliasNoAdditionalIncludes'),
pytest.param({}, 'include_test',
{
'path': {
'includes': ['include_test']
},'include_test': {}
}, {}, {'includes': ['include_test']}, id='BasicNoAdditionalAliasSingleAdditionalIncludes')
])
def test_ProjectSettingsFileIncludes_ValidInputs(mock_parse_json, fake_context, test_dict, fake_include_settings, mock_json_map, additional_aliases, expected):
if fake_include_settings:
def _mock_get_project_settings_file(include_settings_file, additional_aliases):
assert fake_include_settings == include_settings_file
fake_settings = FakeIncludeSettings()
return fake_settings
fake_context.get_project_settings_file = _mock_get_project_settings_file
test = lumberyard_modules.ProjectSettingsFile(fake_context,
'path',
additional_aliases)
assert test.dict == expected
@pytest.mark.parametrize(
"mock_json_map, additional_aliases, section_key, expected", [
pytest.param(None, {}, 'no_section', {}, id='SimpleNoChange'),
pytest.param({
'path': {
"test_section": {
"key1": "value1"
}
}
}, {}, 'test_section', {'key1': 'value1'}, id='SimpleChanges')
])
def test_ProjectSettingsFileMergeKwSection_ValidInputs_Success(mock_parse_json, fake_context, mock_json_map, additional_aliases, section_key, expected):
test_settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
merge_dict = {}
test_settings.merge_kw_section(section_key=section_key,
target='test_target',
merge_kw=merge_dict)
assert expected == merge_dict
class ProjectSettingsTest(unittest.TestCase):
def setUp(self):
self.old_parse_json = utils.parse_json_file
utils.parse_json_file = self.mockParseJson
self.mock_json_map = {}
def tearDown(self):
utils.parse_json_file = self.old_parse_json
def mockParseJson(self, path, _):
return self.mock_json_map[path]
def createSimpleSettings(self, fake_context = FakeContext(), test_dict={}, additional_aliases={}):
self.mock_json_map = {'path': test_dict}
test_settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
return test_settings
def test_ProjectSettingsFileMergeKwDict_RecursiveMergeAdditionalSettingsNoPlatformNoConfiguration_Success(self):
"""
Test scenario:
Setup a project settings that contains other project settings, so that it can recursively call merge_kw_dict
recursively
"""
include_settings_file = 'include_test'
test_settings_single_include = {'includes': [include_settings_file]}
test_empty_settings = {}
test_merge_kw_key = 'passed'
test_merge_kw_value = True
self.mock_json_map = {'path': test_settings_single_include,
include_settings_file: test_empty_settings}
# Prepare a mock include settings object
test_include_settings = self.createSimpleSettings()
def _mock_merge_kw_dict(target, merge_kw, platform, configuration):
merge_kw[test_merge_kw_key] = test_merge_kw_value
pass
test_include_settings.merge_kw_dict = _mock_merge_kw_dict
# Prepare a mock context
fake_context = FakeContext()
def _mock_get_project_settings_file(_a, _b):
return test_include_settings
fake_context.get_project_settings_file = _mock_get_project_settings_file
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_settings_single_include)
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=None,
configuration=None)
self.assertIn(test_merge_kw_key, test_merge_kw)
self.assertEqual(test_merge_kw[test_merge_kw_key], test_merge_kw_value)
def test_ProjectSettingsFileMergeKwDict_MergePlatformSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when only platform is set and not any configurations
"""
test_platform = 'test_platform'
test_alias = 'alias_1'
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform',
aliases={test_alias})
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform,
configuration=None)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/*'.format(test_platform), sections_merged)
self.assertIn('{}/*'.format(test_alias), sections_merged)
self.assertEqual(len(sections_merged), 2)
def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedNoTestNoDedicatedSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when the platform + configuration is set, and the configuration is not a test nor
server configuration
"""
test_platform_name = 'test_platform'
test_configuration_name = 'test_configuration'
test_configuration = FakeConfiguration(settings=FakeConfigurationSettings(settings_name=test_configuration_name))
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform')
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform_name)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform_name,
configuration=test_configuration)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/*'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertEqual(len(sections_merged), 2)
def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationDerivedNoTestNoDedicatedSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when the platform + configuration is set, and the configuration is not a test nor
server configuration, but is derived from another configuration
"""
test_platform_name = 'test_platform'
test_configuration_name = 'test_configuration'
base_test_configuration_name = 'base_configuration'
test_configuration = FakeConfiguration(
settings=FakeConfigurationSettings(settings_name=test_configuration_name,
base_config=FakeConfiguration(FakeConfigurationSettings(settings_name=base_test_configuration_name))))
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform')
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform_name)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform_name,
configuration=test_configuration)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/*'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('{}/{}'.format(test_platform_name, base_test_configuration_name), sections_merged)
self.assertEqual(len(sections_merged), 3)
def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedTestDedicatedSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when the platform + configuration is set, and the configuration is a test and a
server configuration
"""
test_platform_name = 'test_platform'
test_configuration_name = 'test_configuration'
test_configuration = FakeConfiguration(settings=FakeConfigurationSettings(settings_name=test_configuration_name),
is_test=True,
is_server=True)
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform')
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform_name)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform_name,
configuration=test_configuration)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/dedicated,test', sections_merged)
self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/test,dedicated', sections_merged)
self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertEqual(len(sections_merged), 8)
def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedTestNoDedicatedSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when the platform + configuration is set, and the configuration is a test but not a
server configuration
"""
test_platform_name = 'test_platform'
test_configuration_name = 'test_configuration'
test_configuration = FakeConfiguration(
settings=FakeConfigurationSettings(settings_name=test_configuration_name),
is_test=True,
is_server=False)
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform')
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform_name)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform_name,
configuration=test_configuration)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/*'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/test', sections_merged)
self.assertIn('{}/*/test'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/test'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/dedicated,test', sections_merged)
self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/test,dedicated', sections_merged)
self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertEqual(len(sections_merged), 11)
def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedNoTestDedicatedSection_Success(self):
"""
Test scenario:
Test the merge_kw_dict when the platform + configuration is set, and the configuration is a server but not a
test configuration
"""
test_platform_name = 'test_platform'
test_configuration_name = 'test_configuration'
test_configuration = FakeConfiguration(
settings=FakeConfigurationSettings(settings_name=test_configuration_name),
is_test=False,
is_server=True)
fake_context = FakeContext()
fake_platform_settings = FakePlatformSettings(platform_name='test_platform')
def _mock_get_platform_settings(platform):
self.assertEqual(platform, test_platform_name)
return fake_platform_settings
fake_context.get_platform_settings = _mock_get_platform_settings
test_dict = {}
test_settings = self.createSimpleSettings(fake_context=fake_context,
test_dict=test_dict)
sections_merged = set()
def _mock_merge_kw_section(section, target, merge_kw):
sections_merged.add(section)
pass
test_settings.merge_kw_section = _mock_merge_kw_section
test_merge_kw = {}
test_settings.merge_kw_dict(target='test_target',
merge_kw=test_merge_kw,
platform=test_platform_name,
configuration=test_configuration)
# Validate all the sections passed to the merge_kw_dict
self.assertIn('{}/*'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/dedicated', sections_merged)
self.assertIn('{}/*/dedicated'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/dedicated,test', sections_merged)
self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertIn('*/*/test,dedicated', sections_merged)
self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
self.assertEqual(len(sections_merged), 11)
| 43.304587 | 191 | 0.637897 | 16,837 | 0.713402 | 344 | 0.014576 | 5,278 | 0.223635 | 0 | 0 | 4,643 | 0.196729 |
16ba68b504461ec3bb45c6f18a8ccf9704c15e7e | 7,471 | py | Python | linprog_curvefit.py | drofp/linprog_curvefit | 96ba704edae7cea42d768d7cc6d4036da2ba313a | [
"Apache-2.0"
] | null | null | null | linprog_curvefit.py | drofp/linprog_curvefit | 96ba704edae7cea42d768d7cc6d4036da2ba313a | [
"Apache-2.0"
] | 3 | 2019-11-22T08:04:18.000Z | 2019-11-26T06:55:36.000Z | linprog_curvefit.py | drofp/linprog_curvefit | 96ba704edae7cea42d768d7cc6d4036da2ba313a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
"""Curve fitting with linear programming.
Minimizes the sum of error for each fit point to find the optimal coefficients
for a given polynomial.
Overview:
Objective: Sum of errors
Subject to: Bounds on coefficients
Credit: "Curve Fitting with Linear Programming", H. Swanson and R. E. D. Woolsey
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import enum
import string
from ortools.linear_solver import pywraplp
class ErrorDefinition(enum.Enum):
SUM_ABS_DEV = enum.auto()
SUM_MAX_DEVIATION = enum.auto()
def _generate_variables(solver, points, coeff_ranges, err_max, error_def):
"""Create coefficient variables.
Initial version works for up to 26 variable polynomial. One letter per
english alphabet used for coefficient names.
TODO(drofp): Figure out naming scheme for arbitrary number of variables.
"""
num_of_coeff = len(coeff_ranges)
variables = []
coeff_names = []
# Add coefficients to variable list.
if num_of_coeff == 2:
coeff_names.append('m')
coeff_names.append('b')
else:
for letter_cnt in range(num_of_coeff):
coeff_names.append(string.ascii_lowercase[letter_cnt])
for coeff_num in range(num_of_coeff):
if coeff_ranges[coeff_num][0] is None:
lower_bound = -solver.Infinity()
else:
lower_bound = coeff_ranges[coeff_num][0]
if coeff_ranges[coeff_num][1] is None:
upper_bound = solver.Infinity()
else:
upper_bound = coeff_ranges[coeff_num][1]
variables.append(
solver.NumVar(lower_bound, upper_bound, coeff_names[coeff_num]))
# Add absolute error variables to variable list
for point_cnt in range(len(points)):
positive_err_var = solver.NumVar(
0, err_max, 'e' + str(point_cnt + 1) + '_plus')
negative_err_var = solver.NumVar(
0, err_max, 'e' + str(point_cnt + 1) + '_minus')
variables.append(positive_err_var)
variables.append(negative_err_var)
return variables
def _generate_objective_fn(
solver, num_of_coeff, variables, error_def=ErrorDefinition.SUM_ABS_DEV):
"""Generate objective function for given error definition."""
objective = solver.Objective()
for variable in variables[num_of_coeff:]:
objective.SetCoefficient(variable, 1)
return objective
def _generate_constraints(solver, points, num_of_coeff, variables):
constraints = []
for point_num, point in enumerate(points):
# Equivalency constraint
constraint = solver.Constraint(point[1], point[1])
# Resultant Coefficient terms
for coeff_num, coeff in enumerate(variables[:num_of_coeff]):
power = num_of_coeff - coeff_num - 1
x_val = point[0] ** power
constraint.SetCoefficient(coeff, x_val)
# Error terms
ex_plus = variables[num_of_coeff + 2 * point_num]
ex_minus = variables[num_of_coeff + 2 * point_num + 1]
constraint.SetCoefficient(ex_plus, -1)
constraint.SetCoefficient(ex_minus, 1)
constraints.append(constraint)
return constraints
def get_optimal_polynomial(
points=None, coeff_ranges=None, error_def=ErrorDefinition.SUM_ABS_DEV,
err_max=10000, solver=None):
"""Optimize coefficients for any order polynomial.
Args:
points: A tuple of points, represented as tuples (x, y)
        coeff_ranges: A tuple of valid coefficient ranges, represented as tuples
            (min, max). Number of elements in the list determines order of polynomial,
            from highest order (0th index) to lowest order (nth index).
        error_def: An ErrorDefinition enum, specifying the definition for error.
        err_max: An Integer, specifying the maximum error allowable.
        solver: an ortools.pywraplp.Solver object, if a specific solver instance is
            requested by caller.
    Returns:
        A Dictionary, the desired coefficients mapped to their values.
"""
if coeff_ranges is None:
raise ValueError('Please provide appropriate coefficient range.')
if solver is None:
solver = pywraplp.Solver(
'polynomial_solver', pywraplp.Solver.GLOP_LINEAR_PROGRAMMING)
variables = _generate_variables(
solver, points, coeff_ranges, err_max=err_max,
error_def=error_def)
num_of_coeff = len(coeff_ranges)
_generate_objective_fn(solver, num_of_coeff, variables)
_generate_constraints(solver, points, num_of_coeff, variables)
solver.Solve()
var_to_val = dict()
for coeff in variables[:num_of_coeff]:
var_to_val[coeff.name()] = coeff.solution_value()
return var_to_val
def demo_optimal_linear_5points():
"""Demonstration of getting optimal linear polynomial.
Uses 5 points from Swanson's curve fitting paper.
"""
print('STARTING LINEAR DEMO WITH 5 POINTS FROM SWANSON PAPER')
points = (0,1), (1,3), (2,2), (3,4), (4,5)
coeff_ranges = ((None, None), (None, None))
# solver = pywraplp.Solver(
# 'polynomial_solver', pywraplp.Solver.GLOP_LINEAR_PROGRAMMING)
optimized_coefficients = get_optimal_polynomial(
points=points, coeff_ranges=coeff_ranges)
for elm in optimized_coefficients:
print('elm: {}'.format(elm))
print(
'type(optimized_coefficients): {}'.format(
type(optimized_coefficients)))
print('optimized_coefficients: {}'.format(optimized_coefficients))
# m, b = optimized_coefficients
# print('Optimized m: {}, b: {}'.format(m, b))
def demo_optimal_linear_10points():
print('STARTING LINEAR DEMO WITH 10 POINTS FROM WILLIAMS')
x_vals = [0.0, 0.5, 1.0, 1.5, 1.9, 2.5, 3.0, 3.5, 4.0, 4.5]
y_vals = [1.0, 0.9, 0.7, 1.5, 2.0, 2.4, 3.2, 2.0, 2.7, 3.5]
points = tuple(zip(x_vals, y_vals))
coeff_ranges = ((None, None), (None, None))
print(get_optimal_polynomial(points=points, coeff_ranges=coeff_ranges))
def demo_optimal_quadratic_10points():
print('STARTING QUADRATIC DEMO WITH 10 POINTS FROM WILLIAMS')
x_vals = [0.0, 0.5, 1.0, 1.5, 1.9, 2.5, 3.0, 3.5, 4.0, 4.5]
y_vals = [1.0, 0.9, 0.7, 1.5, 2.0, 2.4, 3.2, 2.0, 2.7, 3.5]
points = tuple(zip(x_vals, y_vals))
coeff_ranges = ((None, None), (None, None), (None, None))
print(get_optimal_polynomial(points=points, coeff_ranges=coeff_ranges))
def demo_optimal_quadratic_19points():
print('STARTING QUADRATIC DEMO WITH 19 POINTS FROM WILLIAMS')
x_vals = [0.0, 0.5, 1.0, 1.5, 1.9, 2.5, 3.0, 3.5, 4.0, 4.5]
x_vals.extend([5.0, 5.5, 6.0, 6.6, 7.0, 7.6, 8.5, 9.0, 10.0])
y_vals = [1.0, 0.9, 0.7, 1.5, 2.0, 2.4, 3.2, 2.0, 2.7, 3.5]
y_vals.extend([1.0, 4.0, 3.6, 2.7, 5.7, 4.6, 6.0, 6.8, 7.3])
points = tuple(zip(x_vals, y_vals))
coeff_ranges = ((None, None), (None, None), (None, None))
print(get_optimal_polynomial(points=points, coeff_ranges=coeff_ranges))
def demo_optimal_cubic_10points():
print('STARTING CUBIC DEMO WITH 10 POINTS FROM WILLIAMS')
x_vals = [0.0, 0.5, 1.0, 1.5, 1.9, 2.5, 3.0, 3.5, 4.0, 4.5]
y_vals = [1.0, 0.9, 0.7, 1.5, 2.0, 2.4, 3.2, 2.0, 2.7, 3.5]
points = tuple(zip(x_vals, y_vals))
coeff_ranges = ((None, None), (None, None), (None, None), (None, None))
print(get_optimal_polynomial(points=points, coeff_ranges=coeff_ranges))
def main():
demo_optimal_quadratic_19points()
if __name__ == '__main__':
main() | 39.115183 | 80 | 0.674073 | 99 | 0.013251 | 0 | 0 | 0 | 0 | 0 | 0 | 2,200 | 0.294472 |
16bce26f2376d0aa7170df9f650a479bf160647c | 11,177 | py | Python | build-script-helper.py | aciidb0mb3r/swift-stress-tester | aad9df89d2aae4640e9f4e06c234818c6b3ed434 | [
"Apache-2.0"
] | null | null | null | build-script-helper.py | aciidb0mb3r/swift-stress-tester | aad9df89d2aae4640e9f4e06c234818c6b3ed434 | [
"Apache-2.0"
] | null | null | null | build-script-helper.py | aciidb0mb3r/swift-stress-tester | aad9df89d2aae4640e9f4e06c234818c6b3ed434 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
This source file is part of the Swift.org open source project
Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
Licensed under Apache License v2.0 with Runtime Library Exception
See https://swift.org/LICENSE.txt for license information
See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
------------------------------------------------------------------------------
This is a helper script for the main swift repository's build-script.py that
knows how to build and install the stress tester utilities given a swift
workspace.
"""
from __future__ import print_function
import argparse
import sys
import os, platform
import subprocess
def printerr(message):
print(message, file=sys.stderr)
def main(argv_prefix = []):
args = parse_args(argv_prefix + sys.argv[1:])
run(args)
def parse_args(args):
parser = argparse.ArgumentParser(prog='BUILD-SCRIPT-HELPER.PY')
parser.add_argument('--package-dir', default='SourceKitStressTester')
parser.add_argument('-v', '--verbose', action='store_true', help='log executed commands')
parser.add_argument('--prefix', help='install path')
parser.add_argument('--config', default='debug')
parser.add_argument('--build-dir', default='.build')
parser.add_argument('--multiroot-data-file', help='Path to an Xcode workspace to create a unified build of SwiftSyntax with other projects.')
parser.add_argument('--toolchain', required=True, help='the toolchain to use when building this package')
parser.add_argument('--update', action='store_true', help='update all SwiftPM dependencies')
parser.add_argument('--no-local-deps', action='store_true', help='use normal remote dependencies when building')
parser.add_argument('build_actions', help="Extra actions to perform. Can be any number of the following", choices=['all', 'build', 'test', 'install', 'generate-xcodeproj'], nargs="*", default=['build'])
parsed = parser.parse_args(args)
if ("install" in parsed.build_actions or "all" in parsed.build_actions) and not parsed.prefix:
        parser.error("'--prefix' is required with the install action")
parsed.swift_exec = os.path.join(parsed.toolchain, 'usr', 'bin', 'swift')
parsed.sourcekitd_dir = os.path.join(parsed.toolchain, 'usr', 'lib')
# Convert package_dir to absolute path, relative to root of repo.
repo_path = os.path.dirname(__file__)
parsed.package_dir = os.path.realpath(
os.path.join(repo_path, parsed.package_dir))
# Convert build_dir to absolute path, relative to package_dir.
parsed.build_dir = os.path.join(parsed.package_dir, parsed.build_dir)
return parsed
def run(args):
sourcekit_searchpath=args.sourcekitd_dir
package_name = os.path.basename(args.package_dir)
env = dict(os.environ)
# Use local dependencies (i.e. checked out next sourcekit-lsp).
if not args.no_local_deps:
env['SWIFTCI_USE_LOCAL_DEPS'] = "1"
if args.update:
print("** Updating dependencies of %s **" % package_name)
try:
update_swiftpm_dependencies(package_dir=args.package_dir,
swift_exec=args.swift_exec,
build_dir=args.build_dir,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Updating dependencies of %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
# The test action creates its own build. No need to build if we are just testing
if should_run_any_action(['build', 'install'], args.build_actions):
print("** Building %s **" % package_name)
try:
invoke_swift(package_dir=args.package_dir,
swift_exec=args.swift_exec,
action='build',
products=get_products(args.package_dir),
sourcekit_searchpath=sourcekit_searchpath,
build_dir=args.build_dir,
multiroot_data_file=args.multiroot_data_file,
config=args.config,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Building %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
output_dir = os.path.realpath(os.path.join(args.build_dir, args.config))
if should_run_action("generate-xcodeproj", args.build_actions):
print("** Generating Xcode project for %s **" % package_name)
try:
generate_xcodeproj(args.package_dir,
swift_exec=args.swift_exec,
sourcekit_searchpath=sourcekit_searchpath,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Generating the Xcode project failed')
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
if should_run_action("test", args.build_actions):
print("** Testing %s **" % package_name)
try:
invoke_swift(package_dir=args.package_dir,
swift_exec=args.swift_exec,
action='test',
products=['%sPackageTests' % package_name],
sourcekit_searchpath=sourcekit_searchpath,
build_dir=args.build_dir,
multiroot_data_file=args.multiroot_data_file,
config=args.config,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Testing %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
if should_run_action("install", args.build_actions):
print("** Installing %s **" % package_name)
stdlib_dir = os.path.join(args.toolchain, 'usr', 'lib', 'swift', 'macosx')
try:
install_package(args.package_dir,
install_dir=args.prefix,
sourcekit_searchpath=sourcekit_searchpath,
build_dir=output_dir,
rpaths_to_delete=[stdlib_dir],
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Installing %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
# Returns true if any of the actions in `action_names` should be run.
def should_run_any_action(action_names, selected_actions):
for action_name in action_names:
if should_run_action(action_name, selected_actions):
return True
return False
def should_run_action(action_name, selected_actions):
if action_name in selected_actions:
return True
elif "all" in selected_actions:
return True
else:
return False
def update_swiftpm_dependencies(package_dir, swift_exec, build_dir, env, verbose):
args = [swift_exec, 'package', '--package-path', package_dir, '--build-path', build_dir, 'update']
check_call(args, env=env, verbose=verbose)
def invoke_swift(package_dir, swift_exec, action, products, sourcekit_searchpath, build_dir, multiroot_data_file, config, env, verbose):
# Until rdar://53881101 is implemented, we cannot request a build of multiple
# targets simultaneously. For now, just build one product after the other.
for product in products:
invoke_swift_single_product(package_dir, swift_exec, action, product, sourcekit_searchpath, build_dir, multiroot_data_file, config, env, verbose)
def invoke_swift_single_product(package_dir, swift_exec, action, product, sourcekit_searchpath, build_dir, multiroot_data_file, config, env, verbose):
args = [swift_exec, action, '--package-path', package_dir, '-c', config, '--build-path', build_dir]
if multiroot_data_file:
args.extend(['--multiroot-data-file', multiroot_data_file])
if action == 'test':
args.extend(['--test-product', product])
else:
args.extend(['--product', product])
# Tell SwiftSyntax that we are building in a build-script environment so that
    # it does not need to be rebuilt if it has already been built before.
env['SWIFT_BUILD_SCRIPT_ENVIRONMENT'] = '1'
env['SWIFT_STRESS_TESTER_SOURCEKIT_SEARCHPATH'] = sourcekit_searchpath
check_call(args, env=env, verbose=verbose)
def install_package(package_dir, install_dir, sourcekit_searchpath, build_dir, rpaths_to_delete, verbose):
bin_dir = os.path.join(install_dir, 'bin')
lib_dir = os.path.join(install_dir, 'lib', 'swift', 'macosx')
for directory in [bin_dir, lib_dir]:
if not os.path.exists(directory):
os.makedirs(directory)
# Install sk-stress-test and sk-swiftc-wrapper
for product in get_products(package_dir):
src = os.path.join(build_dir, product)
dest = os.path.join(bin_dir, product)
# Create a copy of the list since we modify it
rpaths_to_delete_for_this_product = list(rpaths_to_delete)
        # Add the rpath to the stdlib in the toolchain
rpaths_to_add = ['@executable_path/../lib/swift/macosx']
if product in ['sk-stress-test', 'swift-evolve']:
# Make the rpath to sourcekitd relative in the toolchain
rpaths_to_delete_for_this_product += [sourcekit_searchpath]
rpaths_to_add += ['@executable_path/../lib']
install(src, dest,
rpaths_to_delete=rpaths_to_delete_for_this_product,
rpaths_to_add=rpaths_to_add,
verbose=verbose)
def install(src, dest, rpaths_to_delete, rpaths_to_add, verbose):
copy_cmd=['rsync', '-a', src, dest]
print('installing %s to %s' % (os.path.basename(src), dest))
check_call(copy_cmd, verbose=verbose)
for rpath in rpaths_to_delete:
remove_rpath(dest, rpath, verbose=verbose)
for rpath in rpaths_to_add:
add_rpath(dest, rpath, verbose=verbose)
def generate_xcodeproj(package_dir, swift_exec, sourcekit_searchpath, env, verbose):
package_name = os.path.basename(package_dir)
config_path = os.path.join(package_dir, 'Config.xcconfig')
with open(config_path, 'w') as config_file:
config_file.write('''
SYSTEM_FRAMEWORK_SEARCH_PATHS = {sourcekit_searchpath} $(inherited)
LD_RUNPATH_SEARCH_PATHS = {sourcekit_searchpath} $(inherited)
'''.format(sourcekit_searchpath=sourcekit_searchpath))
xcodeproj_path = os.path.join(package_dir, '%s.xcodeproj' % package_name)
args = [swift_exec, 'package', '--package-path', package_dir, 'generate-xcodeproj', '--xcconfig-overrides', config_path, '--output', xcodeproj_path]
check_call(args, env=env, verbose=verbose)
def add_rpath(binary, rpath, verbose):
cmd = ['install_name_tool', '-add_rpath', rpath, binary]
check_call(cmd, verbose=verbose)
def remove_rpath(binary, rpath, verbose):
cmd = ['install_name_tool', '-delete_rpath', rpath, binary]
check_call(cmd, verbose=verbose)
def check_call(cmd, verbose, env=os.environ, **kwargs):
if verbose:
print(' '.join([escape_cmd_arg(arg) for arg in cmd]))
return subprocess.check_call(cmd, env=env, stderr=subprocess.STDOUT, **kwargs)
def interleave(value, list):
return [item for pair in zip([value] * len(list), list) for item in pair]
def escape_cmd_arg(arg):
if '"' in arg or ' ' in arg:
return '"%s"' % arg.replace('"', '\\"')
else:
return arg
def get_products(package_dir):
# FIXME: We ought to be able to query SwiftPM for this info.
if package_dir.endswith("/SourceKitStressTester"):
return ['sk-stress-test', 'sk-swiftc-wrapper']
elif package_dir.endswith("/SwiftEvolve"):
return ['swift-evolve']
else:
return []
if __name__ == '__main__':
main()
| 38.277397 | 204 | 0.711014 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,545 | 0.317169 |
16bd3669143df2de8767a9c8bf39a0f217eb03a8 | 1,701 | py | Python | tests/components/deconz/test_scene.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 1 | 2021-07-08T20:09:55.000Z | 2021-07-08T20:09:55.000Z | tests/components/deconz/test_scene.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 47 | 2021-02-21T23:43:07.000Z | 2022-03-31T06:07:10.000Z | tests/components/deconz/test_scene.py | OpenPeerPower/core | f673dfac9f2d0c48fa30af37b0a99df9dd6640ee | [
"Apache-2.0"
] | null | null | null | """deCONZ scene platform tests."""
from unittest.mock import patch
from openpeerpower.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON
from openpeerpower.const import ATTR_ENTITY_ID
from .test_gateway import (
DECONZ_WEB_REQUEST,
mock_deconz_put_request,
setup_deconz_integration,
)
async def test_no_scenes(opp, aioclient_mock):
"""Test that scenes can be loaded without scenes being available."""
await setup_deconz_integration(opp, aioclient_mock)
assert len(opp.states.async_all()) == 0
async def test_scenes(opp, aioclient_mock):
"""Test that scenes works."""
data = {
"groups": {
"1": {
"id": "Light group id",
"name": "Light group",
"type": "LightGroup",
"state": {"all_on": False, "any_on": True},
"action": {},
"scenes": [{"id": "1", "name": "Scene"}],
"lights": [],
}
}
}
with patch.dict(DECONZ_WEB_REQUEST, data):
config_entry = await setup_deconz_integration(opp, aioclient_mock)
assert len(opp.states.async_all()) == 1
assert opp.states.get("scene.light_group_scene")
# Verify service calls
mock_deconz_put_request(
aioclient_mock, config_entry.data, "/groups/1/scenes/1/recall"
)
# Service turn on scene
await opp.services.async_call(
SCENE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "scene.light_group_scene"},
blocking=True,
)
assert aioclient_mock.mock_calls[1][2] == {}
await opp.config_entries.async_unload(config_entry.entry_id)
assert len(opp.states.async_all()) == 0
| 27.885246 | 82 | 0.627278 | 0 | 0 | 0 | 0 | 0 | 0 | 1,382 | 0.812463 | 388 | 0.228101 |
16bd643a28b81f74d29d0b9a43b20d245093f663 | 12,716 | py | Python | tensorhive/config.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 129 | 2017-08-25T11:45:15.000Z | 2022-03-29T05:11:25.000Z | tensorhive/config.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 251 | 2017-07-27T10:05:58.000Z | 2022-03-02T12:46:13.000Z | tensorhive/config.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 20 | 2017-08-13T13:05:14.000Z | 2022-03-19T02:21:37.000Z | from pathlib import PosixPath
import configparser
from typing import Dict, Optional, Any, List
from inspect import cleandoc
import shutil
import tensorhive
import os
import logging
log = logging.getLogger(__name__)
class CONFIG_FILES:
# Where to copy files
# (TensorHive tries to load these by default)
config_dir = PosixPath.home() / '.config/TensorHive'
MAIN_CONFIG_PATH = str(config_dir / 'main_config.ini')
HOSTS_CONFIG_PATH = str(config_dir / 'hosts_config.ini')
MAILBOT_CONFIG_PATH = str(config_dir / 'mailbot_config.ini')
# Where to get file templates from
# (Clone file when it's not found in config directory)
tensorhive_package_dir = PosixPath(__file__).parent
MAIN_CONFIG_TEMPLATE_PATH = str(tensorhive_package_dir / 'main_config.ini')
HOSTS_CONFIG_TEMPLATE_PATH = str(tensorhive_package_dir / 'hosts_config.ini')
MAILBOT_TEMPLATE_CONFIG_PATH = str(tensorhive_package_dir / 'mailbot_config.ini')
ALEMBIC_CONFIG_PATH = str(tensorhive_package_dir / 'alembic.ini')
MIGRATIONS_CONFIG_PATH = str(tensorhive_package_dir / 'migrations')
class ConfigInitilizer:
'''Makes sure that all default config files exist'''
def __init__(self):
# 1. Check if all config files exist
all_exist = PosixPath(CONFIG_FILES.MAIN_CONFIG_PATH).exists() and \
PosixPath(CONFIG_FILES.HOSTS_CONFIG_PATH).exists() and \
PosixPath(CONFIG_FILES.MAILBOT_CONFIG_PATH).exists()
if not all_exist:
log.warning('[•] Detected missing default config file(s), recreating...')
self.recreate_default_configuration_files()
        else:
            log.info('[•] All configs already exist, skipping...')
def recreate_default_configuration_files(self) -> None:
try:
            # 1. Create directory for storing config files
CONFIG_FILES.config_dir.mkdir(parents=True, exist_ok=True)
# 2. Clone templates safely from `tensorhive` package
self.safe_copy(src=CONFIG_FILES.MAIN_CONFIG_TEMPLATE_PATH, dst=CONFIG_FILES.MAIN_CONFIG_PATH)
self.safe_copy(src=CONFIG_FILES.HOSTS_CONFIG_TEMPLATE_PATH, dst=CONFIG_FILES.HOSTS_CONFIG_PATH)
self.safe_copy(src=CONFIG_FILES.MAILBOT_TEMPLATE_CONFIG_PATH, dst=CONFIG_FILES.MAILBOT_CONFIG_PATH)
# 3. Change config files permission
rw_owner_only = 0o600
os.chmod(CONFIG_FILES.MAIN_CONFIG_PATH, rw_owner_only)
os.chmod(CONFIG_FILES.HOSTS_CONFIG_PATH, rw_owner_only)
os.chmod(CONFIG_FILES.MAILBOT_CONFIG_PATH, rw_owner_only)
except Exception:
log.error('[✘] Unable to recreate configuration files.')
def safe_copy(self, src: str, dst: str) -> None:
'''Safe means that it won't override existing configuration'''
if PosixPath(dst).exists():
log.info('Skipping, file already exists: {}'.format(dst))
else:
shutil.copy(src, dst)
log.info('Copied {} to {}'.format(src, dst))
class ConfigLoader:
@staticmethod
def load(path, displayed_title=''):
import configparser
config = configparser.ConfigParser(strict=False)
full_path = PosixPath(path).expanduser()
if config.read(str(full_path)):
log.info('[•] Reading {} config from {}'.format(displayed_title, full_path))
else:
log.warning('[✘] Configuration file not found ({})'.format(full_path))
log.info('Using default {} settings from config.py'.format(displayed_title))
return config
ConfigInitilizer()
config = ConfigLoader.load(CONFIG_FILES.MAIN_CONFIG_PATH, displayed_title='main')
def display_config(cls):
'''
    Displays all uppercase class attributes (class must be defined first)
Example usage: display_config(API_SERVER)
'''
print('[{class_name}]'.format(class_name=cls.__name__))
for key, value in cls.__dict__.items():
if key.isupper():
print('{} = {}'.format(key, value))
def check_env_var(name: str):
'''Makes sure that env variable is declared'''
if not os.getenv(name):
msg = cleandoc(
'''
{env} - undeclared environment variable!
Try this: `export {env}="..."`
''').format(env=name).split('\n')
log.warning(msg[0])
log.warning(msg[1])
class SSH:
section = 'ssh'
HOSTS_CONFIG_FILE = config.get(section, 'hosts_config_file', fallback=CONFIG_FILES.HOSTS_CONFIG_PATH)
TEST_ON_STARTUP = config.getboolean(section, 'test_on_startup', fallback=True)
TIMEOUT = config.getfloat(section, 'timeout', fallback=10.0)
NUM_RETRIES = config.getint(section, 'number_of_retries', fallback=1)
KEY_FILE = config.get(section, 'key_file', fallback='~/.config/TensorHive/ssh_key')
def hosts_config_to_dict(path: str) -> Dict: # type: ignore
'''Parses sections containing hostnames'''
hosts_config = ConfigLoader.load(path, displayed_title='hosts')
result = {}
for section in hosts_config.sections():
# We want to parse only sections which describe target hosts
if section == 'proxy_tunneling':
continue
hostname = section
result[hostname] = {
'user': hosts_config.get(hostname, 'user'),
'port': hosts_config.getint(hostname, 'port', fallback=22)
}
return result
def proxy_config_to_dict(path: str) -> Optional[Dict]: # type: ignore
'''Parses [proxy_tunneling] section'''
config = ConfigLoader.load(path, displayed_title='proxy')
section = 'proxy_tunneling'
# Check if section is present and if yes, check if tunneling is enabled
if config.has_section(section) and config.getboolean(section, 'enabled', fallback=False):
return {
'proxy_host': config.get(section, 'proxy_host'),
'proxy_user': config.get(section, 'proxy_user'),
'proxy_port': config.getint(section, 'proxy_port', fallback=22)
}
else:
return None
AVAILABLE_NODES = hosts_config_to_dict(HOSTS_CONFIG_FILE)
PROXY = proxy_config_to_dict(HOSTS_CONFIG_FILE)
class DB:
section = 'database'
default_path = '~/.config/TensorHive/database.sqlite'
def uri_for_path(path: str) -> str: # type: ignore
return 'sqlite:///{}'.format(PosixPath(path).expanduser())
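    # Illustrative result: uri_for_path('~/db.sqlite') -> 'sqlite:////home/<user>/db.sqlite'
    # (the expanded absolute path yields four slashes after 'sqlite:').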
SQLALCHEMY_DATABASE_URI = uri_for_path(config.get(section, 'path', fallback=default_path))
TEST_DATABASE_URI = 'sqlite://' # Use in-memory (before: sqlite:///test_database.sqlite)
class API:
section = 'api'
TITLE = config.get(section, 'title', fallback='TensorHive API')
URL_HOSTNAME = config.get(section, 'url_hostname', fallback='0.0.0.0')
URL_PREFIX = config.get(section, 'url_prefix', fallback='api')
SPEC_FILE = config.get(section, 'spec_file', fallback='api_specification.yml')
IMPL_LOCATION = config.get(section, 'impl_location', fallback='tensorhive.api.controllers')
import yaml
respones_file_path = str(PosixPath(__file__).parent / 'controllers/responses.yml')
with open(respones_file_path, 'r') as file:
RESPONSES = yaml.safe_load(file)
class APP_SERVER:
section = 'web_app.server'
BACKEND = config.get(section, 'backend', fallback='gunicorn')
HOST = config.get(section, 'host', fallback='0.0.0.0')
PORT = config.getint(section, 'port', fallback=5000)
WORKERS = config.getint(section, 'workers', fallback=4)
LOG_LEVEL = config.get(section, 'loglevel', fallback='warning')
class API_SERVER:
section = 'api.server'
BACKEND = config.get(section, 'backend', fallback='gevent')
HOST = config.get(section, 'host', fallback='0.0.0.0')
PORT = config.getint(section, 'port', fallback=1111)
DEBUG = config.getboolean(section, 'debug', fallback=False)
class MONITORING_SERVICE:
section = 'monitoring_service'
ENABLED = config.getboolean(section, 'enabled', fallback=True)
ENABLE_GPU_MONITOR = config.getboolean(section, 'enable_gpu_monitor', fallback=True)
UPDATE_INTERVAL = config.getfloat(section, 'update_interval', fallback=2.0)
class PROTECTION_SERVICE:
section = 'protection_service'
ENABLED = config.getboolean(section, 'enabled', fallback=True)
UPDATE_INTERVAL = config.getfloat(section, 'update_interval', fallback=2.0)
NOTIFY_ON_PTY = config.getboolean(section, 'notify_on_pty', fallback=True)
NOTIFY_VIA_EMAIL = config.getboolean(section, 'notify_via_email', fallback=False)
class MAILBOT:
mailbot_config = ConfigLoader.load(CONFIG_FILES.MAILBOT_CONFIG_PATH, displayed_title='mailbot')
section = 'general'
INTERVAL = mailbot_config.getfloat(section, 'interval', fallback=10.0)
MAX_EMAILS_PER_PROTECTION_INTERVAL = mailbot_config.getint(section,
'max_emails_per_protection_interval', fallback=50)
NOTIFY_INTRUDER = mailbot_config.getboolean(section, 'notify_intruder', fallback=True)
NOTIFY_ADMIN = mailbot_config.getboolean(section, 'notify_admin', fallback=False)
ADMIN_EMAIL = mailbot_config.get(section, 'admin_email', fallback=None)
section = 'smtp'
SMTP_LOGIN = mailbot_config.get(section, 'email', fallback=None)
SMTP_PASSWORD = mailbot_config.get(section, 'password', fallback=None)
SMTP_SERVER = mailbot_config.get(section, 'smtp_server', fallback=None)
SMTP_PORT = mailbot_config.getint(section, 'smtp_port', fallback=587)
section = 'template/intruder'
INTRUDER_SUBJECT = mailbot_config.get(section, 'subject')
INTRUDER_BODY_TEMPLATE = mailbot_config.get(section, 'html_body')
section = 'template/admin'
ADMIN_SUBJECT = mailbot_config.get(section, 'subject')
ADMIN_BODY_TEMPLATE = mailbot_config.get(section, 'html_body')
class USAGE_LOGGING_SERVICE:
section = 'usage_logging_service'
default_path = '~/.config/TensorHive/logs/'
def full_path(path: str) -> str: # type: ignore
return str(PosixPath(path).expanduser())
ENABLED = config.getboolean(section, 'enabled', fallback=True)
UPDATE_INTERVAL = config.getfloat(section, 'update_interval', fallback=2.0)
LOG_DIR = full_path(config.get(section, 'log_dir', fallback=default_path))
LOG_CLEANUP_ACTION = config.getint(section, 'log_cleanup_action', fallback=2)
class JOB_SCHEDULING_SERVICE:
section = 'job_scheduling_service'
ENABLED = config.getboolean(section, 'enabled', fallback=True)
UPDATE_INTERVAL = config.getfloat(section, 'update_interval', fallback=30.0)
STOP_TERMINATION_ATTEMPTS_AFTER = config.getfloat(section, 'stop_termination_attempts_after_mins', fallback=5.0)
SCHEDULE_QUEUED_JOBS_WHEN_FREE_MINS = config.getint(section, "schedule_queued_jobs_when_free_mins", fallback=30)
class AUTH:
from datetime import timedelta
section = 'auth'
def config_get_parsed(option: str, fallback: Any) -> List[str]: # type: ignore
'''
Parses value for option from string to a valid python list.
Fallback value is returned when anything goes wrong (e.g. option or value not present)
Example .ini file, function called with arguments: option='some_option', fallback=None
[some_section]
some_option = ['foo', 'bar']
Will return:
['foo', 'bar']
'''
import ast
try:
raw_arguments = config.get('auth', option)
parsed_arguments = ast.literal_eval(raw_arguments)
return parsed_arguments
except (configparser.Error, ValueError):
log.warning('Parsing [auth] config section failed for option "{}", using fallback value: {}'.format(
option, fallback))
return fallback
FLASK_JWT = {
'SECRET_KEY': config.get(section, 'secrect_key', fallback='jwt-some-secret'),
'JWT_BLACKLIST_ENABLED': config.getboolean(section, 'jwt_blacklist_enabled', fallback=True),
'JWT_BLACKLIST_TOKEN_CHECKS': config_get_parsed('jwt_blacklist_token_checks', fallback=['access', 'refresh']),
'BUNDLE_ERRORS': config.getboolean(section, 'bundle_errors', fallback=True),
'JWT_ACCESS_TOKEN_EXPIRES': timedelta(minutes=config.getint(section, 'jwt_access_token_expires_minutes',
fallback=1)),
'JWT_REFRESH_TOKEN_EXPIRES': timedelta(days=config.getint(section, 'jwt_refresh_token_expires_days',
fallback=1)),
'JWT_TOKEN_LOCATION': config_get_parsed('jwt_token_location', fallback=['headers'])
}
| 42.959459 | 118 | 0.681268 | 11,677 | 0.91757 | 0 | 0 | 528 | 0.04149 | 0 | 0 | 3,674 | 0.2887 |
16bdc023e7792aee5f95f6dd1ec12e9328dbed08 | 4,534 | py | Python | model.py | iz2late/baseline-seq2seq | 2bfa8981083aed8d30befeb42e41fe78d8ec1641 | [
"MIT"
] | 1 | 2021-01-06T20:49:32.000Z | 2021-01-06T20:49:32.000Z | model.py | iz2late/baseline-seq2seq | 2bfa8981083aed8d30befeb42e41fe78d8ec1641 | [
"MIT"
] | null | null | null | model.py | iz2late/baseline-seq2seq | 2bfa8981083aed8d30befeb42e41fe78d8ec1641 | [
"MIT"
] | null | null | null | import random
from typing import Tuple
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch import Tensor
class Encoder(nn.Module):
def __init__(self, input_dim, emb_dim, enc_hid_dim, dec_hid_dim, dropout):
super().__init__()
self.input_dim = input_dim
self.emb_dim = emb_dim
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.dropout = dropout
self.embedding = nn.Embedding(input_dim, emb_dim)
self.rnn = nn.GRU(emb_dim, enc_hid_dim, bidirectional = True)
self.fc = nn.Linear(enc_hid_dim * 2, dec_hid_dim)
self.dropout = nn.Dropout(dropout)
def forward(self, src):
embedded = self.dropout(self.embedding(src))
outputs, hidden = self.rnn(embedded)
# output of bi-directional rnn should be concatenated
hidden = torch.tanh(self.fc(torch.cat((hidden[-2,:,:], hidden[-1,:,:]), dim = 1)))
return outputs, hidden
class Attention(nn.Module):
def __init__(self, enc_hid_dim, dec_hid_dim, attn_dim):
super().__init__()
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.attn_in = (enc_hid_dim * 2) + dec_hid_dim
self.attn = nn.Linear(self.attn_in, attn_dim)
def forward(self, decoder_hidden, encoder_outputs):
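        # Shapes inferred from the bidirectional encoder:
        #   decoder_hidden: (batch_size, dec_hid_dim)
        #   encoder_outputs: (src_len, batch_size, enc_hid_dim * 2)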
src_len = encoder_outputs.shape[0]
repeated_decoder_hidden = decoder_hidden.unsqueeze(1).repeat(1, src_len, 1)
encoder_outputs = encoder_outputs.permute(1, 0, 2)
energy = torch.tanh(self.attn(torch.cat((
repeated_decoder_hidden,
encoder_outputs),
dim = 2)))
attention = torch.sum(energy, dim=2)
return F.softmax(attention, dim=1)
class Decoder(nn.Module):
def __init__(self, output_dim, emb_dim, enc_hid_dim, dec_hid_dim, dropout, attention):
super().__init__()
self.emb_dim = emb_dim
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.output_dim = output_dim
self.dropout = dropout
self.attention = attention
self.embedding = nn.Embedding(output_dim, emb_dim)
self.rnn = nn.GRU((enc_hid_dim * 2) + emb_dim, dec_hid_dim)
self.out = nn.Linear(self.attention.attn_in + emb_dim, output_dim)
self.dropout = nn.Dropout(dropout)
def _weighted_encoder_rep(self, decoder_hidden, encoder_outputs):
a = self.attention(decoder_hidden, encoder_outputs)
a = a.unsqueeze(1)
encoder_outputs = encoder_outputs.permute(1, 0, 2)
weighted_encoder_rep = torch.bmm(a, encoder_outputs)
weighted_encoder_rep = weighted_encoder_rep.permute(1, 0, 2)
return weighted_encoder_rep
def forward(self, input, decoder_hidden, encoder_outputs):
input = input.unsqueeze(0)
embedded = self.dropout(self.embedding(input))
weighted_encoder_rep = self._weighted_encoder_rep(decoder_hidden,
encoder_outputs)
rnn_input = torch.cat((embedded, weighted_encoder_rep), dim = 2)
output, decoder_hidden = self.rnn(rnn_input, decoder_hidden.unsqueeze(0))
embedded = embedded.squeeze(0)
output = output.squeeze(0)
weighted_encoder_rep = weighted_encoder_rep.squeeze(0)
output = self.out(torch.cat((output,
weighted_encoder_rep,
embedded), dim = 1))
return output, decoder_hidden.squeeze(0)
class Seq2Seq(nn.Module):
def __init__(self, encoder, decoder, device):
super().__init__()
self.encoder = encoder
self.decoder = decoder
self.device = device
def forward(self, src, trg, teacher_forcing_ratio=0.5):
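        # Assumed shapes: src is (src_len, batch_size), trg is (trg_len, batch_size)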
batch_size = src.shape[1]
max_len = trg.shape[0]
trg_vocab_size = self.decoder.output_dim
outputs = torch.zeros(max_len, batch_size, trg_vocab_size).to(self.device)
encoder_outputs, hidden = self.encoder(src)
# first input to the decoder is the <sos> token
output = trg[0,:]
for t in range(1, max_len):
output, hidden = self.decoder(output, hidden, encoder_outputs)
outputs[t] = output
teacher_force = random.random() < teacher_forcing_ratio
top1 = output.max(1)[1]
output = (trg[t] if teacher_force else top1)
return outputs
| 37.163934 | 90 | 0.635862 | 4,334 | 0.955889 | 0 | 0 | 0 | 0 | 0 | 0 | 100 | 0.022056 |
16be469a1debb4ce731178e138eb07a68236018a | 7,907 | py | Python | ML/Pytorch/more_advanced/Seq2Seq/seq2seq.py | xuyannus/Machine-Learning-Collection | 6d5dcd18d4e40f90e77355d56a2902e4c617ecbe | [
"MIT"
] | 3,094 | 2020-09-20T04:34:31.000Z | 2022-03-31T23:59:46.000Z | ML/Pytorch/more_advanced/Seq2Seq/seq2seq.py | xkhainguyen/Machine-Learning-Collection | 425d196e9477dbdbbd7cc0d19d29297571746ab5 | [
"MIT"
] | 79 | 2020-09-24T08:54:17.000Z | 2022-03-30T14:45:08.000Z | ML/Pytorch/more_advanced/Seq2Seq/seq2seq.py | xkhainguyen/Machine-Learning-Collection | 425d196e9477dbdbbd7cc0d19d29297571746ab5 | [
"MIT"
] | 1,529 | 2020-09-20T16:21:21.000Z | 2022-03-31T21:16:25.000Z | import torch
import torch.nn as nn
import torch.optim as optim
from torchtext.datasets import Multi30k
from torchtext.data import Field, BucketIterator
import numpy as np
import spacy
import random
from torch.utils.tensorboard import SummaryWriter # to print to tensorboard
from utils import translate_sentence, bleu, save_checkpoint, load_checkpoint
spacy_ger = spacy.load("de")
spacy_eng = spacy.load("en")
def tokenize_ger(text):
return [tok.text for tok in spacy_ger.tokenizer(text)]
def tokenize_eng(text):
return [tok.text for tok in spacy_eng.tokenizer(text)]
german = Field(tokenize=tokenize_ger, lower=True, init_token="<sos>", eos_token="<eos>")
english = Field(
tokenize=tokenize_eng, lower=True, init_token="<sos>", eos_token="<eos>"
)
train_data, valid_data, test_data = Multi30k.splits(
exts=(".de", ".en"), fields=(german, english)
)
german.build_vocab(train_data, max_size=10000, min_freq=2)
english.build_vocab(train_data, max_size=10000, min_freq=2)
class Encoder(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, num_layers, p):
super(Encoder, self).__init__()
self.dropout = nn.Dropout(p)
self.hidden_size = hidden_size
self.num_layers = num_layers
self.embedding = nn.Embedding(input_size, embedding_size)
self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers, dropout=p)
def forward(self, x):
# x shape: (seq_length, N) where N is batch size
embedding = self.dropout(self.embedding(x))
# embedding shape: (seq_length, N, embedding_size)
outputs, (hidden, cell) = self.rnn(embedding)
# outputs shape: (seq_length, N, hidden_size)
return hidden, cell
class Decoder(nn.Module):
def __init__(
self, input_size, embedding_size, hidden_size, output_size, num_layers, p
):
super(Decoder, self).__init__()
self.dropout = nn.Dropout(p)
self.hidden_size = hidden_size
self.num_layers = num_layers
self.embedding = nn.Embedding(input_size, embedding_size)
self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers, dropout=p)
self.fc = nn.Linear(hidden_size, output_size)
def forward(self, x, hidden, cell):
# x shape: (N) where N is for batch size, we want it to be (1, N), seq_length
# is 1 here because we are sending in a single word and not a sentence
x = x.unsqueeze(0)
embedding = self.dropout(self.embedding(x))
# embedding shape: (1, N, embedding_size)
outputs, (hidden, cell) = self.rnn(embedding, (hidden, cell))
# outputs shape: (1, N, hidden_size)
predictions = self.fc(outputs)
# predictions shape: (1, N, length_target_vocabulary) to send it to
# loss function we want it to be (N, length_target_vocabulary) so we're
# just gonna remove the first dim
predictions = predictions.squeeze(0)
return predictions, hidden, cell
class Seq2Seq(nn.Module):
def __init__(self, encoder, decoder):
super(Seq2Seq, self).__init__()
self.encoder = encoder
self.decoder = decoder
def forward(self, source, target, teacher_force_ratio=0.5):
batch_size = source.shape[1]
target_len = target.shape[0]
target_vocab_size = len(english.vocab)
outputs = torch.zeros(target_len, batch_size, target_vocab_size).to(device)
hidden, cell = self.encoder(source)
# Grab the first input to the Decoder which will be <SOS> token
x = target[0]
for t in range(1, target_len):
# Use previous hidden, cell as context from encoder at start
output, hidden, cell = self.decoder(x, hidden, cell)
# Store next output prediction
outputs[t] = output
# Get the best word the Decoder predicted (index in the vocabulary)
best_guess = output.argmax(1)
# With probability of teacher_force_ratio we take the actual next word
# otherwise we take the word that the Decoder predicted it to be.
# Teacher Forcing is used so that the model gets used to seeing
# similar inputs at training and testing time, if teacher forcing is 1
# then inputs at test time might be completely different than what the
# network is used to. This was a long comment.
x = target[t] if random.random() < teacher_force_ratio else best_guess
return outputs
### We're ready to define everything we need for training our Seq2Seq model ###
# Training hyperparameters
num_epochs = 100
learning_rate = 0.001
batch_size = 64
# Model hyperparameters
load_model = False
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
input_size_encoder = len(german.vocab)
input_size_decoder = len(english.vocab)
output_size = len(english.vocab)
encoder_embedding_size = 300
decoder_embedding_size = 300
hidden_size = 1024 # Needs to be the same for both RNN's
num_layers = 2
enc_dropout = 0.5
dec_dropout = 0.5
# Tensorboard to get nice loss plot
writer = SummaryWriter(f"runs/loss_plot")
step = 0
train_iterator, valid_iterator, test_iterator = BucketIterator.splits(
(train_data, valid_data, test_data),
batch_size=batch_size,
sort_within_batch=True,
sort_key=lambda x: len(x.src),
device=device,
)
encoder_net = Encoder(
input_size_encoder, encoder_embedding_size, hidden_size, num_layers, enc_dropout
).to(device)
decoder_net = Decoder(
input_size_decoder,
decoder_embedding_size,
hidden_size,
output_size,
num_layers,
dec_dropout,
).to(device)
model = Seq2Seq(encoder_net, decoder_net).to(device)
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
pad_idx = english.vocab.stoi["<pad>"]
criterion = nn.CrossEntropyLoss(ignore_index=pad_idx)
if load_model:
load_checkpoint(torch.load("my_checkpoint.pth.tar"), model, optimizer)
sentence = "ein boot mit mehreren männern darauf wird von einem großen pferdegespann ans ufer gezogen."
for epoch in range(num_epochs):
print(f"[Epoch {epoch} / {num_epochs}]")
checkpoint = {"state_dict": model.state_dict(), "optimizer": optimizer.state_dict()}
save_checkpoint(checkpoint)
model.eval()
translated_sentence = translate_sentence(
model, sentence, german, english, device, max_length=50
)
print(f"Translated example sentence: \n {translated_sentence}")
model.train()
for batch_idx, batch in enumerate(train_iterator):
# Get input and targets and get to cuda
inp_data = batch.src.to(device)
target = batch.trg.to(device)
# Forward prop
output = model(inp_data, target)
# Output is of shape (trg_len, batch_size, output_dim) but Cross Entropy Loss
# doesn't take input in that form. For example if we have MNIST we want to have
# output to be: (N, 10) and targets just (N). Here we can view it in a similar
        # way that we have output_words * batch_size that we want to send into
        # our cost function, so we need to do some reshaping. While we're at it,
        # let's also remove the start token.
output = output[1:].reshape(-1, output.shape[2])
target = target[1:].reshape(-1)
optimizer.zero_grad()
loss = criterion(output, target)
# Back prop
loss.backward()
# Clip to avoid exploding gradient issues, makes sure grads are
# within a healthy range
torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1)
# Gradient descent step
optimizer.step()
# Plot to tensorboard
writer.add_scalar("Training loss", loss, global_step=step)
step += 1
score = bleu(test_data[1:100], model, german, english, device)
print(f"Bleu score {score*100:.2f}")
| 32.539095 | 103 | 0.682433 | 3,528 | 0.446074 | 0 | 0 | 0 | 0 | 0 | 0 | 2,346 | 0.296624 |
16beddc32cad55aeba19e5840d544ba51efbce38 | 2,533 | py | Python | gail_chatbot/light/sqil/light_sentence_imitate_mixin.py | eublefar/gail_chatbot | fcb7798515c0e2c031b5127803eb8a9f1fd4f0ab | [
"MIT"
] | null | null | null | gail_chatbot/light/sqil/light_sentence_imitate_mixin.py | eublefar/gail_chatbot | fcb7798515c0e2c031b5127803eb8a9f1fd4f0ab | [
"MIT"
] | null | null | null | gail_chatbot/light/sqil/light_sentence_imitate_mixin.py | eublefar/gail_chatbot | fcb7798515c0e2c031b5127803eb8a9f1fd4f0ab | [
"MIT"
] | null | null | null | from typing import Dict, Any, List
import string
from parlai.core.agents import Agent
from parlai.core.message import Message
from random import sample
import pathlib
path = pathlib.Path(__file__).parent.absolute()
class LightImitateMixin(Agent):
"""Abstract class that handles passing expert trajectories alongside self-play sampling
"""
def __init__(self, opt: Dict[str, Any], shared: Dict[str, Any] = None):
self.id = "LightChatbotSelfPlay"
self.train_step = 0
self.self_speaker_token = "<speaker_self>"
self.other_speaker_token = "<speaker_other>"
def act(self):
raise NotImplementedError()
def batch_act(self, observations):
self.train_step += 1
# Add generated histories to data ones
imitate = []
sample = []
for i, observation in enumerate(observations):
sample.extend(
[
(dialog[0], dialog[1][:-1])
for dialog in observation["text"] if len(dialog[1]) > 0
]
)
imitate.extend(
[
dialog
for dialog in observation["text"] if len(dialog[1]) > 0
]
)
self.batch_imitate(imitate)
utterances = self.batch_sample(sample)
if (
self.train_step % self.episode_num_dialog_dump == 0
) and self.train_step != 0:
self.checkpoint([sample, utterances])
return [{"id": self.id} for _ in observations]
def batch_imitate(self, dialogs):
"""Implement sampling utterances and memorization here"""
pass
def batch_sample(self, dialogs) -> List[str]:
"""Implement update here"""
pass
def batch_update(self):
"""Update weights here"""
pass
def _update_histories(self, utterances, other=False):
for i in range(len(utterances)):
history = self.histories[i]
history.append(
(self.self_speaker_token if not other else self.other_speaker_token)
+ utterances[i]
)
self.histories[i] = history
def _convert_history_to_other(self, history):
history = [
turn.replace(self.self_speaker_token, self.other_speaker_token)
if self.self_speaker_token in turn
else turn.replace(self.other_speaker_token, self.self_speaker_token)
for turn in history
]
return history
| 29.8 | 91 | 0.586656 | 2,310 | 0.911962 | 0 | 0 | 0 | 0 | 0 | 0 | 313 | 0.123569 |
16bf36b1dcc9b129dcd361097fbc1ea1ea920674 | 1,654 | py | Python | pytudes/_2021/educative/grokking_the_coding_interview/fast_and_slow_pointers/_1__linked_list_cycle__easy.py | TeoZosa/pytudes | 4f01ab20f936bb4b3f42d1946180d4a20fd95fbf | [
"Apache-2.0"
] | 1 | 2022-02-08T09:47:35.000Z | 2022-02-08T09:47:35.000Z | pytudes/_2021/educative/grokking_the_coding_interview/fast_and_slow_pointers/_1__linked_list_cycle__easy.py | TeoZosa/pytudes | 4f01ab20f936bb4b3f42d1946180d4a20fd95fbf | [
"Apache-2.0"
] | 62 | 2021-04-02T23:41:16.000Z | 2022-03-25T13:16:10.000Z | pytudes/_2021/educative/grokking_the_coding_interview/fast_and_slow_pointers/_1__linked_list_cycle__easy.py | TeoZosa/pytudes | 4f01ab20f936bb4b3f42d1946180d4a20fd95fbf | [
"Apache-2.0"
] | null | null | null | """https://www.educative.io/courses/grokking-the-coding-interview/N7rwVyAZl6D
Categories:
- Binary
- Bit Manipulation
- Blind 75
See Also:
- pytudes/_2021/leetcode/blind_75/linked_list/_141__linked_list_cycle__easy.py
"""
from pytudes._2021.utils.linked_list import (
ListNode,
NodeType,
convert_list_to_linked_list,
)
def has_cycle(head: NodeType) -> bool:
"""
Args:
head: head of a singly-linked list of nodes
Returns:
whether or not the linked list has a cycle
Examples:
>>> has_cycle(None)
False
>>> head = ListNode("self-edge")
>>> head.next = head
>>> has_cycle(head)
True
>>> head = convert_list_to_linked_list([1,2,3,4,5,6])
>>> has_cycle(head)
False
>>> head.next.next.next.next.next.next = head.next.next
>>> has_cycle(head)
True
>>> head.next.next.next.next.next.next = head.next.next.next
>>> has_cycle(head)
True
"""
slow = fast = head
while fast is not None and fast.next is not None: # since fast ≥ slow
slow = slow.next
fast = fast.next.next
if slow == fast:
return True # found the cycle
else:
return False
def main():
head = convert_list_to_linked_list([1, 2, 3, 4, 5, 6])
print("LinkedList has cycle: " + str(has_cycle(head)))
head.next.next.next.next.next.next = head.next.next
print("LinkedList has cycle: " + str(has_cycle(head)))
head.next.next.next.next.next.next = head.next.next.next
print("LinkedList has cycle: " + str(has_cycle(head)))
main()
| 23.971014 | 82 | 0.605804 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 977 | 0.589976 |
16bf4f8f27c28015e220b292e189af4ce08ed99c | 4,417 | py | Python | httpd.py | whtt8888/TritonHTTPserver | 99adf3f1e6c3867bb870cda8434605c59409ea19 | [
"MIT"
] | 2 | 2019-04-07T06:11:56.000Z | 2019-10-14T05:08:16.000Z | httpd.py | whtt8888/TritonHTTPserver | 99adf3f1e6c3867bb870cda8434605c59409ea19 | [
"MIT"
] | null | null | null | httpd.py | whtt8888/TritonHTTPserver | 99adf3f1e6c3867bb870cda8434605c59409ea19 | [
"MIT"
] | null | null | null | import sys
import os
import socket
import time
import threading
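# Example usage (matches the sys.argv parsing in the __main__ block below):
#   python httpd.py 8080 /path/to/doc_root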
class MyServer:
def __init__(self, port, doc_root):
self.port = port
self.doc_root = doc_root
self.host = '127.0.0.1'
self.res_200 = "HTTP/1.1 200 OK\r\nServer: Myserver 1.0\r\n"
self.res_404 = "HTTP/1.1 404 NOT FOUND\r\nServer: Myserver 1.0\r\n\r\n"
self.res_400 = "HTTP/1.1 400 Client Error\r\nServer: Myserver 1.0\r\n\r\n"
self.res_close = "HTTP/1.1 Connection:close\r\nServer: Myserver 1.0\r\n\r\n"
# map request into dict
def req_info(self, request):
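        # Returns a dict such as {'act': 'GET', 'url': '<absolute file path>',
        # 'version': 'HTTP/1.1', <header fields>...}; for malformed or escaping
        # requests 'url' is set to '400malform' or '404escape' instead.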
# 400 malform
if request[-4:] != '\r\n\r\n':
info = {'url': '400malform'}
return info
headers = request.splitlines()
firstline = headers.pop(0)
try:
(act, url, version) = firstline.split()
except ValueError:
info = {'url': '400malform'}
return info
info = {'act': act, 'url': url, 'version': version}
for h in headers:
h = h.split(': ')
if len(h) < 2:
continue
field = h[0]
value = h[1]
info[field] = value
# mapping url, return 404 escape or absolute filename
# judge whether escape
path = ''
x = url.split('/')
i = 0
while i < len(x):
if '' in x:
x.remove('')
if i < 0 or x[0] == '..' or len(x) == 0: # path escape from file root
info['url'] = '404escape'
return info
if i < len(x) and x[i] == '..':
x.remove(x[i])
x.remove(x[i - 1])
i -= 1
else:
i += 1
# map index.html
if len(x[-1].split('.')) < 2:
x.append('index.html')
for d in range(len(x)):
path = path + '/' + x[d]
info['url'] = os.path.realpath(self.doc_root + path)
return info
# generate response
def res_gen(self, reqinfo):
path = reqinfo['url']
# 404 escape
if path == '404escape':
return self.res_404
# 400 malform req
if path == "400malform":
return self.res_400
try:
reqinfo['Host'] and reqinfo['User-Agent']
except KeyError:
return self.res_400
# 404 not found
if not os.path.isfile(path):
return self.res_404
# a valid 200 req
else:
res = self.res_200
res += "Last-Modified: {}\r\n".format(time.ctime(os.stat(path).st_mtime))
with open(path, "rb") as f:
data = f.read()
res += "Content-Length: {}\r\n".format(len(data))
if path.split('.')[-1] == 'html':
res += 'Content-Type: text/html\r\n\r\n'
res = res + str(data, 'utf-8')
else: # for jpg and png
if path.split('.')[-1] == 'png':
res += 'Content-Type: image/png\r\n\r\n'
else:
res += 'Content-Type: image/jpeg\r\n\r\n'
res = res + str(data)
return res
def createsocket(conn, addr):
with conn:
try:
conn.settimeout(5)
except socket.timeout:
conn.close()
# print('closed')
# print('Connected by', addr)
while True:
req = conn.recv(1024).decode()
if not req:
break
info = server.req_info(req)
msg = server.res_gen(info).encode()
conn.sendall(msg)
# print("msg send finished")
# msg = server.res_close.encode()
# conn.sendall(msg)
break
if __name__ == '__main__':
input_port = int(sys.argv[1])
input_doc_root = sys.argv[2]
server = MyServer(input_port, input_doc_root)
# Add code to start your server here
threads = []
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((server.host, server.port))
s.listen()
while True:
conn, addr = s.accept()
            t = threading.Thread(target=createsocket, args=(conn, addr))
t.start()
threads.append(t)
for t in threads:
t.join()
| 31.105634 | 85 | 0.479511 | 3,189 | 0.721983 | 0 | 0 | 0 | 0 | 0 | 0 | 998 | 0.225945 |
16c04b9a7be2241d66d21a6886d268026e2fdc89 | 258 | py | Python | metric/metric.py | riven314/ENetDepth_TimeAnlysis_Tmp | 29bd864adf91700799d87b449d0c4e389f7028bc | [
"MIT"
] | null | null | null | metric/metric.py | riven314/ENetDepth_TimeAnlysis_Tmp | 29bd864adf91700799d87b449d0c4e389f7028bc | [
"MIT"
] | null | null | null | metric/metric.py | riven314/ENetDepth_TimeAnlysis_Tmp | 29bd864adf91700799d87b449d0c4e389f7028bc | [
"MIT"
] | null | null | null | class Metric(object):
"""Base class for all metrics.
From: https://github.com/pytorch/tnt/blob/master/torchnet/meter/meter.py
"""
def reset(self):
pass
def add(self):
pass
def value(self):
pass
| 18.428571 | 77 | 0.554264 | 256 | 0.992248 | 0 | 0 | 0 | 0 | 0 | 0 | 119 | 0.46124 |
16c081effc971dd24b22b938117db5e30575dfca | 1,179 | py | Python | pf_pweb_sourceman/task/git_repo_man.py | problemfighter/pf-pweb-sourceman | 827b1d92ac992ec1495b128e99137aab1cfa09a0 | [
"Apache-2.0"
] | null | null | null | pf_pweb_sourceman/task/git_repo_man.py | problemfighter/pf-pweb-sourceman | 827b1d92ac992ec1495b128e99137aab1cfa09a0 | [
"Apache-2.0"
] | null | null | null | pf_pweb_sourceman/task/git_repo_man.py | problemfighter/pf-pweb-sourceman | 827b1d92ac992ec1495b128e99137aab1cfa09a0 | [
"Apache-2.0"
] | null | null | null | from git import Repo
from pf_pweb_sourceman.common.console import console
from pf_py_file.pfpf_file_util import PFPFFileUtil
class GitRepoMan:
def get_repo_name_from_url(self, url: str):
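        # Illustrative example: "https://github.com/org/repo.git" -> "repo"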
if not url:
return None
last_slash_index = url.rfind("/")
last_suffix_index = url.rfind(".git")
if last_suffix_index < 0:
last_suffix_index = len(url)
if last_slash_index < 0 or last_suffix_index <= last_slash_index:
raise Exception("Invalid repo url {}".format(url))
return url[last_slash_index + 1:last_suffix_index]
def clone_or_pull_project(self, path, url, branch):
repo_name = self.get_repo_name_from_url(url)
if not repo_name:
raise Exception("Invalid repo")
if not PFPFFileUtil.is_exist(path):
console.success("Cloning project: " + repo_name + ", Branch: " + branch)
Repo.clone_from(url, branch=branch, to_path=path)
else:
console.success(repo_name + " Taking pull...")
repo = Repo(path)
repo.git.checkout(branch)
origin = repo.remotes.origin
origin.pull()
| 33.685714 | 84 | 0.63274 | 1,051 | 0.891433 | 0 | 0 | 0 | 0 | 0 | 0 | 92 | 0.078032 |
16c22952eef284ef2bbd4cfa4e2bbaa9380b0ceb | 2,969 | py | Python | tool/remote_info.py | shanmukmichael/Asset-Discovery-Tool | 82c3f2f5cecb394a1ad87b2e504fbef219a466fd | [
"MIT"
] | null | null | null | tool/remote_info.py | shanmukmichael/Asset-Discovery-Tool | 82c3f2f5cecb394a1ad87b2e504fbef219a466fd | [
"MIT"
] | null | null | null | tool/remote_info.py | shanmukmichael/Asset-Discovery-Tool | 82c3f2f5cecb394a1ad87b2e504fbef219a466fd | [
"MIT"
] | null | null | null | import socket
import paramiko
import json
Hostname = '34.224.2.243'
Username = 'ec2-user'
key = 'G:/Projects/Python/Asset-Discovery-Tool/tool/s.pem'
def is_connected():
try:
# connect to the host -- tells us if the host is actually
# reachable
socket.create_connection(("8.8.8.8", 53))
return "conneted to the Internet!"
except OSError:
pass
return "Please Connect to the Internet!"
is_connected()
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=Hostname, username=Username, key_filename=key)
except paramiko.AuthenticationException:
print("Failed to connect to {} due to wrong username/password".format(Hostname))
exit(1)
except:
print("Failed to connect to {} ".format(Hostname))
exit(2)
# commands
_, stdout_1, _ = ssh.exec_command("hostname")
_, stdout_2, _ = ssh.exec_command("hostname -I | awk '{print $1}'")
_, stdout_3, _ = ssh.exec_command("cat /sys/class/net/eth0/address")
_, stdout_4, _ = ssh.exec_command(
"awk -F= '$1=={} {{ print $2 ;}}' /etc/os-release".format('"NAME"'))
_, stdout_5, _ = ssh.exec_command("whoami")
_, stdout_6, _ = ssh.exec_command("last -F")
_, stdout_7, _ = ssh.exec_command("netstat -tnpa | grep 'ESTABLISHED.*sshd'")
#_, stdout_8, _ = ssh.exec_command("sudo {}/24".format())
# egrep -o '([0-9]{1,3}\.){3}[0-9]{1,3}' --IP-address
# ---------------------------------
def remote_data_1():
output_1 = stdout_1.readlines()
output_2 = stdout_2.readlines()
output_3 = stdout_3.readlines()
output_4 = stdout_4.readlines()
output_5 = stdout_5.readlines()
remote_data_1 = {
'Hostname': '',
'IP': '',
'MAC': '',
'OS': '',
'Currentuser': '',
}
remote_data_1['Hostname'] = output_1[0].strip('\n')
remote_data_1['IP'] = output_2[0].strip('\n')
remote_data_1['MAC'] = output_3[0].strip('\n')
remote_data_1['OS'] = output_4[0][1:-1].strip('\"')
remote_data_1['Currentuser'] = output_5[0].strip('\n')
return json.dumps(remote_data_1, indent=4)
# ----------------------------------
def remote_data_2_():
output = stdout_6.readlines()
data_ = []
filter_ = []
remote_data_2 = {
'Hostname': [],
'IP': [],
'MAC': [],
'Lastseen': [],
'Status': [],
}
for i in output:
data_.append(i.split(' '))
for i in data_:
filter_.append(list(filter(None, i)))
for i in range(len(filter_)-3):
remote_data_2['Hostname'].append(filter_[i][0])
remote_data_2['IP'].append(filter_[i][2])
remote_data_2['MAC'].append('not found')
remote_data_2['Lastseen'].append(' '.join(filter_[i][3:8]))
if 'logged' in filter_[i][9]:
remote_data_2['Status'].append('Active')
else:
remote_data_2['Status'].append('Inactive')
# ssh.close()
return remote_data_2
| 29.39604 | 84 | 0.594139 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 919 | 0.309532 |
16c3880f871252c2ad2ebcf1bd3aca25678856cb | 16,099 | py | Python | hvac/api/secrets_engines/kv_v2.py | Famoco/hvac | cdc1854385dd981de38bcb6350f222a52bcf3923 | [
"Apache-2.0"
] | null | null | null | hvac/api/secrets_engines/kv_v2.py | Famoco/hvac | cdc1854385dd981de38bcb6350f222a52bcf3923 | [
"Apache-2.0"
] | null | null | null | hvac/api/secrets_engines/kv_v2.py | Famoco/hvac | cdc1854385dd981de38bcb6350f222a52bcf3923 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""KvV2 methods module."""
from hvac import exceptions, utils
from hvac.api.vault_api_base import VaultApiBase
DEFAULT_MOUNT_POINT = 'secret'
class KvV2(VaultApiBase):
"""KV Secrets Engine - Version 2 (API).
Reference: https://www.vaultproject.io/api/secret/kv/kv-v2.html
"""
def configure(self, max_versions=10, cas_required=None, mount_point=DEFAULT_MOUNT_POINT):
"""Configure backend level settings that are applied to every key in the key-value store.
Supported methods:
POST: /{mount_point}/config. Produces: 204 (empty body)
:param max_versions: The number of versions to keep per key. This value applies to all keys, but a key's
metadata setting can overwrite this value. Once a key has more than the configured allowed versions the
oldest version will be permanently deleted. Defaults to 10.
:type max_versions: int
:param cas_required: If true all keys will require the cas parameter to be set on all write requests.
:type cas_required: bool
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
params = {
'max_versions': max_versions,
}
if cas_required is not None:
params['cas_required'] = cas_required
api_path = utils.format_url('/v1/{mount_point}/config', mount_point=mount_point)
return self._adapter.post(
url=api_path,
json=params,
)
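        # Illustrative usage sketch (assumes a configured hvac.Client named `client`):
        #   client.secrets.kv.v2.configure(max_versions=20, mount_point='secret')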
def read_configuration(self, mount_point=DEFAULT_MOUNT_POINT):
"""Read the KV Version 2 configuration.
Supported methods:
            GET: /{mount_point}/config. Produces: 200 application/json
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
api_path = utils.format_url(
'/v1/{mount_point}/config',
mount_point=mount_point,
)
response = self._adapter.get(url=api_path)
return response.json()
def read_secret_version(self, path, version=None, mount_point=DEFAULT_MOUNT_POINT):
"""Retrieve the secret at the specified location.
Supported methods:
GET: /{mount_point}/data/{path}. Produces: 200 application/json
:param path: Specifies the path of the secret to read. This is specified as part of the URL.
:type path: str | unicode
:param version: Specifies the version to return. If not set the latest version is returned.
:type version: int
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
params = {}
if version is not None:
params['version'] = version
api_path = utils.format_url('/v1/{mount_point}/data/{path}', mount_point=mount_point, path=path)
response = self._adapter.get(
url=api_path,
params=params,
)
return response.json()
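        # Illustrative usage sketch (hypothetical path and keys):
        #   read = client.secrets.kv.v2.read_secret_version(path='app/config')
        #   value = read['data']['data']                      # the stored key/value pairs
        #   version = read['data']['metadata']['version']     # version number of this read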
def create_or_update_secret(self, path, secret, cas=None, mount_point=DEFAULT_MOUNT_POINT):
"""Create a new version of a secret at the specified location.
If the value does not yet exist, the calling token must have an ACL policy granting the create capability. If
the value already exists, the calling token must have an ACL policy granting the update capability.
Supported methods:
POST: /{mount_point}/data/{path}. Produces: 200 application/json
:param path: Path
:type path: str | unicode
:param cas: Set the "cas" value to use a Check-And-Set operation. If not set the write will be allowed. If set
to 0 a write will only be allowed if the key doesn't exist. If the index is non-zero the write will only be
allowed if the key's current version matches the version specified in the cas parameter.
:type cas: int
:param secret: The contents of the "secret" dict will be stored and returned on read.
:type secret: dict
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
params = {
'options': {},
'data': secret,
}
if cas is not None:
params['options']['cas'] = cas
api_path = utils.format_url('/v1/{mount_point}/data/{path}', mount_point=mount_point, path=path)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
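        # Illustrative usage sketch (hypothetical secret data):
        #   client.secrets.kv.v2.create_or_update_secret(path='app/config', secret={'api_key': 'abc123'})
        #   # With check-and-set: only write if the current version is 3
        #   client.secrets.kv.v2.create_or_update_secret(path='app/config', secret={'api_key': 'def456'}, cas=3)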
def patch(self, path, secret, mount_point=DEFAULT_MOUNT_POINT):
"""Set or update data in the KV store without overwriting.
:param path: Path
:type path: str | unicode
:param secret: The contents of the "secret" dict will be stored and returned on read.
:type secret: dict
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the create_or_update_secret request.
:rtype: dict
"""
# First, do a read.
try:
current_secret_version = self.read_secret_version(
path=path,
mount_point=mount_point,
)
except exceptions.InvalidPath:
raise exceptions.InvalidPath('No value found at "{path}"; patch only works on existing data.'.format(path=path))
# Update existing secret dict.
patched_secret = current_secret_version['data']['data']
patched_secret.update(secret)
# Write back updated secret.
return self.create_or_update_secret(
path=path,
cas=current_secret_version['data']['metadata']['version'],
secret=patched_secret,
mount_point=mount_point,
)
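        # Illustrative usage sketch: merge one key into the existing secret without
        # overwriting the rest (raises InvalidPath if nothing is stored at `path`):
        #   client.secrets.kv.v2.patch(path='app/config', secret={'extra_key': 'added'})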
def delete_latest_version_of_secret(self, path, mount_point=DEFAULT_MOUNT_POINT):
"""Issue a soft delete of the secret's latest version at the specified location.
This marks the version as deleted and will stop it from being returned from reads, but the underlying data will
not be removed. A delete can be undone using the undelete path.
Supported methods:
DELETE: /{mount_point}/data/{path}. Produces: 204 (empty body)
:param path: Specifies the path of the secret to delete. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url('/v1/{mount_point}/data/{path}', mount_point=mount_point, path=path)
return self._adapter.delete(
url=api_path,
)
def delete_secret_versions(self, path, versions, mount_point=DEFAULT_MOUNT_POINT):
"""Issue a soft delete of the specified versions of the secret.
This marks the versions as deleted and will stop them from being returned from reads,
but the underlying data will not be removed. A delete can be undone using the
undelete path.
Supported methods:
POST: /{mount_point}/delete/{path}. Produces: 204 (empty body)
:param path: Specifies the path of the secret to delete. This is specified as part of the URL.
:type path: str | unicode
:param versions: The versions to be deleted. The versioned data will not be deleted, but it will no longer be
returned in normal get requests.
        :type versions: list of int
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
if not isinstance(versions, list) or len(versions) == 0:
error_msg = 'argument to "versions" must be a list containing one or more integers, "{versions}" provided.'.format(
versions=versions
)
raise exceptions.ParamValidationError(error_msg)
params = {
'versions': versions,
}
api_path = utils.format_url('/v1/{mount_point}/delete/{path}', mount_point=mount_point, path=path)
return self._adapter.post(
url=api_path,
json=params,
)
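        # Illustrative sketch of the version lifecycle (hypothetical versions):
        #   client.secrets.kv.v2.delete_secret_versions(path='app/config', versions=[1, 2])    # soft delete
        #   client.secrets.kv.v2.undelete_secret_versions(path='app/config', versions=[1, 2])  # restore
        #   client.secrets.kv.v2.destroy_secret_versions(path='app/config', versions=[1, 2])   # permanent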
def undelete_secret_versions(self, path, versions, mount_point=DEFAULT_MOUNT_POINT):
"""Undelete the data for the provided version and path in the key-value store.
This restores the data, allowing it to be returned on get requests.
Supported methods:
POST: /{mount_point}/undelete/{path}. Produces: 204 (empty body)
:param path: Specifies the path of the secret to undelete. This is specified as part of the URL.
:type path: str | unicode
:param versions: The versions to undelete. The versions will be restored and their data will be returned on
normal get requests.
:type versions: list of int
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
if not isinstance(versions, list) or len(versions) == 0:
error_msg = 'argument to "versions" must be a list containing one or more integers, "{versions}" provided.'.format(
versions=versions
)
raise exceptions.ParamValidationError(error_msg)
params = {
'versions': versions,
}
api_path = utils.format_url('/v1/{mount_point}/undelete/{path}', mount_point=mount_point, path=path)
return self._adapter.post(
url=api_path,
json=params,
)
def destroy_secret_versions(self, path, versions, mount_point=DEFAULT_MOUNT_POINT):
"""Permanently remove the specified version data and numbers for the provided path from the key-value store.
Supported methods:
POST: /{mount_point}/destroy/{path}. Produces: 204 (empty body)
:param path: Specifies the path of the secret to destroy.
This is specified as part of the URL.
:type path: str | unicode
:param versions: The versions to destroy. Their data will be
permanently deleted.
:type versions: list of int
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
if not isinstance(versions, list) or len(versions) == 0:
error_msg = 'argument to "versions" must be a list containing one or more integers, "{versions}" provided.'.format(
versions=versions
)
raise exceptions.ParamValidationError(error_msg)
params = {
'versions': versions,
}
api_path = utils.format_url('/v1/{mount_point}/destroy/{path}', mount_point=mount_point, path=path)
return self._adapter.post(
url=api_path,
json=params,
)
def list_secrets(self, path, mount_point=DEFAULT_MOUNT_POINT):
"""Return a list of key names at the specified location.
Folders are suffixed with /. The input must be a folder; list on a file will not return a value. Note that no
policy-based filtering is performed on keys; do not encode sensitive information in key names. The values
themselves are not accessible via this command.
Supported methods:
LIST: /{mount_point}/metadata/{path}. Produces: 200 application/json
:param path: Specifies the path of the secrets to list. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
api_path = utils.format_url('/v1/{mount_point}/metadata/{path}', mount_point=mount_point, path=path)
response = self._adapter.list(
url=api_path,
)
return response.json()
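        # Illustrative usage sketch (hypothetical mount/path):
        #   keys = client.secrets.kv.v2.list_secrets(path='app')['data']['keys']
        #   # folders are suffixed with '/', e.g. ['config', 'nested/']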
def read_secret_metadata(self, path, mount_point=DEFAULT_MOUNT_POINT):
"""Retrieve the metadata and versions for the secret at the specified path.
Supported methods:
GET: /{mount_point}/metadata/{path}. Produces: 200 application/json
:param path: Specifies the path of the secret to read. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
api_path = utils.format_url('/v1/{mount_point}/metadata/{path}', mount_point=mount_point, path=path)
response = self._adapter.get(
url=api_path,
)
return response.json()
def update_metadata(self, path, max_versions=None, cas_required=None, mount_point=DEFAULT_MOUNT_POINT):
"""Updates the max_versions of cas_required setting on an existing path.
Supported methods:
POST: /{mount_point}/metadata/{path}. Produces: 204 (empty body)
:param path: Path
:type path: str | unicode
:param max_versions: The number of versions to keep per key. If not set, the backend's configured max version is
used. Once a key has more than the configured allowed versions the oldest version will be permanently
deleted.
:type max_versions: int
:param cas_required: If true the key will require the cas parameter to be set on all write requests. If false,
the backend's configuration will be used.
:type cas_required: bool
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
params = {}
if max_versions is not None:
params['max_versions'] = max_versions
if cas_required is not None:
if not isinstance(cas_required, bool):
error_msg = 'bool expected for cas_required param, {type} received'.format(type=type(cas_required))
raise exceptions.ParamValidationError(error_msg)
params['cas_required'] = cas_required
api_path = utils.format_url('/v1/{mount_point}/metadata/{path}', mount_point=mount_point, path=path)
return self._adapter.post(
url=api_path,
json=params,
)
def delete_metadata_and_all_versions(self, path, mount_point=DEFAULT_MOUNT_POINT):
"""Delete (permanently) the key metadata and all version data for the specified key.
All version history will be removed.
Supported methods:
DELETE: /{mount_point}/metadata/{path}. Produces: 204 (empty body)
:param path: Specifies the path of the secret to delete. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url('/v1/{mount_point}/metadata/{path}', mount_point=mount_point, path=path)
return self._adapter.delete(
url=api_path,
)
| 42.254593 | 127 | 0.638611 | 15,907 | 0.988074 | 0 | 0 | 0 | 0 | 0 | 0 | 10,605 | 0.658737 |
16c3b1e6ee4edc3e7c6e66622f8ee4afa8a44dad | 512 | py | Python | android/install-all.py | SaschaWillems/vulkan_slim | 642bcf1eaba8bbcb94a8bec61f3454c597af72f9 | [
"MIT"
] | 28 | 2017-09-04T18:54:49.000Z | 2021-09-18T11:52:04.000Z | android/install-all.py | 0xm1nam0/Vulkan | ea726e617f71f5ff5c1503bca134b2a7ad17a1a7 | [
"MIT"
] | null | null | null | android/install-all.py | 0xm1nam0/Vulkan | ea726e617f71f5ff5c1503bca134b2a7ad17a1a7 | [
"MIT"
] | 1 | 2018-07-20T06:51:08.000Z | 2018-07-20T06:51:08.000Z | # Install all examples to connected device(s)
import subprocess
import sys
answer = input("Install all vulkan examples to attached device, this may take some time! (Y/N)").lower() == 'y'
if answer:
BUILD_ARGUMENTS = ""
for arg in sys.argv[1:]:
if arg == "-validation":
BUILD_ARGUMENTS += "-validation"
if subprocess.call(("python build-all.py -deploy %s" % BUILD_ARGUMENTS).split(' ')) != 0:
print("Error: Not all examples may have been installed!")
sys.exit(-1)
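# Example invocation (hypothetical): python install-all.py -validation
# The optional -validation flag is forwarded to build-all.py along with -deploy.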
| 36.571429 | 111 | 0.644531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.470703 |
16c4d3d9ff39c41395ea4a9779719c084f2fc55a | 1,726 | py | Python | main.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | main.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | main.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | from generators.ahoughton import AhoughtonGenerator
from render_config import RendererConfig
from problem_renderer import ProblemRenderer
from moonboard import get_moonboard
from adapters.default import DefaultProblemAdapter
from adapters.crg import CRGProblemAdapter
from adapters.ahoughton import AhoughtonAdapter
import json
def main():
    # Create renderers (default/CRG on the 2017 board, Ahoughton on 2016 and 2017) and the Ahoughton generators
config = RendererConfig()
renderer = ProblemRenderer(
get_moonboard(2017),
DefaultProblemAdapter(),
config
)
crg_renderer = ProblemRenderer(
get_moonboard(2017),
CRGProblemAdapter(),
config
)
ahoughton_renderer_2016 = ProblemRenderer(
get_moonboard(2016),
AhoughtonAdapter(),
config
)
ahoughton_generator_2016 = AhoughtonGenerator(year=2016, driver_path='C:/.selenium_drivers/chromedriver.exe')
ahoughton_renderer_2017 = ProblemRenderer(
get_moonboard(2017),
AhoughtonAdapter(),
config
)
ahoughton_generator_2017 = AhoughtonGenerator(year=2017, driver_path='C:/.selenium_drivers/chromedriver.exe')
# Load data
with open('data/problems.json', 'r') as f:
problems = json.load(f)
renderer.render_problem(problems['339318'], with_info=True)
with open('data/crg.json', 'r') as f:
crg_problems = json.load(f)
crg_renderer.render_problem(crg_problems['1'])
# Ahoughton generator and adapter test
# 2016
problem = ahoughton_generator_2016.generate()
ahoughton_renderer_2016.render_problem(problem)
# 2017
problem = ahoughton_generator_2017.generate()
ahoughton_renderer_2017.render_problem(problem)
if __name__ == "__main__":
main()
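# Usage note (assumptions, not in the original source): running `python main.py` expects the
# local data/problems.json and data/crg.json files and the hard-coded chromedriver path above.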
| 30.280702 | 113 | 0.707995 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 218 | 0.126304 |
16c4fdb052f6373448ef88971819f508813eb2d7 | 5,228 | py | Python | GearBot/Util/Pages.py | JohnyTheCarrot/GearBot | 8a32bfc79f997a154c9abccbf6742a79fc5257b0 | [
"MIT"
] | null | null | null | GearBot/Util/Pages.py | JohnyTheCarrot/GearBot | 8a32bfc79f997a154c9abccbf6742a79fc5257b0 | [
"MIT"
] | null | null | null | GearBot/Util/Pages.py | JohnyTheCarrot/GearBot | 8a32bfc79f997a154c9abccbf6742a79fc5257b0 | [
"MIT"
] | null | null | null | import discord
from Util import Utils, Emoji, Translator
page_handlers = dict()
known_messages = dict()
def on_ready(bot):
load_from_disc()
def register(type, init, update, sender_only=False):
page_handlers[type] = {
"init": init,
"update": update,
"sender_only": sender_only
}
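# Illustrative registration sketch (hypothetical handler functions):
#   register("help", init_help_page, update_help_page, sender_only=True)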
def unregister(type_handler):
if type_handler in page_handlers.keys():
del page_handlers[type_handler]
async def create_new(type, ctx, **kwargs):
text, embed, has_pages, emoji = await page_handlers[type]["init"](ctx, **kwargs)
message: discord.Message = await ctx.channel.send(text, embed=embed)
if has_pages or len(emoji) > 0:
data = {
"type": type,
"page": 0,
"trigger": ctx.message.id,
"sender": ctx.author.id
}
for k, v in kwargs.items():
data[k] = v
known_messages[str(message.id)] = data
try:
if has_pages: await message.add_reaction(Emoji.get_emoji('LEFT'))
for e in emoji: await message.add_reaction(e)
if has_pages: await message.add_reaction(Emoji.get_emoji('RIGHT'))
except discord.Forbidden:
await ctx.send(
f"{Emoji.get_chat_emoji('WARNING')} {Translator.translate('paginator_missing_perms', ctx, prev=Emoji.get_chat_emoji('LEFT'), next=Emoji.get_chat_emoji('RIGHT'))} {Emoji.get_chat_emoji('WARNING')}")
if len(known_messages.keys()) > 500:
del known_messages[list(known_messages.keys())[0]]
save_to_disc()
async def update(bot, message, action, user):
message_id = str(message.id)
if message_id in known_messages.keys():
type = known_messages[message_id]["type"]
if type in page_handlers.keys():
data = known_messages[message_id]
if data["sender"] == user or page_handlers[type]["sender_only"] is False:
page_num = data["page"]
try:
trigger_message = await message.channel.get_message(data["trigger"])
except discord.NotFound:
trigger_message = None
ctx = await bot.get_context(trigger_message) if trigger_message is not None else None
text, embed, page = await page_handlers[type]["update"](ctx, message, page_num, action, data)
await message.edit(content=text, embed=embed)
known_messages[message_id]["page"] = page
save_to_disc()
return True
return False
def basic_pages(pages, page_num, action):
if action == "PREV":
page_num -= 1
elif action == "NEXT":
page_num += 1
if page_num < 0:
page_num = len(pages) - 1
if page_num >= len(pages):
page_num = 0
page = pages[page_num]
return page, page_num
def paginate(input, max_lines=20, max_chars=1900, prefix="", suffix=""):
max_chars -= len(prefix) + len(suffix)
lines = str(input).splitlines(keepends=True)
pages = []
page = ""
count = 0
for line in lines:
if len(page) + len(line) > max_chars or count == max_lines:
if page == "":
# single 2k line, split smaller
words = line.split(" ")
for word in words:
if len(page) + len(word) > max_chars:
pages.append(f"{prefix}{page}{suffix}")
page = f"{word} "
else:
page += f"{word} "
else:
pages.append(f"{prefix}{page}{suffix}")
page = line
count = 1
else:
page += line
count += 1
pages.append(f"{prefix}{page}{suffix}")
return pages
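# Illustrative usage sketch (hypothetical caller): split long text into Discord-sized chunks
#   for chunk in paginate(long_text, max_chars=1900):
#       await ctx.send(chunk)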
def paginate_fields(input):
pages = []
for page in input:
page_fields = dict()
for name, content in page.items():
page_fields[name] = paginate(content, max_chars=1024)
pages.append(page_fields)
real_pages = []
for page in pages:
page_count = 0
page_fields = dict()
for name, parts in page.items():
base_name = name
            if len(parts) == 1:
if page_count + len(name) + len(parts[0]) > 4000:
real_pages.append(page_fields)
page_fields = dict()
page_count = 0
page_fields[name] = parts[0]
page_count += len(name) + len(parts[0])
else:
for i in range(len(parts)):
part = parts[i]
name = f"{base_name} ({i+1}/{len(parts)})"
if page_count + len(name) + len(part) > 3000:
real_pages.append(page_fields)
page_fields = dict()
page_count = 0
page_fields[name] = part
page_count += len(name) + len(part)
real_pages.append(page_fields)
return real_pages
def save_to_disc():
Utils.saveToDisk("known_messages", known_messages)
def load_from_disc():
global known_messages
known_messages = Utils.fetch_from_disk("known_messages")
| 33.299363 | 213 | 0.55394 | 0 | 0 | 0 | 0 | 0 | 0 | 2,106 | 0.402831 | 543 | 0.103864 |
16c7d2d61e641808d594577e77047ea93c4d6c86 | 8,007 | py | Python | software/Opal/spud/diamond/build/lib.linux-x86_64-2.7/diamond/dialogs.py | msc-acse/acse-9-independent-research-project-Wade003 | cfcba990d52ccf535171cf54c0a91b184db6f276 | [
"MIT"
] | 2 | 2020-05-11T02:39:46.000Z | 2020-05-11T03:08:38.000Z | software/multifluids_icferst/libspud/diamond/build/lib.linux-x86_64-2.7/diamond/dialogs.py | msc-acse/acse-9-independent-research-project-Wade003 | cfcba990d52ccf535171cf54c0a91b184db6f276 | [
"MIT"
] | null | null | null | software/multifluids_icferst/libspud/diamond/build/lib.linux-x86_64-2.7/diamond/dialogs.py | msc-acse/acse-9-independent-research-project-Wade003 | cfcba990d52ccf535171cf54c0a91b184db6f276 | [
"MIT"
] | 2 | 2020-05-21T22:50:19.000Z | 2020-10-28T17:16:31.000Z | #!/usr/bin/env python
# This file is part of Diamond.
#
# Diamond is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Diamond is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Diamond. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import traceback
import gtk
import pygtkconsole
def prompt(parent, message, type = gtk.MESSAGE_QUESTION, has_cancel = False):
"""
Display a simple Yes / No dialog. Returns one of gtk.RESPONSE_{YES,NO,CANCEL}.
"""
prompt_dialog = gtk.MessageDialog(parent, 0, type, gtk.BUTTONS_NONE, message)
prompt_dialog.add_buttons(gtk.STOCK_YES, gtk.RESPONSE_YES, gtk.STOCK_NO, gtk.RESPONSE_NO)
if has_cancel:
prompt_dialog.add_buttons(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
prompt_dialog.connect("response", prompt_response)
prompt_dialog.run()
return prompt_response.response
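  # Note: prompt_response() below stores the clicked button id on the handler function
  # itself, which is why the response can be read back here after run().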
def long_message(parent, message):
"""
Display a message prompt, with the message contained within a scrolled window.
"""
message_dialog = gtk.Dialog(parent = parent, buttons = (gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
message_dialog.set_default_size(400, 300)
message_dialog.connect("response", close_dialog)
scrolled_window = gtk.ScrolledWindow()
message_dialog.vbox.add(scrolled_window)
scrolled_window.show()
scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
text_view = gtk.TextView()
scrolled_window.add(text_view)
text_view.show()
text_view.get_buffer().set_text(message)
text_view.set_cursor_visible(False)
text_view.set_property("editable", False)
text_view.set_property("height-request", 180)
text_view.set_property("width-request", 240)
message_dialog.run()
return
def error(parent, message):
"""
Display an error message.
"""
error_dialog = gtk.MessageDialog(parent, 0, gtk.MESSAGE_WARNING, gtk.BUTTONS_OK, message)
error_dialog.connect("response", close_dialog)
error_dialog.run()
return
def error_tb(parent, message):
"""
Display an error message, together with the last traceback.
"""
tb = traceback.format_exception(sys.exc_info()[0] ,sys.exc_info()[1], sys.exc_info()[2])
tb_msg = ""
for tbline in tb:
tb_msg += tbline
long_message(parent, tb_msg + "\n" + message)
return
def get_filename(title, action, filter_names_and_patterns = {}, folder_uri = None):
"""
Utility function to get a filename.
"""
if action == gtk.FILE_CHOOSER_ACTION_SAVE:
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK)
elif action == gtk.FILE_CHOOSER_ACTION_CREATE_FOLDER:
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_NEW,gtk.RESPONSE_OK)
else:
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)
filew = gtk.FileChooserDialog(title=title, action=action, buttons=buttons)
filew.set_default_response(gtk.RESPONSE_OK)
if not folder_uri is None:
filew.set_current_folder_uri("file://" + os.path.abspath(folder_uri))
for filtername in filter_names_and_patterns:
filter = gtk.FileFilter()
filter.set_name(filtername)
filter.add_pattern(filter_names_and_patterns[filtername])
filew.add_filter(filter)
allfilter = gtk.FileFilter()
allfilter.set_name("All known files")
for filtername in filter_names_and_patterns:
allfilter.add_pattern(filter_names_and_patterns[filtername])
filew.add_filter(allfilter)
filter = gtk.FileFilter()
filter.set_name("All files")
filter.add_pattern("*")
filew.add_filter(filter)
result = filew.run()
if result == gtk.RESPONSE_OK:
filename = filew.get_filename()
filtername = filew.get_filter().get_name()
filew.destroy()
return filename
else:
filew.destroy()
return None
def console(parent, locals = None):
"""
Launch a python console.
"""
console_dialog = gtk.Dialog(parent = parent, buttons = (gtk.STOCK_QUIT, gtk.RESPONSE_ACCEPT))
console_dialog.set_default_size(400, 300)
console_dialog.connect("response", close_dialog)
stdout = sys.stdout
stderr = sys.stderr
console_widget = pygtkconsole.GTKInterpreterConsole(locals)
console_dialog.vbox.add(console_widget)
console_widget.show()
console_dialog.run()
sys.stdout = stdout
sys.stderr = stderr
return
def prompt_response(dialog, response_id):
"""
Signal handler for dialog response signals. Stores the dialog response in the
function namespace, to allow response return in other functions.
"""
if response_id == gtk.RESPONSE_DELETE_EVENT:
response_id = gtk.RESPONSE_CANCEL
prompt_response.response = response_id
close_dialog(dialog, response_id)
return
def close_dialog(dialog, response_id = None):
"""
Signal handler for dialog reponse or destroy signals. Closes the dialog.
"""
dialog.destroy()
return
def radio_dialog(title, message, choices, logo):
r = RadioDialog(title, message, choices, logo)
return r.data
def message_box(window, title, message):
dialog = gtk.MessageDialog(window, 0, gtk.MESSAGE_INFO, gtk.BUTTONS_OK, message)
dialog.set_title(title)
dialog.connect("response", close_dialog)
dialog.run()
class RadioDialog:
def __init__(self, title, message, choices, logo):
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.connect("delete_event", self.cleanup)
self.window.connect("key_press_event", self.key_press)
self.window.set_title(title)
self.window.set_position(gtk.WIN_POS_CENTER)
if not logo is None:
self.window.set_icon_from_file(logo)
self.window.show()
#swindow = gtk.ScrolledWindow()
#swindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
#self.window.add(swindow)
#swindow.show()
main_box = gtk.VBox(False, 0)
self.window.add(main_box)
main_box.show()
if not logo is None:
image = gtk.Image()
image.set_from_file(logo)
main_box.pack_start(image, True, True, 0)
image.show()
label = gtk.Label(message)
main_box.add(label)
label.show()
radio_box = gtk.VBox(False, 10)
main_box.pack_start(radio_box, True, True, 0)
radio_box.show()
separator = gtk.HSeparator()
main_box.pack_start(separator, False, True, 0)
separator.show()
close = gtk.Button(stock=gtk.STOCK_OK)
close.connect("clicked", self.cleanup)
main_box.pack_start(close, False, False, 0)
close.show()
prev_radio = None
for choice in choices:
radio = gtk.RadioButton(prev_radio, choice)
radio.connect("toggled", self.radio_callback, choice)
radio_box.pack_start(radio, False, False, 0)
radio.show()
if prev_radio is None:
radio.set_active(True)
prev_radio = radio
self.data = choices[0]
gtk.main()
def cleanup(self, widget, data=None):
self.window.destroy()
gtk.main_quit()
def key_press(self, widget, event):
if event.keyval == gtk.keysyms.Return:
self.cleanup(None)
def radio_callback(self, widget, data):
self.data = data
class GoToDialog:
def __init__(self, parent):
self.goto_gui = gtk.glade.XML(parent.gladefile, root="GoToDialog")
self.dialog_box = self.goto_gui.get_widget("GoToDialog")
self.dialog_box.set_modal(True)
def run(self):
signals = {"goto_activate": self.on_goto_activate,
"cancel_activate": self.on_cancel_activate}
self.goto_gui.signal_autoconnect(signals)
self.dialog_box.show()
return ""
def on_goto_activate(self, widget=None):
print "goto"
def on_cancel_activate(self, widget=None):
print "cancel"
| 28.193662 | 95 | 0.723242 | 2,426 | 0.302985 | 0 | 0 | 0 | 0 | 0 | 0 | 1,696 | 0.211815 |
16c86dba44c4d72104ae5760fa8ff0a89daa4441 | 5,793 | py | Python | src/mazes.py | tim-fi/pyxel_games | 3df9d7e1f3d5436d2051db3f5783bdeab916c054 | [
"Unlicense"
] | 2 | 2021-04-03T09:49:46.000Z | 2021-12-27T19:32:32.000Z | src/mazes.py | tim-fi/pyxel_games | 3df9d7e1f3d5436d2051db3f5783bdeab916c054 | [
"Unlicense"
] | null | null | null | src/mazes.py | tim-fi/pyxel_games | 3df9d7e1f3d5436d2051db3f5783bdeab916c054 | [
"Unlicense"
] | null | null | null | from __future__ import annotations
from dataclasses import dataclass, field, InitVar
from typing import List, Tuple, Iterator, Iterable, Optional
from random import choice
import pyxel
# -------------------------------------------------------
# Types
# -------------------------------------------------------
Maze = Tuple[int, ...]
# -------------------------------------------------------
# Constants
# -------------------------------------------------------
SCALE = 3
BOARD_WIDTH = 32
BOARD_HEIGHT = 32
CELL_SIZE = 6
CELL_COLOR = 15
WALL_SIZE = 1
WALL_COLOR = 5
# Flags
UP = 1 << 0
LEFT = 1 << 1
DOWN = 1 << 2
RIGHT = 1 << 3
VISTED = 1 << 4
# Calculated
N_CELLS = BOARD_WIDTH * BOARD_HEIGHT
BLOCK_SIZE = CELL_SIZE + WALL_SIZE * 2
WINDOW_WIDTH = BOARD_WIDTH * BLOCK_SIZE
WINDOW_HEIGHT = BOARD_HEIGHT * BLOCK_SIZE
NEIGHBORS = ((0, -1), (-1, 0), (0, 1), (1, 0))
# -------------------------------------------------------
# Maze
# -------------------------------------------------------
@dataclass
class Generator:
width: int
height: int
start_pos: InitVar[Optional[Tuple[int, int]]] = None
_visited_cells: int = field(init=False, default=0)
_stack: List[Tuple[int, int]] = field(init=False, default_factory=list)
_maze: List[int] = field(init=False)
def __post_init__(self, start_pos: Optional[Tuple[int, int]]):
x, y = start_pos = start_pos or (0, 0)
self._stack.append(start_pos)
self._visited_cells = 1
self._maze = [0 for _ in range(self.width * self.height)]
self._maze[y * self.width + x] |= VISTED
def _get_neighbors(self, x: int, y: int) -> List[int]:
return [
(i, dx, dy)
for i, (dx, dy) in enumerate(NEIGHBORS)
if (
0 <= x + dx < self.width and
0 <= y + dy < self.height and
self._maze[(y + dy) * self.width + (x + dx)] & VISTED == 0
)
]
def step(self) -> Tuple[Maze, Tuple[int, int], bool]:
if self._visited_cells < self.width * self.height:
x, y = self._stack[-1]
neighbors = self._get_neighbors(x, y)
if neighbors:
d, dx, dy = choice(neighbors)
self._maze[y * self.width + x] |= 1 << d
x_, y_ = x + dx, y + dy
self._maze[y_ * self.width + x_] |= 1 << ((d + 2) % 4) | VISTED
self._stack.append((x_, y_))
self._visited_cells += 1
else:
del self._stack[-1]
return tuple(self._maze), self._stack[-1], False
else:
return tuple(self._maze), (0, 0), True
# -------------------------------------------------------
# Application
# -------------------------------------------------------
@dataclass
class App:
maze: Maze = field(init=False, default=tuple(0 for _ in range(N_CELLS)))
generator: Optional[Generator] = field(init=False, default=None)
running: bool = field(init=False, default=False)
pos: Tuple[int, int] = field(init=False, default=(0, 0))
def run(self):
pyxel.init(
WINDOW_WIDTH, WINDOW_HEIGHT,
scale=SCALE, caption="Mazes",
border_width=SCALE, border_color=pyxel.DEFAULT_PALETTE[5],
fps=100
)
pyxel.mouse(True)
pyxel.run(self.update, self.draw)
def draw(self):
pyxel.cls(0)
for i, cell in enumerate(self.maze):
x, y = i % BOARD_WIDTH, i // BOARD_WIDTH
scr_x, scr_y = x * BLOCK_SIZE, y * BLOCK_SIZE
pyxel.rect(
scr_x, scr_y,
BLOCK_SIZE, BLOCK_SIZE,
WALL_COLOR
)
if cell & VISTED:
pyxel.rect(
scr_x + WALL_SIZE, scr_y + WALL_SIZE,
CELL_SIZE, CELL_SIZE,
CELL_COLOR
)
if cell & UP:
pyxel.rect(
scr_x + WALL_SIZE, scr_y,
CELL_SIZE, WALL_SIZE,
CELL_COLOR
)
if cell & LEFT:
pyxel.rect(
scr_x, scr_y + WALL_SIZE,
WALL_SIZE, CELL_SIZE,
CELL_COLOR
)
if cell & DOWN:
pyxel.rect(
scr_x + WALL_SIZE, scr_y + WALL_SIZE + CELL_SIZE,
CELL_SIZE, WALL_SIZE,
CELL_COLOR
)
if cell & RIGHT:
pyxel.rect(
scr_x + WALL_SIZE + CELL_SIZE, scr_y + WALL_SIZE,
WALL_SIZE, CELL_SIZE,
CELL_COLOR
)
x, y = self.pos
pyxel.rectb(
x * BLOCK_SIZE + WALL_SIZE, y * BLOCK_SIZE + WALL_SIZE,
CELL_SIZE, CELL_SIZE,
2 if self.running else 1
)
def update(self):
if pyxel.btnp(pyxel.KEY_SPACE) or pyxel.btnp(pyxel.MOUSE_LEFT_BUTTON):
self.running = not self.running
if self.running and self.generator is None:
self.generator = Generator(BOARD_WIDTH, BOARD_HEIGHT, self.pos)
if self.running:
next_maze, pos, done = self.generator.step()
if done:
self.running = False
self.generator = None
self.maze = next_maze
self.pos = pos
else:
self.pos = (
max(0, min(BOARD_WIDTH-1, pyxel.mouse_x // BLOCK_SIZE)),
max(0, min(BOARD_HEIGHT-1, pyxel.mouse_y // BLOCK_SIZE))
)
if __name__ == '__main__':
App().run() | 32.544944 | 79 | 0.468324 | 4,591 | 0.792508 | 0 | 0 | 4,613 | 0.796306 | 0 | 0 | 529 | 0.091317 |
16c8cf672763555c8ebe97c11704c5a42703427b | 5,536 | py | Python | bobjiang/settings.py | bobjiangps/django-blog | 6afd36fa96c5a027546575b362b0a481c5d7c1a5 | [
"MIT"
] | 3 | 2019-10-25T13:08:04.000Z | 2020-01-05T11:29:18.000Z | bobjiang/settings.py | bobjiangps/django-blog | 6afd36fa96c5a027546575b362b0a481c5d7c1a5 | [
"MIT"
] | 9 | 2020-05-10T10:13:56.000Z | 2022-03-11T23:33:52.000Z | bobjiang/settings.py | bobjiangps/django-blog | 6afd36fa96c5a027546575b362b0a481c5d7c1a5 | [
"MIT"
] | 3 | 2019-02-11T02:55:51.000Z | 2020-01-05T11:29:20.000Z | """
Django settings for bobjiang project.
Generated by 'django-admin startproject' using Django 2.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import json
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
with open(os.path.join(BASE_DIR, "store.json"), "r") as store_file:
STORED = json.load(store_file)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = STORED['secret_key']
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = True
DEBUG = False
RECORD_VISITOR = True
# RECORD_VISITOR = False
ALLOWED_HOSTS = ['*',]
APPEND_SLASH = True
# Application definition
INSTALLED_APPS = [
'haystack',
'blog.apps.BlogConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main',
'comments',
'ckeditor',
'ckeditor_uploader',
'tool',
'accounting',
#'xadmin',
#'crispy_forms',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bobjiang.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'bobjiang.context_processors.device'
],
},
},
]
WSGI_APPLICATION = 'bobjiang.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': STORED['db_name'],
'USER': STORED['db_user'],
'PASSWORD': STORED['db_pw'],
'HOST': '127.0.0.1',
'PORT': 3306,
'OPTIONS': {
'autocommit': True,
},
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
#LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
#STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
#STATIC_ROOT = '/home/bob/djproject/bobjiang/blog/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
CKEDITOR_UPLOAD_PATH = 'upload/'
CKEDITOR_IMAGE_BACKEND = 'pillow'
CKEDITOR_BROWSE_SHOW_DIRS = True
CKEDITOR_RESTRICT_BY_USER = True
CKEDITOR_CONFIGS = {
'default': {
'toolbar': (['div', 'Source', '-', 'Save', 'NewPage', 'Preview', '-', 'Templates'],
['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-','Print','SpellChecker','Scayt'],
['Undo', 'Redo', '-', 'Find', 'Replace', '-', 'SelectAll', 'RemoveFormat','-','Maximize', 'ShowBlocks', '-',"CodeSnippet", 'Subscript', 'Superscript'],
['Form', 'Checkbox', 'Radio', 'TextField', 'Textarea', 'Select', 'Button', 'ImageButton',
'HiddenField'],
['Bold', 'Italic', 'Underline', 'Strike', '-'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', 'Blockquote'],
['JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak'],
['Styles', 'Format', 'Font', 'FontSize'],
['TextColor', 'BGColor'],
),
'extraPlugins': 'codesnippet',
}
}
# haystack
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'blog.whoosh_cn_backend.WhooshEngine',
'PATH': os.path.join(BASE_DIR, 'whoosh_index'),
},
}
HAYSTACK_SEARCH_RESULTS_PER_PAGE = 5
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
| 28.984293 | 171 | 0.638728 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,515 | 0.634935 |
16c94789f75ac4c3a4caedf7d0832ce6641802d7 | 671 | py | Python | Users/models.py | titusnjuguna/FreeDom | 204b3d06ba66e6e8a04af976a25c3c1b7c070f75 | [
"MIT"
] | 1 | 2022-02-10T17:54:53.000Z | 2022-02-10T17:54:53.000Z | Users/models.py | titusnjuguna/FreeDom | 204b3d06ba66e6e8a04af976a25c3c1b7c070f75 | [
"MIT"
] | null | null | null | Users/models.py | titusnjuguna/FreeDom | 204b3d06ba66e6e8a04af976a25c3c1b7c070f75 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from PIL import Image
class Profile(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg',
upload_to='profile_pic/')
def __str__(self):
return f'{self.user.username} Profile'
def save(self,*args,**kwargs):
super().save()
img = Image(self.prof_pic.path)
if img.height > 300 and img.width > 300:
output_size = (300,300)
img.thumbnail(output_size)
img.save(self.prof_pic.path)
| 29.173913 | 60 | 0.588674 | 564 | 0.840537 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.086438 |
16c9a5ddd1d3e1f33c18bfd269bc6097b27aa5a2 | 2,281 | py | Python | dvc/__init__.py | zjj2wry/dvc | c9df567938eefd7b1f5b094c15f04e5ce704aa36 | [
"Apache-2.0"
] | null | null | null | dvc/__init__.py | zjj2wry/dvc | c9df567938eefd7b1f5b094c15f04e5ce704aa36 | [
"Apache-2.0"
] | null | null | null | dvc/__init__.py | zjj2wry/dvc | c9df567938eefd7b1f5b094c15f04e5ce704aa36 | [
"Apache-2.0"
] | null | null | null | """
DVC
----
Make your data science projects reproducible and shareable.
"""
import os
import warnings
VERSION_BASE = '0.23.2'
__version__ = VERSION_BASE
PACKAGEPATH = os.path.abspath(os.path.dirname(__file__))
HOMEPATH = os.path.dirname(PACKAGEPATH)
VERSIONPATH = os.path.join(PACKAGEPATH, 'version.py')
def _update_version_file():
"""Dynamically update version file."""
from git import Repo
from git.exc import InvalidGitRepositoryError
try:
repo = Repo(HOMEPATH)
except InvalidGitRepositoryError:
return __version__
sha = repo.head.object.hexsha
short_sha = repo.git.rev_parse(sha, short=6)
dirty = '.mod' if repo.is_dirty() else ''
ver = '{}+{}{}'.format(__version__, short_sha, dirty)
# Write a helper file, that will be installed with the package
# and will provide a true version of the installed dvc
with open(VERSIONPATH, 'w+') as fobj:
fobj.write('# AUTOGENERATED by dvc/__init__.py\n')
fobj.write('version = "{}"\n'.format(ver))
return ver
def _remove_version_file():
"""Remove version.py so that it doesn't get into the release."""
if os.path.exists(VERSIONPATH):
os.unlink(VERSIONPATH)
if os.path.exists(os.path.join(HOMEPATH, 'setup.py')):
# dvc is run directly from source without installation or
# __version__ is called from setup.py
if os.getenv('APPVEYOR_REPO_TAG', '').lower() != 'true' \
and os.getenv('TRAVIS_TAG', '') == '':
__version__ = _update_version_file()
else: # pragma: no cover
_remove_version_file()
else: # pragma: no cover
# dvc was installed with pip or something. Hopefully we have our
# auto-generated version.py to help us provide a true version
from dvc.version import version
__version__ = version
VERSION = __version__
# Ignore numpy's runtime warnings: https://github.com/numpy/numpy/pull/432.
# We don't directly import numpy, but our dependency networkx does, causing
# these warnings in some environments. Luckily these warnings are benign and
# we can simply ignore them so that they don't show up when you are using dvc.
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| 32.126761 | 78 | 0.702762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,071 | 0.469531 |
16c9bf4375ba49f6aaa19ea289549cfbf3ed1092 | 9,092 | py | Python | pkg_dir/src/utils/notion_utils.py | robperch/robase_datalysis | 343cb59b16630ca776bd941897ab8da63f20bfe1 | [
"MIT"
] | 2 | 2022-01-09T19:18:57.000Z | 2022-01-09T19:19:04.000Z | pkg_dir/src/utils/notion_utils.py | robperch/robasecode | 343cb59b16630ca776bd941897ab8da63f20bfe1 | [
"MIT"
] | 4 | 2022-01-17T02:46:24.000Z | 2022-02-20T23:04:05.000Z | pkg_dir/src/utils/notion_utils.py | robperch/robasecode | 343cb59b16630ca776bd941897ab8da63f20bfe1 | [
"MIT"
] | null | null | null | ## MODULE WITH UTIL FUNCTIONS - NOTION
"----------------------------------------------------------------------------------------------------------------------"
####################################################### Imports ########################################################
"----------------------------------------------------------------------------------------------------------------------"
## Standard library imports
import requests
## Third party imports
import pandas as pd
## Local application imports
from pkg_dir.config.config import (
creds_file_path as crds_loc,
)
from pkg_dir.src.utils.general_utils import (
read_yaml,
)
"----------------------------------------------------------------------------------------------------------------------"
####################################################### Functions ######################################################
"----------------------------------------------------------------------------------------------------------------------"
## Read notion database with api
def notion_api_call(db_api_url, db_id, headers):
"""
Read notion database with api
:param db_api_url (string): base url provided by Notion to make api calls
:param db_id (string): unique id of the database that will be read
:param headers (dictionary): dict with authorization and version info
:return req (?): response after calling notions api
"""
## Configuring reading URL
read_url = db_api_url + db_id + "/query"
## Requesting info via the API
req = requests.request(
"POST",
read_url,
headers=headers
)
## Verifying API call status
print("API interaction status code: ", req.status_code)
return req
## Calling a Notion database as a json via Notion's API
def get_notion_db_json(db_id):
"""
Calling a Notion database as a json via Notion's API
:param db_id (string): unique id of the database that will be called
:return db_json (json): json with the notion's db contents
"""
## Reading credentials from yaml file
yaml_file = read_yaml(crds_loc)
notion_version = yaml_file["notion_api"]["notion_version"]
db_api_url = yaml_file["notion_api"]["db_api_url"]
api_key = yaml_file["notion_api"]["api_key"]
## Building headers for the API call
headers = {
"Authorization": "Bearer " + api_key,
"Notion-Version": notion_version
}
## Calling notion's api
req = notion_api_call(db_api_url, db_id, headers)
## Converting the api response to a json
db_json = req.json()
return db_json
## Crating a schema of the notion database that was read
def create_notion_db_schema(db_json, relevant_properties):
"""
Crating a schema of the notion database that was read
:param db_json (json): json object obtained by calling notion's api
:param relevant_properties (list): list of string with the names of the relevant properties
:return db_schema (dictionary): schema of the table that includes the properties' data type
"""
## Selecting a sample entry to go over all of it's properties
sample_entry = db_json["results"][0]["properties"]
## Bulding dictionary (schema) of the relevant properties and their datatypes
db_schema = {
prop: {
"data_type": sample_entry[prop]["type"]
}
for prop in sample_entry
if prop in relevant_properties
}
# print(db_schema)
return db_schema
## Building a the blueprint dictionary for the dataframe (orient=index)
def notion_db_blueprint_df(db_json, db_schema, index_prop):
"""
Building a the blueprint dictionary for the dataframe (orient=index)
:param db_json (json): json object obtained by calling notion's api
:return db_schema (dictionary): schema of the table that includes the properties' data type
:param index_prop (string): name of the property that will serve as the df's index
:return df_dict (dict): dictionary that will be used to create a dataframe with the json contents
"""
## Empty dictionary that will store all the results
df_dict = {}
## Iterating over every row in the dataframe
for row in db_json["results"]:
## Defining the table's base attributes
#### All properties contained in the notion db
row_props = row["properties"]
#### Name of the index; key attribute in the notion db
row_name = row_props[index_prop]["title"][0]["plain_text"]
#### Empty list to store all the row contents
row_contents = []
## Iterating over every relevant property in the table
for col in db_schema:
## Identifying the datatype of the property
data_type = db_schema[col]["data_type"]
## Set of conditions to determine how the row will be treated
#### Skipping the index row
if data_type == "title":
continue
#### Searching for data in specific locations for special data types (1)
elif data_type in ["select", "person", "created_by"]:
try:
row_contents.append(row_props[col][data_type]["name"])
except:
row_contents.append("No_data")
#### Searching for data in specific locations for special data types (2)
elif data_type in ["rich_text"]:
try:
row_contents.append(row_props[col][data_type][0]["text"]["content"])
except:
row_contents.append("No_data")
#### Searching for data in specific locations for special data types (2)
elif data_type in ["formula"]:
try:
#### Applying conditions based on the type of formula result
if row_props[col][data_type]["type"] == "string":
row_contents.append(row_props[col][data_type]["string"])
elif row_props[col][data_type]["type"] == "number":
row_contents.append(row_props[col][data_type]["number"])
except:
row_contents.append("No_data")
#### General procedure to find data
else:
row_contents.append(row_props[col][db_schema[col]["data_type"]])
## Saving the row contents gathered
df_dict[row_name] = row_contents
return df_dict
## Obtaining a dataframe from a notion database
def notion_json_to_df(db_json, relevant_properties):
"""
Obtaining a dataframe from a notion database
:param db_json (json): json object obtained by calling notion's api
:param relevant_properties (list): list of string with the names of the relevant properties
:return df_n (dataframe): resulting dataframe crated based on the blueprint generated
"""
## General parameters needed to build the dataframe
#### Database schema
db_schema = create_notion_db_schema(db_json, relevant_properties)
#### Property that will be used as the dataframe's index
index_prop = [prop for prop in db_schema if db_schema[prop]["data_type"] == "title"][0]
## Building a the blueprint dictionary for the dataframe (orient=index)
df_dict = notion_db_blueprint_df(db_json, db_schema, index_prop)
## Creating dataframe with the resulting blueprint dictionary
#### Crating dataframe
df_n = pd.DataFrame.from_dict(df_dict, orient="index")
#### Inserting the table's index as a column at the end of the df
df_n.insert(
df_n.shape[1],
index_prop,
df_n.index
)
#### Resetting index
df_n.reset_index(inplace=True, drop=True)
#### Adjusting column names
df_n.columns = [col_n for col_n in db_schema]
return df_n
## Obtaining a Notion database as dataframe with the selected columns
def notion_db_to_df(db_id, relevant_properties):
"""
Obtaining a Notion database as dataframe with the selected columns
:param db_id (string): unique id to identify the notion database
:param relevant_properties (list): list of string with the names of the relevant properties
:return df_n (dataframe): resulting dataframe crated based on the blueprint generated
"""
## Calling a Notion database as a json via Notion's API
db_json = get_notion_db_json(db_id)
## Obtaining a dataframe from a notion database
df_n = notion_json_to_df(db_json, relevant_properties)
return df_n
"----------------------------------------------------------------------------------------------------------------------"
"----------------------------------------------------------------------------------------------------------------------"
## END OF FILE ##
"----------------------------------------------------------------------------------------------------------------------"
"----------------------------------------------------------------------------------------------------------------------" | 30.006601 | 120 | 0.569182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,736 | 0.630884 |
16caf6d3ac2e6621185a4d16c03069163552a572 | 8,371 | py | Python | libpermian/issueanalyzer/test_baseissue.py | velezd/permian | b52189f44c3112ad933a6b1e303a6b30c272651a | [
"MIT"
] | null | null | null | libpermian/issueanalyzer/test_baseissue.py | velezd/permian | b52189f44c3112ad933a6b1e303a6b30c272651a | [
"MIT"
] | 9 | 2022-02-07T14:14:10.000Z | 2022-03-22T09:17:16.000Z | libpermian/issueanalyzer/test_baseissue.py | velezd/permian | b52189f44c3112ad933a6b1e303a6b30c272651a | [
"MIT"
] | 3 | 2022-01-20T09:17:39.000Z | 2022-03-08T00:35:58.000Z | import unittest
import logging
import contextlib
from libpermian.settings import Settings
from .proxy import IssueAnalyzerProxy
from .base import BaseAnalyzer, BaseIssue
from .issueset import IssueSet
LOGGER = logging.getLogger('test')
class NewIssue(BaseIssue):
def submit(self):
LOGGER.info('submit was called')
return super().submit()
def make(self):
LOGGER.info('make was called')
return 'http://issuetracker.example.com/new_issue'
def update(self):
LOGGER.info('update was called')
def _lookup(self):
LOGGER.info('lookup was called')
return None
@property
def resolved(self):
return False
@property
def report_url(self):
return 'http://issuetracker.example.com/new/foo'
class TrackedUnresolvedIssue(NewIssue):
def _lookup(self):
LOGGER.info('lookup was called')
return 'http://issuetracker.example.com/123'
@property
def resolved(self):
return False
@property
def report_url(self):
return 'http://issuetracker.example.com/new/bar'
class TrackedResolvedIssue(TrackedUnresolvedIssue):
@property
def resolved(self):
return True
class TestNewIssue(unittest.TestCase):
def setUp(self):
self.settings = Settings({}, {}, [])
self.issue = NewIssue(self.settings)
def test_properties(self):
self.assertTrue(self.issue.new)
self.assertFalse(self.issue.tracked)
self.assertEqual(self.issue.uri, None)
def test_sync(self):
# test lookup was called
with self.assertLogs('test', level='INFO') as cm:
self.issue.sync()
self.assertEqual(cm.output, ['INFO:test:lookup was called'])
self.test_properties()
def test_str(self):
self.assertEqual(str(self.issue), self.issue.report_url)
class TestTrackedUnresolvedIssue(TestNewIssue):
def setUp(self):
self.settings = Settings({}, {}, [])
self.issue = TrackedUnresolvedIssue(self.settings)
def test_properties(self):
self.assertFalse(self.issue.new)
self.assertTrue(self.issue.tracked)
self.assertEqual(self.issue.uri, 'http://issuetracker.example.com/123')
def test_str(self):
self.assertEqual(str(self.issue), self.issue.uri)
# TrackedResolvedIssue should behave the same way as TrackedUnresolvedIssue
# so just inherit the whole test case to run the very same test
class TestTrackedResolvedIssue(TestTrackedUnresolvedIssue):
def setUp(self):
self.settings = Settings({}, {}, [])
self.issue = TrackedResolvedIssue(self.settings)
class TestSubmitDisabled(unittest.TestCase):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': False,
'update_issues': False,
'create_issues_instead_of_update': False,
}
},
{},
[]
)
def setUp(self):
self.new = NewIssue(self.settings)
self.unresolved = TrackedUnresolvedIssue(self.settings)
self.resolved = TrackedResolvedIssue(self.settings)
# sync the issues so that lookup is not called => logged during submit
self.new.sync()
self.unresolved.sync()
self.resolved.sync()
@contextlib.contextmanager
def assertUnchanged(self, issue):
old_uri = issue.uri
old_new = issue.new
old_tracked = issue.tracked
yield issue
self.assertEqual(issue.uri, old_uri)
self.assertEqual(issue.new, old_new)
self.assertEqual(issue.tracked, old_tracked)
def assertSubmitNoop(self, issue):
with self.assertUnchanged(issue):
with self.assertLogs('test', level='INFO') as cm:
issue.submit()
issue.submit()
self.assertEqual(cm.output, [
"INFO:test:submit was called",
"INFO:test:submit was called",
])
def assertSubmitCreate(self, issue):
with self.assertLogs('test', level='INFO') as cm:
result1 = issue.submit()
result2 = issue.submit()
self.assertEqual(cm.output, [
"INFO:test:submit was called",
"INFO:test:make was called",
"INFO:test:submit was called",
])
self.assertEqual(result1, result2)
return result1
def assertSubmitUpdate(self, issue):
with self.assertUnchanged(issue):
with self.assertLogs('test', level='INFO') as cm:
result1 = issue.submit()
result2 = issue.submit()
self.assertEqual(cm.output, [
"INFO:test:submit was called",
"INFO:test:update was called",
"INFO:test:submit was called",
])
self.assertEqual(result1, result2)
return result1
def testNew(self):
self.assertSubmitNoop(self.new)
def testUnresolved(self):
self.assertSubmitNoop(self.unresolved)
def testResolved(self):
self.assertSubmitNoop(self.resolved)
class TestSubmitCreateUpdate(TestSubmitDisabled):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': True,
'update_issues': True,
'create_issues_instead_of_update': False,
}
},
{},
[]
)
def testNew(self):
result = self.assertSubmitCreate(self.new)
self.assertTrue(self.new.new)
self.assertTrue(self.new.tracked)
self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
self.assertEqual(result, self.new.uri)
# repeated submit doesn't do anything
with self.assertUnchanged(self.new):
with self.assertLogs('test', level='INFO') as cm:
result = self.new.submit()
self.assertEqual(cm.output, [
"INFO:test:submit was called",
])
def testUnresolved(self):
self.assertSubmitUpdate(self.unresolved)
def testResolved(self):
self.assertSubmitUpdate(self.resolved)
class TestSubmitCreateOnlyNew(TestSubmitCreateUpdate):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': True,
'update_issues': False,
'create_issues_instead_of_update': False,
}
},
{},
[]
)
def testUnresolved(self):
self.assertSubmitNoop(self.unresolved)
def testResolved(self):
self.assertSubmitNoop(self.resolved)
class TestSubmitUpdateOnlyTracked(TestSubmitCreateUpdate):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': False,
'update_issues': True,
'create_issues_instead_of_update': False,
}
},
{},
[]
)
def testNew(self):
self.assertSubmitNoop(self.new)
class TestSubmitCreateAlwaysWithUpdateOff(TestSubmitCreateUpdate):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': True,
'update_issues': False, # This should have no effect
'create_issues_instead_of_update': True,
}
},
{},
[]
)
def testUnresolved(self):
old_uri = self.unresolved.uri
result = self.assertSubmitCreate(self.unresolved)
self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
self.assertEqual(self.unresolved.uri, result)
self.assertNotEqual(result, old_uri)
def testResolved(self):
old_uri = self.resolved.uri
result = self.assertSubmitCreate(self.resolved)
self.assertEqual(result, 'http://issuetracker.example.com/new_issue')
self.assertEqual(self.resolved.uri, result)
self.assertNotEqual(result, old_uri)
# The update_issues setting should have no effect when
# create_issues_instead_of_update is set to True.
class TestSubmitCreateAlwaysWithUpdateOn(TestSubmitCreateAlwaysWithUpdateOff):
settings = Settings(
{
'issueAnalyzer' : {
'create_issues': True,
'update_issues': True, # This should have no effect
'create_issues_instead_of_update': True,
}
},
{},
[]
)
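# Summary of the submit() behaviour exercised by the test cases above
# (flag names refer to the 'issueAnalyzer' settings dictionary):
#   create_issues=False, update_issues=False -> submit() is a no-op for every issue
#   create_issues=True,  update_issues=True  -> new issues are created, tracked ones are updated
#   create_issues=True,  update_issues=False -> only new issues are created
#   create_issues=False, update_issues=True  -> only tracked issues are updated
#   create_issues_instead_of_update=True     -> tracked issues are re-created under a new URI,
#                                               regardless of the update_issues value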
| 29.896429 | 79 | 0.611158 | 7,872 | 0.940389 | 288 | 0.034404 | 664 | 0.079321 | 0 | 0 | 1,662 | 0.198543 |
16cb0577b93ac4b27ff6f443a2d517ea18cbf9f7 | 6,421 | py | Python | naplib/alignment/prosodylab_aligner/__main__.py | gavinmischler/naplib-python | 8cd7a0fc700f1c07243169ec42fc087955885adc | [
"MIT"
] | 1 | 2022-03-02T20:54:23.000Z | 2022-03-02T20:54:23.000Z | naplib/alignment/prosodylab_aligner/__main__.py | gavinmischler/gavlib | cacf9180b1442e4aed98b6182d586747a6d6ef90 | [
"MIT"
] | null | null | null | naplib/alignment/prosodylab_aligner/__main__.py | gavinmischler/gavlib | cacf9180b1442e4aed98b6182d586747a6d6ef90 | [
"MIT"
] | null | null | null | # Copyright (c) 2011-2014 Kyle Gorman and Michael Wagner
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Command-line driver for the module
"""
import logging
import os
import sys
import yaml
from bisect import bisect
from shutil import copyfile
from textgrid import MLF
from corpus import Corpus
from aligner import Aligner
from archive import Archive
from utilities import splitname, resolve_opts, \
ALIGNED, CONFIG, HMMDEFS, MACROS, SCORES
from argparse import ArgumentParser
DICTIONARY = "eng.dict"
MODEL = "eng.zip"
LOGGING_FMT = "%(message)s"
# parse arguments
argparser = ArgumentParser(prog="{} -m aligner".format(sys.executable),
description="Prosodylab-Aligner")
argparser.add_argument("-c", "--configuration",
help="config file")
argparser.add_argument("-d", "--dictionary", metavar="DICT", action="append",
help="dictionary file (default: {}) (can specify multiple)".format(DICTIONARY))
argparser.add_argument("-s", "--samplerate", type=int,
help="analysis samplerate (in Hz)")
argparser.add_argument("-e", "--epochs", type=int,
help="# of epochs of training per round")
input_group = argparser.add_argument_group()
input_group.add_argument("-r", "--read",
help="source for a precomputed acoustic model")
input_group.add_argument("-t", "--train",
help="directory containing data for training")
output_group = argparser.add_mutually_exclusive_group(required=True)
output_group.add_argument("-a", "--align",
help="directory containing data to align")
output_group.add_argument("-w", "--write",
help="destination for computed acoustic model")
verbosity_group = argparser.add_mutually_exclusive_group()
verbosity_group.add_argument("-v", "--verbose", action="store_true",
help="Verbose output")
verbosity_group.add_argument("-V", "--extra-verbose", action="store_true",
help="Even more verbose output")
args = argparser.parse_args()
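# Illustrative invocations (editorial sketch; the flags are the ones defined
# above, the file and directory names are hypothetical examples, not part of
# the original module):
#   align a directory of recordings with the bundled model and dictionary:
#       python -m aligner -r eng.zip -d eng.dict -a data/
#   train a new acoustic model on a corpus and write it out:
#       python -m aligner -t corpus/ -e 10 -w my_model.zip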
# hack to allow proper override of default dictionary
if not args.dictionary:
args.dictionary = [DICTIONARY]
# set up logging
loglevel = logging.WARNING
if args.extra_verbose:
loglevel = logging.DEBUG
elif args.verbose:
loglevel = logging.INFO
logging.basicConfig(format=LOGGING_FMT, level=loglevel)
# input: pick one
if args.train:
if args.read:
logging.error("Cannot train on persistent model.")
exit(1)
logging.info("Preparing corpus '{}'.".format(args.train))
opts = resolve_opts(args)
corpus = Corpus(args.train, opts)
logging.info("Preparing aligner.")
aligner = Aligner(opts)
logging.info("Training aligner on corpus '{}'.".format(args.train))
aligner.HTKbook_training_regime(corpus, opts["epochs"],
flatstart=(args.read is None))
else:
if not args.read:
args.read = MODEL
logging.info("Reading aligner from '{}'.".format(args.read))
# warn about irrelevant flags
if args.configuration:
logging.warning("Ignoring config flag (-c/--configuration).")
args.configuration = None
if args.epochs:
logging.warning("Ignoring epochs flag (-e/--epochs).")
if args.samplerate:
logging.warning("Ignoring samplerate flag (-s/--samplerate).")
args.samplerate = None
# create archive from -r argument
archive = Archive(args.read)
# read configuration file therefrom, and resolve options with it
args.configuration = os.path.join(archive.dirname, CONFIG)
opts = resolve_opts(args)
# initialize aligner and set it to point to the archive data
aligner = Aligner(opts)
aligner.curdir = archive.dirname
# output: pick one
if args.align:
# check to make sure we're not aligning on the training data
if (not args.train) or (os.path.realpath(args.train) !=
os.path.realpath(args.align)):
logging.info("Preparing corpus '{}'.".format(args.align))
corpus = Corpus(args.align, opts)
logging.info("Aligning corpus '{}'.".format(args.align))
aligned = os.path.join(args.align, ALIGNED)
scores = os.path.join(args.align, SCORES)
aligner.align_and_score(corpus, aligned, scores)
logging.debug("Wrote MLF file to '{}'.".format(aligned))
logging.debug("Wrote likelihood scores to '{}'.".format(scores))
logging.info("Writing TextGrids.")
size = MLF(aligned).write(args.align)
if not size:
logging.error("No paths found!")
exit(1)
logging.debug("Wrote {} TextGrids.".format(size))
elif args.write:
# create and populate archive
(_, basename, _) = splitname(args.write)
archive = Archive.empty(basename)
archive.add(os.path.join(aligner.curdir, HMMDEFS))
archive.add(os.path.join(aligner.curdir, MACROS))
# whatever this is, it's not going to work once you move the data
if "dictionary" in opts:
del opts["dictionary"]
with open(os.path.join(archive.dirname, CONFIG), "w") as sink:
yaml.dump(opts, sink)
(basename, _) = os.path.splitext(args.write)
archive_path = os.path.relpath(archive.dump(basename))
logging.info("Wrote aligner to '{}'.".format(archive_path))
# else unreachable
logging.info("Success!")
| 40.13125 | 102 | 0.68167 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,711 | 0.422208 |
16cc459343115a5e0d636bad4bf667af5c4f5d6d | 4,021 | py | Python | init/build_statements.py | andgein/sis-2017-winter-olymp | e6cf290ab2c24a22ca76949895e2a6cc6d818dc0 | [
"MIT"
] | null | null | null | init/build_statements.py | andgein/sis-2017-winter-olymp | e6cf290ab2c24a22ca76949895e2a6cc6d818dc0 | [
"MIT"
] | null | null | null | init/build_statements.py | andgein/sis-2017-winter-olymp | e6cf290ab2c24a22ca76949895e2a6cc6d818dc0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import codecs
import os
import os.path
import shutil
import subprocess
import logging
import glob
import json
CONTEST_DIR = 'polygon-contest'
INIT_FILE = 'init.txt'
BUILD_DIR = 'build'
LANGUAGE = 'russian'
FILES_DIR = 'files-' + LANGUAGE
def time_limit_from_int(tl):
tl //= 1000
return str(tl) + ' секунд' + ('a' if tl == 1 else 'ы')
def memory_limit_from_int(ml):
return str(ml // (1024 ** 2)) + ' мегабайт'
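# Illustrative conversions (editorial note): the limits arrive from the contest
# package in milliseconds and bytes, so for example
#   time_limit_from_int(2000)             -> '2 секунды'
#   memory_limit_from_int(256 * 1024**2)  -> '256 мегабайт'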
def build_with_text(text, replace_data, result, section='', problem_name=''):
text = text.replace('%TEXT%', section + '\n' + replace_data)
with codecs.open(os.path.join(BUILD_DIR, 'data.tex'), 'w', 'utf-8') as data_file:
data_file.write(text)
cwd = os.getcwd()
os.chdir(BUILD_DIR)
logging.info('Compile problem %s' % problem_name)
for _ in range(2):
subprocess.check_output(['pdflatex', 'compile.tex'])
os.chdir(cwd)
shutil.copy(os.path.join(BUILD_DIR, 'compile.pdf'), os.path.join(FILES_DIR, result))
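# build_with_text() is driven from main() below: it substitutes the statement
# body into the %TEXT% placeholder, writes build/data.tex, runs pdflatex twice
# over build/compile.tex and copies the resulting PDF into the files-russian/
# output folder under the given result name.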
def main():
id_by_name = {}
with open(INIT_FILE, 'r', encoding='utf-8') as init:
for line in init:
if not line.strip():
continue
line = line.strip().split('\t')
id_by_name[line[11]] = line[2]
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s [%(levelname)s] %(message)s')
if not os.path.exists(FILES_DIR):
logging.info('Create folder for output files: %s' % FILES_DIR)
os.mkdir(FILES_DIR)
if not os.path.exists(BUILD_DIR):
logging.info('Create folder for build files: %s' % BUILD_DIR)
os.mkdir(BUILD_DIR)
problems_dir = os.path.join(CONTEST_DIR, 'problems')
for problem_counter, problem_dir in enumerate(glob.glob(os.path.join(problems_dir, '*')), start=1):
statement_dir = os.path.join(problem_dir, 'statements', LANGUAGE)
properties_file_name = os.path.join(statement_dir, 'problem-properties.json')
logging.info('Read problem properties file %s' % properties_file_name)
with codecs.open(properties_file_name, 'r', 'utf-8') as properties_file:
properties = json.load(properties_file)
name = properties['name']
legend = properties['legend']
input_file = properties['inputFile']
output_file = properties['outputFile']
time_limit = time_limit_from_int(properties['timeLimit'])
memory_limit = memory_limit_from_int(properties['memoryLimit'])
input_format = properties['input']
output_format = properties['output']
samples = "".join(["\exmp{%s}{%s}%%\n" % (sample['input'], sample['output'])
for sample in properties['sampleTests']])
notes = ''
if len(properties.get('notes','')) > 0:
notes = '\\Note\n' + properties['notes']
shutil.copy('template.tex', os.path.join(BUILD_DIR, 'compile.tex'))
shutil.copy('olymp.sty', os.path.join(BUILD_DIR, 'olymp.sty'))
with codecs.open('data.tex', 'r', 'utf-8') as data_file:
data = data_file.read()
problem_name = os.path.basename(problem_dir)
problem_id = id_by_name[problem_name]
data = data.replace('%NAME%', name).replace('%INPUT_FILE%', input_file).replace('%OUTPUT_FILE%', output_file).\
replace('%TIME_LIMIT%', time_limit).replace('%MEMORY_LIMIT%', memory_limit).\
replace('%ID%', problem_id).\
replace('%PROBLEM_COUNTER%', str(problem_counter)).\
replace('%STATEMENT_DIR%', os.path.join('..', statement_dir).replace('\\', '/') + '/')
build_with_text(data, legend + '\n\\InputFile\n' + input_format + '\n\\OutputFile\n' + output_format +
"\\begin{example}" + samples +"\\end{example}\n" + notes,
problem_name + '.pdf', problem_name=problem_name)
if __name__ == '__main__':
main()
| 39.038835 | 123 | 0.607063 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 857 | 0.212339 |
16cc8a900ca38b32bc2a6bbb0fff269ef5b921da | 1,430 | py | Python | conanfile.py | mmurooka/mc_rtc_data | bf45279cc59f9d85915cb2a01a84c23e5ce45958 | [
"BSD-2-Clause"
] | 1 | 2021-04-12T06:02:53.000Z | 2021-04-12T06:02:53.000Z | conanfile.py | mmurooka/mc_rtc_data | bf45279cc59f9d85915cb2a01a84c23e5ce45958 | [
"BSD-2-Clause"
] | 3 | 2020-06-18T10:01:15.000Z | 2021-11-08T12:43:43.000Z | conanfile.py | mmurooka/mc_rtc_data | bf45279cc59f9d85915cb2a01a84c23e5ce45958 | [
"BSD-2-Clause"
] | 4 | 2020-03-12T08:57:41.000Z | 2021-09-07T03:07:56.000Z | # -*- coding: utf-8 -*-
#
from conans import python_requires
import conans.tools as tools
import os
base = python_requires("Eigen3ToPython/latest@multi-contact/dev")
class MCRTCDataConan(base.Eigen3ToPythonConan):
name = "mc_rtc_data"
version = "1.0.4"
description = "Environments/Robots description for mc_rtc"
topics = ("robotics", "data")
url = "https://github.com/jrl-umi3218/mc_rtc_data"
homepage = "https://github.com/jrl-umi3218/mc_rtc_data"
author = "Pierre Gergondet <[email protected]>"
license = "BSD-2-Clause"
exports = ["LICENSE"]
exports_sources = ["CMakeLists.txt", "conan/CMakeLists.txt", "cmake/*", "jvrc_description/*", "mc_env_description/*", "mc_int_obj_description/*", "mc_rtc_data/*"]
generators = "cmake"
settings = "os", "arch"
requires = ()
def config_options(self):
del self.options.python2_version
del self.options.python3_version
def package_id(self):
pass
def package(self):
cmake = self._configure_cmake()
cmake.install()
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
for f in [".catkin", "_setup_util.py", "env.sh", "setup.bash", "local_setup.bash", "setup.sh", "local_setup.sh", "setup.zsh", "local_setup.zsh", ".rosinstall"]:
p = os.path.join(self.package_folder, f)
if os.path.exists(p):
os.remove(p)
| 34.878049 | 168 | 0.652448 | 1,259 | 0.88042 | 0 | 0 | 0 | 0 | 0 | 0 | 596 | 0.416783 |
16cd7731b200cbda5815fed9bc8eb8baf3b78188 | 1,217 | py | Python | hyperion/migrations/0006_auto_20190218_2251.py | ExiaSR/hyperion | 0b14ef55ed00b964f1966c722f4162c475aa4895 | [
"MIT"
] | 3 | 2019-01-30T03:50:04.000Z | 2019-02-20T00:33:05.000Z | hyperion/migrations/0006_auto_20190218_2251.py | ExiaSR/hyperion | 0b14ef55ed00b964f1966c722f4162c475aa4895 | [
"MIT"
] | 173 | 2019-01-30T08:30:54.000Z | 2019-04-05T19:43:06.000Z | hyperion/migrations/0006_auto_20190218_2251.py | ExiaSR/hyperion | 0b14ef55ed00b964f1966c722f4162c475aa4895 | [
"MIT"
] | 2 | 2019-05-06T22:59:56.000Z | 2020-09-29T03:13:03.000Z | # Generated by Django 2.1.5 on 2019-02-18 22:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('hyperion', '0005_auto_20190212_2116'),
]
operations = [
migrations.RenameField(
model_name='post',
old_name='visibleTo',
new_name='visible_to',
),
migrations.AddField(
model_name='post',
name='content_type',
field=models.CharField(choices=[('1', 'text/plain'), ('2', 'text/markdown'), ('3', 'image/png;base64'), ('4', 'image/jpeg;base64'), ('5', 'application/base64')], default='1', max_length=1),
),
migrations.AddField(
model_name='post',
name='visibility',
field=models.CharField(choices=[('1', 'PUBLIC'), ('2', 'FOAF'), ('3', 'FRIENDS'), ('4', 'PRIVATE'), ('5', 'SERVERONLY')], default='1', max_length=1),
),
migrations.AlterField(
model_name='comment',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='hyperion.UserProfile'),
),
]
| 34.771429 | 201 | 0.571898 | 1,091 | 0.896467 | 0 | 0 | 0 | 0 | 0 | 0 | 362 | 0.297453 |
16cdaac129cd705700eab605365385f7b7b8a82c | 2,236 | py | Python | pottan_ocr/utils.py | nithyadurai87/pottan-ocr-tamil | e455891dc0ddd508d1318abf84fc59cc548873f7 | [
"MIT"
] | 5 | 2019-05-05T18:26:14.000Z | 2019-08-02T05:04:12.000Z | pottan_ocr/utils.py | nithyadurai87/pottan-ocr-tamil | e455891dc0ddd508d1318abf84fc59cc548873f7 | [
"MIT"
] | 3 | 2020-07-17T02:28:11.000Z | 2021-05-08T21:58:10.000Z | pottan_ocr/utils.py | nithyadurai87/pottan-ocr-tamil | e455891dc0ddd508d1318abf84fc59cc548873f7 | [
"MIT"
] | 3 | 2020-04-11T19:39:08.000Z | 2020-12-21T08:44:21.000Z | import torch
import json
import numpy as np
from torch.autograd import Variable
import gzip
import yaml
from re import split
from matplotlib import pyplot
def showImg( im ):
pyplot.imshow( im )
pyplot.show()
def myOpen( fname, mode ):
return open( fname, mode, encoding="utf-8" )
def readFile( fname ):
opener, mode = ( gzip.open, 'rt' ) if fname[-3:] == '.gz' else ( open, 'r' )
with opener( fname, mode ) as f:
return f.read()
def readLines( fname ):
return split('[\r\n]', readFile( fname ) )
def readJson( fname ):
with myOpen( fname, 'r' ) as f:
return json.load( f )
def writeFile( fname, contents ):
with myOpen( fname, 'w' ) as f:
f.write( contents )
def writeJson( fname, data ):
with myOpen( fname, 'w') as outfile:
json.dump(data, outfile)
def readYaml( fname ):
with myOpen(fname, 'r') as fp:
return yaml.load( fp )
config = readYaml('./config.yaml')
class averager(object):
"""Compute average for `torch.Variable` and `torch.Tensor`. """
def __init__(self):
self.reset()
def add(self, v):
if isinstance(v, Variable):
count = v.data.numel()
v = v.data.sum()
elif isinstance(v, torch.Tensor):
count = v.numel()
v = v.sum()
self.n_count += count
self.sum += v
def reset(self):
self.n_count = 0
self.sum = 0
def val(self):
res = 0
if self.n_count != 0:
res = self.sum / float(self.n_count)
return res
def loadTrainedModel( model, opt ):
"""Load a pretrained model into given model"""
print('loading pretrained model from %s' % opt.crnn)
if( opt.cuda ):
stateDict = torch.load(opt.crnn )
else:
stateDict = torch.load(opt.crnn, map_location={'cuda:0': 'cpu'} )
# Handle the case of some old torch version. It will save the data as module.<xyz> . Handle it
if( list( stateDict.keys() )[0][:7] == 'module.' ):
for key in list(stateDict.keys()):
stateDict[ key[ 7:] ] = stateDict[key]
del stateDict[ key ]
model.load_state_dict( stateDict )
print('Completed loading pre trained model')
| 24.304348 | 99 | 0.58542 | 611 | 0.273256 | 0 | 0 | 0 | 0 | 0 | 0 | 351 | 0.156977 |
16cef8471ab7389079cb6001c00f1c83826a7643 | 1,546 | py | Python | pyvips/error.py | kleisauke/pyvips | ae3b0c09669cfb662e773e8ae69cf589ac15e320 | [
"MIT"
] | null | null | null | pyvips/error.py | kleisauke/pyvips | ae3b0c09669cfb662e773e8ae69cf589ac15e320 | [
"MIT"
] | null | null | null | pyvips/error.py | kleisauke/pyvips | ae3b0c09669cfb662e773e8ae69cf589ac15e320 | [
"MIT"
] | null | null | null | # errors from libvips
import sys
import logging
from pyvips import ffi, vips_lib
logger = logging.getLogger(__name__)
_is_PY3 = sys.version_info[0] == 3
if _is_PY3:
text_type = str
else:
text_type = unicode
ffi.cdef('''
const char* vips_error_buffer (void);
void vips_error_clear (void);
''')
def _to_bytes(x):
"""Convert to a byte string.
Convert a Python unicode string to a utf-8-encoded byte string. You must
call this on strings you pass to libvips.
"""
if isinstance(x, text_type):
x = x.encode()
return x
def _to_string(x):
"""Convert to a unicode string.
If x is a byte string, assume it is utf-8 and decode to a Python unicode
string. You must call this on text strings you get back from libvips.
"""
if _is_PY3 and isinstance(x, bytes):
x = x.decode('utf-8')
return x
class Error(Exception):
"""An error from vips.
Attributes:
message (str): a high-level description of the error
detail (str): a string with some detailed diagnostics
"""
def __init__(self, message, detail=None):
self.message = message
if detail is None or detail == "":
detail = _to_string(ffi.string(vips_lib.vips_error_buffer()))
vips_lib.vips_error_clear()
self.detail = detail
logger.debug('Error %s %s', self.message, self.detail)
def __str__(self):
return '{0}\n {1}'.format(self.message, self.detail)
__all__ = [
'_to_bytes', '_to_string', 'Error',
]
| 20.891892 | 76 | 0.638422 | 613 | 0.396507 | 0 | 0 | 0 | 0 | 0 | 0 | 693 | 0.448254 |
16cf7d6d5783bc8dc6f881f5646090c8b7e4317c | 7,584 | py | Python | population_estimator/curses_io.py | cruzanta/population-estimator | cb56c551b615726543d8b1643302be2d30fd593c | [
"MIT"
] | 1 | 2019-02-10T01:30:09.000Z | 2019-02-10T01:30:09.000Z | population_estimator/curses_io.py | cruzantada/population-estimator | cb56c551b615726543d8b1643302be2d30fd593c | [
"MIT"
] | null | null | null | population_estimator/curses_io.py | cruzantada/population-estimator | cb56c551b615726543d8b1643302be2d30fd593c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Module for painting output on and obtaining input from a text-based terminal
window using the curses library.
"""
import curses
import textwrap
def display_string(screen, a_string, output_line):
# Paints a string on a text-based terminal window.
_, width = screen.getmaxyx()
try:
screen.addstr(output_line, 0, textwrap.fill(a_string, width - 1))
except curses.error:
screen.addstr(0, 0, textwrap.fill(
'Terminal window too small for output! Please resize. ', width - 1))
return output_line
def display_list_items(screen, a_list, output_line):
# Paints each item of a list on a text-based terminal window.
for item in a_list:
output_line = display_string(screen, '%s' % (item), output_line)
output_line += 1
return output_line
def display_formatted_dict(screen, dct, output_line):
# Paints each key, value pair of a dict on a text-based terminal window.
for key, value in dct.items():
if isinstance(value, int):
value = '{:,}'.format(value)
formatted_dict = '%s: %s' % (key, value)
output_line = display_string(screen, formatted_dict, output_line)
output_line += 1
return output_line
def display_string_with_prompt(screen, first_line_num, a_string, prompt):
"""Paints two strings and accepts input.
Paints two strings on a text-based terminal window. The latter of the two
strings serves as the prompt for the user to enter input.
Args:
screen: A window object that represents the text-based terminal window.
first_line_num: An integer that represents the location along the y-axis
of the terminal window where the first character of the first string
is painted.
a_string: The first string that is painted on the terminal window.
prompt: A string that serves as a prompt for the user to enter input.
Returns:
A string that the user enters in as input.
"""
screen.clear()
output_line = first_line_num
output_line = display_string(screen, a_string, output_line)
output_line += 3
output_line = display_string(screen, prompt, output_line)
screen.refresh()
return screen.getstr(output_line, len(prompt) + 1)
def display_list_items_with_prompt(screen, first_line_num, a_string, a_list,
prompt):
"""Paints a string, each item of a list, and accepts input.
Paints a string, each item of a list, and another string on a text-based
terminal window. Each item of the list is painted on its own line.
The second string serves as a prompt for the user to enter input.
Args:
screen: A window object that represents the text-based terminal window.
first_line_num: An integer that represents the location along the y-axis
of the terminal window where the first character of the first string
is painted.
a_string: The first string that is painted on the terminal window.
a_list: A list whose items are painted on each line of the terminal
window.
prompt: A string that serves as a prompt for the user to enter input.
Returns:
A string that the user enters in as input.
"""
screen.clear()
output_line = first_line_num
output_line = display_string(screen, a_string, output_line)
output_line += 2
output_line = display_list_items(screen, a_list, output_line)
output_line += 1
output_line = display_string(screen, prompt, output_line)
screen.refresh()
return screen.getstr(output_line, len(prompt) + 1)
def display_formatted_dicts_with_prompt(screen, first_line_num, a_string,
list_of_dicts, prompt):
"""Paints a string, each item of each dict in a list, and accepts input.
Paints a string, each item of each dict in a list, and another string on a
text-based terminal window. Each key, value pair of each dict is painted on
its own line with the key and value separated by a colon. The second string
serves as a prompt for the user to enter input.
Args:
screen: A window object that represents the text-based terminal window.
first_line_num: An integer that represents the location along the y-axis
of the terminal window where the first character of the first string
is painted.
a_string: The first string that is painted on the terminal window.
list_of_dicts: A list of dictionaries whose key, value pairs are painted
on their own line of the terminal window.
prompt: A string that serves as a prompt for the user to enter input.
Returns:
A string that the user enters in as input.
"""
screen.clear()
output_line = first_line_num
output_line = display_string(screen, a_string, output_line)
output_line += 2
for dct in list_of_dicts:
output_line = display_formatted_dict(screen, dct, output_line)
output_line += 1
output_line += 1
output_line = display_string(screen, prompt, output_line)
screen.refresh()
return screen.getstr(output_line, len(prompt) + 1)
def get_user_menu_selection(screen, first_line_num, a_string, menu_items,
prompt):
"""Paints a string, a menu, and accepts input.
Paints a string, a menu, and another string on a text-based terminal window.
The menu is composed of the items in a list, and each item is assigned its
own number that represents the order in which the item appears in the menu.
The second string serves as a prompt for the user to enter a number from the
menu.
Args:
screen: A window object that represents the text-based terminal window.
first_line_num: An integer that represents the location along the y-axis
of the terminal window where the first character of the first string
is painted.
a_string: The first string that is painted on the terminal window.
menu_items: A list whose items are painted on each line of the terminal
window as menu options.
prompt: A string that serves as a prompt for the user to enter a number
from the menu.
Returns:
A string representation of the item in 'menu_items' that the user
selects.
"""
# Create a dictionary that contains the items in 'menu_items'. Each item
# is added as a value with an integer key that represents the order in which
# the item will appear in the menu.
item_key = 1
selection_items = {}
for item in menu_items:
selection_items['%s' % (item_key)] = item
item_key += 1
# Display the menu and prompt the user for a selection.
while True:
screen.clear()
output_line = first_line_num
output_line = display_string(screen, a_string, output_line)
output_line += 3
for menu_num in sorted(selection_items.iterkeys()):
item_line = '%s) %s' % (menu_num, selection_items[menu_num])
output_line = display_string(screen, item_line, output_line)
output_line += 1
output_line += 1
output_line = display_string(screen, prompt, output_line)
screen.refresh()
input = screen.getstr(output_line, len(prompt) + 1)
if input not in selection_items.keys():
continue # Force the user to enter a valid selection.
else:
return selection_items[input]
| 36.114286 | 80 | 0.676292 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,317 | 0.569225 |
16cfaddb94dc7d27c3d0480f8a3e480fd2dc70da | 30 | py | Python | tools/micropython-mockup/urandom.py | hwinther/lanot | f6700cacb3946535081624467b746fdfd38e021d | [
"Apache-2.0"
] | null | null | null | tools/micropython-mockup/urandom.py | hwinther/lanot | f6700cacb3946535081624467b746fdfd38e021d | [
"Apache-2.0"
] | null | null | null | tools/micropython-mockup/urandom.py | hwinther/lanot | f6700cacb3946535081624467b746fdfd38e021d | [
"Apache-2.0"
] | null | null | null | def randrange(n, y):
pass
| 10 | 20 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
16d095db1ff9ef61a032d5e0564695c1cb47f1b3 | 6,986 | py | Python | SAP/released_tr_email_sender/ui.py | botisko/personal_programs | 2e234271db438e228b9028b8180a6e833f482104 | [
"MIT"
] | null | null | null | SAP/released_tr_email_sender/ui.py | botisko/personal_programs | 2e234271db438e228b9028b8180a6e833f482104 | [
"MIT"
] | 1 | 2021-01-08T13:25:16.000Z | 2021-01-08T13:25:16.000Z | SAP/released_tr_email_sender/ui.py | botisko/personal_programs | 2e234271db438e228b9028b8180a6e833f482104 | [
"MIT"
] | 1 | 2021-01-08T12:52:29.000Z | 2021-01-08T12:52:29.000Z | import json
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
from tr_data import TRData, NO_DATA_MEETS_CRITERIA
from email_text import email_body_template
from helpers import send_email
RECIPIENT = "<email_address>"  # placeholder string; the real address is redacted in the source
EXCEPTION_FILE = "tr_number_exceptions.json"
class TrEmailSender:
def __init__(self, transport_requests: TRData):
self.transport_requests = transport_requests
self.exceptions = self.load_exceptions()
# WINDOW CREATION
self.window = Tk()
self.window.title("Send email with import requests to TST")
self.window.config(padx=20, pady=20)
        # TITLE LABEL
self.title_lbl = Label(
text="Please select TRs to be included into email: ",
)
# BUTTONS
self.refresh_btn = Button(text="REFRESH", command=self.refresh)
self.exceptions_btn = Button(text="Add to exceptions", command=self.add_to_exceptions)
self.select_all_btn = Button(text="Select All", command=self.select_all)
self.send_btn = Button(text="SEND", command=self.send_email)
# list of TRs
columns_labels = {
'tr_number': ("TR Number", 100),
'description': ("Description", 350),
'tkt_type': ("Ticket Type", 80),
'ticket_num': ("Ticket Number", 80),
'module': ("SAP Module", 80),
'export_datetime': ("Export Timestamp", 150),
'owner': ("Owner", 80)
}
# TREE VIEW for list display
self.tr_tree_view = ttk.Treeview(columns=tuple(columns_labels.keys()), show='headings')
# Update columns
for column, (label, field_length) in columns_labels.items():
self.tr_tree_view.column(column, minwidth=80, width=field_length, anchor='w', stretch=False)
self.tr_tree_view.heading(column, text=label)
# insert data
self.populate_tree_view_lines()
#LAYOUT PLACEMENT
self.title_lbl.grid(row=0, column=0, sticky=W)
self.tr_tree_view.grid(row=1, column=0, rowspan=4)
self.refresh_btn.grid(row=1, column=1, sticky=N+S+E+W, padx=2, pady=2)
self.exceptions_btn.grid(row=2, column=1, sticky=E+W+S, padx=1, pady=2)
self.select_all_btn.grid(row=3, column=1, sticky=E+W+N, padx=1, pady=2)
self.send_btn.grid(row=4, column=1, sticky=N+S+E+W, padx=1, pady=2)
# DISPLAY WINDOW
self.window.mainloop()
def refresh(self):
# delete all rows in tree view
for item in self.tr_tree_view.get_children():
self.tr_tree_view.delete(item)
# update with new data
self.transport_requests.refresh()
self.exceptions = self.load_exceptions()
self.populate_tree_view_lines()
def populate_tree_view_lines(self):
all_are_in_exceptions = True
for (tr_number, export_timestamp, owner, description, ticket_number, sap_module, ticket_type) in self.transport_requests.data:
# check if not in exception
if not tr_number in self.exceptions:
year = export_timestamp[:4]
month = export_timestamp[4:6]
day = export_timestamp[6:8]
time = f"{export_timestamp[8:10]}:{export_timestamp[10:12]}:{export_timestamp[12:]}"
export_date_time = f"{day}/{month}/{year} - {time}"
line_values = (tr_number, description, ticket_type, ticket_number, sap_module, export_date_time, owner)
self.tr_tree_view.insert('', 'end', values=line_values)
all_are_in_exceptions = False
# if all TRs are in exceptions, insert only pre-defined information
if all_are_in_exceptions:
tr_number = NO_DATA_MEETS_CRITERIA[0][0]
description = NO_DATA_MEETS_CRITERIA[0][3]
no_data_information = (tr_number, description, "", "", "", "", "")
self.tr_tree_view.insert('', 'end', values=no_data_information)
def select_all(self):
items = self.tr_tree_view.get_children()
self.tr_tree_view.selection_add(items)
def get_selected_item_ids(self):
return self.tr_tree_view.selection()
def send_email(self):
# get selected lines
selected_ids = self.get_selected_item_ids()
# get data of each id
if not selected_ids:
messagebox.showinfo(
title="Status Info",
message="There is nothing to send.\n\nPlease refresh the page."
)
return None
email_details = self.prepare_email_details(selected_ids)
# send email
if send_email(**email_details):
messagebox.showinfo(
title="Status Info", message="Email has been sent!")
# add trs into exceptions
return self.add_to_exceptions()
else:
return None
def prepare_email_details(self, selected_ids):
transport_data = [self.tr_tree_view.item(id_tag, 'values') for id_tag in selected_ids]
# prepare list of transports for email body
html_list_of_trs = ""
ticket_numbers = set()
for (tr_number, description, ticket_type, ticket_number, sap_module, export_timestamp, owner) in transport_data:
html_list_of_trs += f"<li>{tr_number} - {owner} - {description}</li>"
ticket_numbers.add(ticket_number)
# prepare email details
email_details = {
'recipient': RECIPIENT,
'subject': f"Transport requests for: {', '.join(sorted(ticket_numbers)).rstrip(', ')}",
'html_body': email_body_template.format(html_list_of_trs)
}
return email_details
def load_exceptions(self):
try:
with open(file=EXCEPTION_FILE, mode='r') as file:
exception_list = set(json.load(file)['tr_numbers'])
except FileNotFoundError:
with open(file=EXCEPTION_FILE, mode='w') as file:
exception_dict = {'tr_numbers': []}
json.dump(exception_dict, file, indent=4)
return set()
else:
return exception_list
def add_to_exceptions(self):
selected_ids = self.get_selected_item_ids()
if not selected_ids:
messagebox.showinfo(
title="Status Info",
message="Nothing has been selected.\n\nPlease refresh the page."
)
return None
transport_numbers = [self.tr_tree_view.item(id_tag, 'values')[0] for id_tag in selected_ids]
# add TR number of selected items to exception json file
for number in transport_numbers:
self.exceptions.add(number)
updated_data= {'tr_numbers': list(self.exceptions)}
with open(file=EXCEPTION_FILE, mode='w') as file:
json.dump(updated_data, file, indent=4)
return self.refresh()
| 38.174863 | 134 | 0.61838 | 6,694 | 0.958202 | 0 | 0 | 0 | 0 | 0 | 0 | 1,351 | 0.193387 |
16d0a3ae5b7a5043417a9ada134eda9cc4f2dd27 | 1,548 | py | Python | AI-Practice-Tensorflow-Notes-master/opt/opt4_8_backward.py | foochane/Tensorflow-Learning | 54d210a1286051e9d60c98a62bd63eb070bc0a11 | [
"Apache-2.0"
] | 2 | 2019-01-23T14:23:17.000Z | 2019-01-23T14:23:49.000Z | AI-Practice-Tensorflow-Notes-master/opt/opt4_8_backward.py | foochane/Tensorflow-Learning | 54d210a1286051e9d60c98a62bd63eb070bc0a11 | [
"Apache-2.0"
] | null | null | null | AI-Practice-Tensorflow-Notes-master/opt/opt4_8_backward.py | foochane/Tensorflow-Learning | 54d210a1286051e9d60c98a62bd63eb070bc0a11 | [
"Apache-2.0"
] | null | null | null | #coding:utf-8
#0导入模块 ,生成模拟数据集
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import opt4_8_generateds
import opt4_8_forward
STEPS = 40000
BATCH_SIZE = 30
LEARNING_RATE_BASE = 0.001
LEARNING_RATE_DECAY = 0.999
REGULARIZER = 0.01
def backward():
x = tf.placeholder(tf.float32, shape=(None, 2))
y_ = tf.placeholder(tf.float32, shape=(None, 1))
X, Y_, Y_c = opt4_8_generateds.generateds()
y = opt4_8_forward.forward(x, REGULARIZER)
global_step = tf.Variable(0,trainable=False)
learning_rate = tf.train.exponential_decay(
LEARNING_RATE_BASE,
global_step,
300/BATCH_SIZE,
LEARNING_RATE_DECAY,
staircase=True)
    # define the loss function: MSE plus the collected regularization losses
loss_mse = tf.reduce_mean(tf.square(y-y_))
loss_total = loss_mse + tf.add_n(tf.get_collection('losses'))
    # define the back-propagation (training) step, with regularization included
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss_total)
with tf.Session() as sess:
init_op = tf.global_variables_initializer()
sess.run(init_op)
for i in range(STEPS):
start = (i*BATCH_SIZE) % 300
end = start + BATCH_SIZE
sess.run(train_step, feed_dict={x: X[start:end], y_:Y_[start:end]})
if i % 2000 == 0:
loss_v = sess.run(loss_total, feed_dict={x:X,y_:Y_})
print("After %d steps, loss is: %f" %(i, loss_v))
xx, yy = np.mgrid[-3:3:.01, -3:3:.01]
grid = np.c_[xx.ravel(), yy.ravel()]
probs = sess.run(y, feed_dict={x:grid})
probs = probs.reshape(xx.shape)
plt.scatter(X[:,0], X[:,1], c=np.squeeze(Y_c))
plt.contour(xx, yy, probs, levels=[.5])
plt.show()
if __name__=='__main__':
backward()
| 24.967742 | 72 | 0.700258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 161 | 0.099876 |
16d13aced6b20979dea691425018aa9f0ea80fb3 | 3,168 | py | Python | test/examples/integrated/codec/vip/vip_agent.py | rodrigomelo9/uvm-python | e3127eba2cc1519a61dc6f736d862a8dcd6fce20 | [
"Apache-2.0"
] | 140 | 2020-01-18T00:14:17.000Z | 2022-03-29T10:57:24.000Z | test/examples/integrated/codec/vip/vip_agent.py | Mohsannaeem/uvm-python | 1b8768a1358d133465ede9cadddae651664b1d53 | [
"Apache-2.0"
] | 24 | 2020-01-18T18:40:58.000Z | 2021-03-25T17:39:07.000Z | test/examples/integrated/codec/vip/vip_agent.py | Mohsannaeem/uvm-python | 1b8768a1358d133465ede9cadddae651664b1d53 | [
"Apache-2.0"
] | 34 | 2020-01-18T12:22:59.000Z | 2022-02-11T07:03:11.000Z | #//
#// -------------------------------------------------------------
#// Copyright 2011 Synopsys, Inc.
#// Copyright 2019-2020 Tuomas Poikela (tpoikela)
#// All Rights Reserved Worldwide
#//
#// Licensed under the Apache License, Version 2.0 (the
#// "License"); you may not use this file except in
#// compliance with the License. You may obtain a copy of
#// the License at
#//
#// http://www.apache.org/licenses/LICENSE-2.0
#//
#// Unless required by applicable law or agreed to in
#// writing, software distributed under the License is
#// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
#// CONDITIONS OF ANY KIND, either express or implied. See
#// the License for the specific language governing
#// permissions and limitations under the License.
#// -------------------------------------------------------------
#//
from uvm import *
from .vip_sequencer import vip_sequencer
from .vip_driver import vip_driver
from .vip_monitor import vip_monitor
class vip_agent(UVMAgent):
def __init__(self, name, parent=None):
super().__init__(name, parent)
self.hier_objection = False
def build_phase(self, phase):
self.sqr = vip_sequencer.type_id.create("sqr", self)
self.drv = vip_driver.type_id.create("drv", self)
self.tx_mon = vip_monitor.type_id.create("tx_mon", self)
self.rx_mon = vip_monitor.type_id.create("rx_mon", self)
self.rx_mon.hier_objection = self.hier_objection
self.tx_mon.hier_objection = self.hier_objection
self.drv.hier_objection = self.hier_objection
vif = []
if not UVMConfigDb.get(self, "", "vif", vif):
uvm_fatal("VIP/AGT/NOVIF", "No virtual interface specified for self agent instance")
self.vif = vif[0]
UVMConfigDb.set(self, "tx_mon", "vif", self.vif.tx_mon)
UVMConfigDb.set(self, "rx_mon", "vif", self.vif.rx)
def connect_phase(self, phase):
self.drv.seq_item_port.connect(self.sqr.seq_item_export)
async def pre_reset_phase(self, phase):
if self.hier_objection:
phase.raise_objection(self, "Resetting agent")
await self.reset_and_suspend()
if self.hier_objection:
print("vip_agent dropping objection")
phase.drop_objection(self)
async def reset_and_suspend(self):
#fork
await sv.fork_join([
cocotb.fork(self.drv.reset_and_suspend()),
cocotb.fork(self.tx_mon.reset_and_suspend()),
cocotb.fork(self.rx_mon.reset_and_suspend())
])
#join
self.sqr.stop_sequences()
async def suspend(self):
await sv.fork_join([
# fork
cocotb.fork(self.drv.suspend()),
cocotb.fork(self.tx_mon.suspend()),
cocotb.fork(self.rx_mon.suspend()),
])
# join
async def resume(self):
# fork
await sv.fork_join([
cocotb.fork(self.drv.resume()),
cocotb.fork(self.tx_mon.resume()),
cocotb.fork(self.rx_mon.resume()),
])
# join
uvm_component_utils(vip_agent)
| 33 | 96 | 0.606376 | 2,116 | 0.667929 | 0 | 0 | 0 | 0 | 1,069 | 0.337437 | 1,085 | 0.342487 |
16d1b5218231a945c48c3095503b717e135149a2 | 7,987 | py | Python | tests/test_transliterate.py | abosoar/camel_tools | 0a92c06f6dde0063e26df5cbe4d74c2f99b418e0 | [
"MIT"
] | 1 | 2021-03-23T12:50:47.000Z | 2021-03-23T12:50:47.000Z | tests/test_transliterate.py | KaoutharMokrane/camel_tools | e9099907835b05d448362bce2cb0e815ac7f5590 | [
"MIT"
] | null | null | null | tests/test_transliterate.py | KaoutharMokrane/camel_tools | e9099907835b05d448362bce2cb0e815ac7f5590 | [
"MIT"
] | 1 | 2021-01-24T05:06:33.000Z | 2021-01-24T05:06:33.000Z | # -*- coding: utf-8 -*-
# MIT License
#
# Copyright 2018-2020 New York University Abu Dhabi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Tests for camel_tools.transliterate.
"""
from __future__ import absolute_import
import pytest
from camel_tools.utils.charmap import CharMapper
from camel_tools.utils.transliterate import Transliterator
# A mapper that translates lower-case English characters to a lower-case x and
# upper-case English characters to an upper-case X. This makes it easy to
# predict what the transliteration should be.
TEST_MAP = {
u'A-Z': u'X',
u'a-z': u'x',
}
TEST_MAPPER = CharMapper(TEST_MAP, None)
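# With this mapper a string such as 'Hello World' maps to 'Xxxxx Xxxxx':
# characters outside A-Z/a-z are passed through unchanged, which keeps the
# expected transliterations in the tests below easy to predict.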
class TestTransliteratorInit(object):
"""Test class for Transliterator.__init__.
"""
def test_init_none_mapper(self):
"""Test that init raises a TypeError when given a mapper that is None.
"""
with pytest.raises(TypeError):
Transliterator(None)
def test_init_invalid_type_mapper(self):
"""Test that init raises a TypeError when given a mapper that is not a
CharMapper instance.
"""
with pytest.raises(TypeError):
Transliterator({})
def test_init_valid_mapper(self):
"""Test that init doesn't raise an error when given a valid mapper.
"""
assert Transliterator(TEST_MAPPER)
def test_init_none_marker(self):
"""Test that init raises a TypeError when given a marker that is None.
"""
with pytest.raises(TypeError):
Transliterator(TEST_MAPPER, None)
def test_init_invalid_type_marker(self):
"""Test that init raises a TypeError when given a marker that is not a
string.
"""
with pytest.raises(TypeError):
Transliterator(TEST_MAPPER, [])
def test_init_empty_marker(self):
"""Test that init raises a ValueError when given a marker that is an
empty string.
"""
with pytest.raises(ValueError):
Transliterator(TEST_MAPPER, '')
def test_init_invalid_marker1(self):
"""Test that init raises a ValueError when given an invalid marker (
        whitespace in the middle).
"""
with pytest.raises(ValueError):
Transliterator(TEST_MAPPER, '@@LAT @@')
def test_init_invalid_marker2(self):
"""Test that init raises a ValueError when given an invalid marker (
whitespace at the end).
"""
with pytest.raises(ValueError):
Transliterator(TEST_MAPPER, '@@LAT@@ ')
def test_init_invalid_marker3(self):
"""Test that init raises a ValueError when given an invalid marker (
whitespace at the beginning).
"""
with pytest.raises(ValueError):
Transliterator(TEST_MAPPER, ' @@LAT@@')
def test_init_valid_marker1(self):
"""Test that init doesn't raise an error when given a valid marker.
"""
assert Transliterator(TEST_MAPPER, '@@LAT@@')
def test_init_valid_marker2(self):
"""Test that init doesn't raise an error when given a valid marker.
"""
assert Transliterator(TEST_MAPPER, u'@@LAT@@')
class TestTransliteratorTranslate(object):
"""Test class for Transliterator.translate.
"""
def test_trans_empty(self):
"""Test that transliterating an empty string returns an empty string.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'') == u''
def test_trans_single_no_markers(self):
"""Test that a single word with no markers gets transliterated.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'Hello') == u'Xxxxx'
def test_trans_single_with_markers(self):
"""Test that a single word with markers does not get transliterated.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'@@Hello') == u'@@Hello'
def test_trans_single_strip(self):
"""Test that a single word with markers does not get transliterated
but markers do get stripped when strip_markers is set to True.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'@@Hello', True) == u'Hello'
def test_trans_single_ignore(self):
"""Test that a single word with markers gets transliterated when ignore
markers is set to True.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'@@Hello', False, True) == u'@@Xxxxx'
def test_trans_single_ignore_strip(self):
"""Test that a single word with markers gets transliterated with
markers stripped when both strip_markers and ignore_markers are set to
True.
"""
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(u'@@Hello', True, True) == u'Xxxxx'
def test_trans_sent_no_markers(self):
"""Test that a sentence with no markers gets transliterated.
"""
sent_orig = u'Hello World, this is a sentence!'
sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(sent_orig) == sent_out
def test_trans_sent_with_markers(self):
"""Test that tokens with markers in a sentence do not get
transliterated.
"""
sent_orig = u'Hello @@World, this is a @@sentence!'
sent_out = u'Xxxxx @@World, xxxx xx x @@sentence!'
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(sent_orig) == sent_out
def test_trans_sent_strip(self):
"""Test that tokens with markers in a sentence do not get
transliterated but markers do get stripped when strip_markers is set
to True.
"""
sent_orig = u'Hello @@World, this is a @@sentence!'
sent_out = u'Xxxxx World, xxxx xx x sentence!'
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(sent_orig, True) == sent_out
def test_trans_sent_ignore(self):
"""Test that tokens with markers in a sentence get transliterated
when ignore markers is set to True.
"""
sent_orig = u'Hello @@World, this is a @@sentence!'
sent_out = u'Xxxxx @@Xxxxx, xxxx xx x @@xxxxxxxx!'
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(sent_orig, False, True) == sent_out
def test_trans_sent_ignore_strip(self):
"""Test that tokens with markers in a sentence get transliterated with
markers stripped when both strip_markers and ignore_markers are set to
True.
"""
sent_orig = u'Hello @@World, this is a @@sentence!'
sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
trans = Transliterator(TEST_MAPPER, '@@')
assert trans.transliterate(sent_orig, True, True) == sent_out
| 33.700422 | 79 | 0.662076 | 6,328 | 0.792287 | 0 | 0 | 0 | 0 | 0 | 0 | 4,406 | 0.551646 |
16d2ceeba676dbb491a1206466347e8ee17c6418 | 2,485 | py | Python | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | [
"Apache-2.0"
] | null | null | null | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | [
"Apache-2.0"
] | null | null | null | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | [
"Apache-2.0"
] | 1 | 2021-04-09T15:01:49.000Z | 2021-04-09T15:01:49.000Z | ######################################################################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
import json
import sys
from collections import OrderedDict
def get_versioned_template(template_filename, bucket, solution, version, region):
with open(template_filename, "rt") as f:
template_text = "".join(f.readlines())
template_text = template_text.replace("%bucket%", bucket)
template_text = template_text.replace("%solution%", solution)
template_text = template_text.replace("%version%", version)
if region == 'cn-north-1' or region == 'cn-northwest-1':
arn_prefix = "arn:aws-cn"
else:
arn_prefix = "arn:aws"
template_text = template_text.replace("%arn_prefix%", arn_prefix)
return json.loads(template_text, object_pairs_hook=OrderedDict)
def main(template_file, bucket, solution, version, region):
template = get_versioned_template(template_file, bucket, solution, version, region)
print(json.dumps(template, indent=4))
main(template_file=sys.argv[1], bucket=sys.argv[2], solution=sys.argv[3], version=sys.argv[4], region=sys.argv[5])
exit(0)
| 59.166667 | 118 | 0.464789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,518 | 0.610865 |
16d306bdfaed88804b418d267e2c9f7fdd6fab73 | 7,965 | py | Python | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | [
"BSD-3-Clause"
] | null | null | null | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | [
"BSD-3-Clause"
] | null | null | null | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | [
"BSD-3-Clause"
] | null | null | null | ###############################################################################
# file -- parse.py --
# Top contributors (to current version):
# Nestan Tsiskaridze
# This file is part of the configuration finder for the Stanford AHA project.
# Copyright (c) 2021 by the authors listed in the file AUTHORS
# in the top-level source directory) and their institutional affiliations.
# All rights reserved. See the file LICENSE in the top-level source
# directory for licensing information.
#
# Handles parsing of all input files.
###############################################################################
import smt_switch as ss
import smt_switch.primops as po
import smt_switch.sortkinds as sk
import argparse
import pono as c
import sys
import re
import time
import copy
import io
#import timeit
class stream:
set0 = []
set1 = []
seq_in = []
seq_out = []
vars = {}
var_array_inds = {}
constr2terms = []
data_in_size = []
data_out_size = []
clk_name = None
rst_n_name = None
config_names = []
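    # NOTE (editorial): these are class-level (shared) containers rather than
    # per-instance attributes. read_stream() below populates them from the
    # annotation file: set0/set1 collect signals pinned to constant 0/1,
    # seq_in/seq_out collect the streamed data ports, and config_names lists
    # the configuration registers the solver has to assign.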
def read_stream(self, args, fout, agg_set, tb_set, sram_set, symbols, solver):
global dim_names
# open an annotation file
if args.annotation == None:
if agg_set:
annot_file = args.hwpath+"agg_lake_top_annotation.txt"
elif tb_set:
annot_file = args.hwpath+"tb_lake_top_annotation.txt"
elif sram_set:
annot_file = args.hwpath+"sram_lake_top_annotation.txt"
else:
annot_file = args.hwpath+"lake_top_annotation.txt"
else:
annot_file = args.annotation
cfo = open(annot_file, "r+")
clines = cfo.readlines()
# Close opend file
cfo.close()
# Collect the Set0, Set1, I/O sequence, and config_name variables as they appear in btor2
for cln in clines:
cln = cln.strip()
cln = cln.replace(',', '')
cvars = cln.split()
if 'var' == cvars[0]:
self.vars[cvars[1]] = cvars[3]
elif 'input' != cvars[0] and 'output' != cvars[0] and 'var' != cvars[0] and 'if' != cvars[0] and 'SOLVE' != cvars[1]:
self.constr2terms.append(cvars)
elif 'if' == cvars[0]:
self.constr2terms.append(cvars)
elif 'SOLVE' == cvars[1]:#specific bits are set only to be solved. Others can be anything, e.g. 0
signal = cvars[0]
if ':' in signal:
signal_name = signal[:signal.find('[')]
ind_start = signal[signal.find('[')+1:signal.find(':')]
symb_start = False
if ind_start in self.vars:
ind_start = int(self.vars[ind_start],0)
elif ind_start.isdigit():
ind_start = int(ind_start,0)
else: symb_start = True
ind_end = signal[signal.find(':')+1:signal.find(']')]
symb_end = False
if ind_end in self.vars:
ind_end = int(self.vars[ind_end],0)
elif ind_end.isdigit():
ind_end = int(ind_end,0)
else: #case of symbolic
symb_end = True
if not symb_start and not symb_end:
if signal[:signal.find('[')] not in self.var_array_inds:
self.var_array_inds[signal[:signal.find('[')]] = []
for i in range(ind_start,ind_end+1):
self.var_array_inds[signal_name].append(i)
else: #implement later when suport for universal quantifiers is added
self.constr2terms.append(cvars)
else:
if signal[:signal.find('[')] not in self.var_array_inds:
self.var_array_inds[signal[:signal.find('[')]] = []
self.var_array_inds[signal[:signal.find('[')]].append(signal[signal.find('[')+1:signal.find(']')])
elif 'SET' == cvars[-1][:-1]:
if len(cvars) == 6:
rem_dims = cvars[2]
dims = []
while (rem_dims != ''):
dims.append(int(rem_dims[1:rem_dims.find(':')],0))
rem_dims = rem_dims[rem_dims.find(']')+1:]
gen = [0]*len(dims)
j = len(dims)-1
while j >= 0:
if gen[j] <= dims[j]:
build_dims = cvars[-2]
for i in gen:
build_dims = build_dims + '['+str(i)+']'
if cvars[-1][-1:] == '0':
self.set0.append(build_dims)
else:
self.set1.append(build_dims)
while (j < len(dims)-1 and gen[j+1] == 0):
j += 1
else:
gen[j] = 0
j -= 1
gen[j] += 1
else:
if cvars[-1][-1:] == '0':
self.set0.append(cvars[-2])
else:
self.set1.append(cvars[-2])
elif 'SEQUENCE' == cvars[-1]:
if len(cvars) == 6:
rem_dims = cvars[2]
dims = []
while (rem_dims != ''):
dims.append(int(rem_dims[1:rem_dims.find(':')],0))
rem_dims = rem_dims[rem_dims.find(']')+1:]
if cvars[0] == 'input':
self.data_in_size = dims
else:
self.data_out_size = dims
assert len(self.data_in_size) <= 3
assert len(self.data_out_size) <= 3
gen = [0]*len(dims)
j = len(dims)-1
while j >= 0:
if gen[j] <= dims[j]:
build_dims = cvars[-2]
for i in gen:
build_dims = build_dims + '['+str(i)+']'
if cvars[0] == 'input':
self.seq_in.append(build_dims)
else:
self.seq_out.append(build_dims)
while (j < len(dims)-1 and gen[j+1] == 0):
j += 1
else:
gen[j] = 0
j -= 1
gen[j] += 1
else:
if cvars[0] == 'input':
self.seq_in.append(cvars[-2])
else:
self.seq_out.append(cvars[-2])
elif 'SOLVE' == cvars[-1] and ('input' == cvars[0] or 'output' == cvars[0]):
#if cvars[3] == 'strg_ub_pre_fetch_0_input_latency':
# continue
if len(cvars) == 6:
dim = int(cvars[2][1:cvars[2].find(':')],0)
for i in range(dim+1):
self.config_names.append(cvars[-2]+'['+str(i)+']')
else:
self.config_names.append(cvars[-2])
elif 'CLK' == cvars[-1]: self.clk_name = cvars[-2]
elif 'RSTN' == cvars[-1]: self.rst_n_name = cvars[-2]
else:
assert 'X' == cvars[-1]
| 40.431472 | 129 | 0.417075 | 7,134 | 0.895669 | 0 | 0 | 0 | 0 | 0 | 0 | 1,292 | 0.16221 |
16d35857ae1d82e14e5940b8e5331b8a6a44ca39 | 2,177 | py | Python | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | [
"MIT"
] | null | null | null | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | [
"MIT"
] | null | null | null | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | [
"MIT"
] | null | null | null | from flask import Blueprint, render_template, redirect, url_for, request, flash, make_response
from werkzeug.security import generate_password_hash
from flask_login import login_required, current_user
from . import db
import datetime
from .models import Visitor, User
main = Blueprint('main', __name__)
@main.route('/')
def index():
#control visitor with cookies
cookie = request.cookies.get('isvisited')
if cookie:
#visit=True
pass
else:
resp = make_response(render_template('index.html'))
resp.set_cookie('isvisited', 'yess')
return resp
visitor_ip = request.remote_addr
visited_time = datetime.datetime.now()
visitors = Visitor.query.all()
visited = Visitor.query.filter_by(ip=visitor_ip).first()
visit = False
if visited:
difference = abs((visited_time - visited.last_visit).seconds)
if difference > 60:
visit = True
visited.last_visit = visited_time
db.session.commit()
else:
new_visitor = Visitor(ip=visitor_ip,last_visit=visited_time)
db.session.add(new_visitor)
db.session.commit()
return render_template('index.html',visitors=visitors,visit=visit)
@main.route('/', methods=['POST'])
def index_post():
email = request.form.get('email')
name = request.form.get('name')
password = request.form.get('password')
user = User.query.filter_by(email=email).first() # if this returns a user, then the email already exists in database
if user: # if a user is found, we want to redirect back to signup page so user can try again
flash('Email address already exists')
return redirect(url_for('auth.signup'))
# create a new user with the form data. Hash the password so the plaintext version isn't saved.
new_user = User(email=email, name=name, password=generate_password_hash(password, method='sha256'))
# add the new user to the database
db.session.add(new_user)
db.session.commit()
return redirect(url_for('auth.login'))
@main.route('/profile')
@login_required
def profile():
return render_template('profile.html', name=current_user.name)
| 33.492308 | 120 | 0.692237 | 0 | 0 | 0 | 0 | 1,866 | 0.857143 | 0 | 0 | 499 | 0.229215 |
16d397fdfd404f351b1fb42cfa6cff5538a49320 | 790 | py | Python | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | # Functions
cabecalho = "SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n"
rodape = "\n\n\n Obrigada pela preferencia"
def imprimir_tela(conteudo):
print(cabecalho)
#print(opcao_menu)
print(conteudo)
print(rodape)
def ler_opcoes():
opcao = int(input("Insira a opção: "))
return opcao
def carregar_opcoes(opcao):
if opcao == 1:
imprimir_tela("A opção escolhida foi 'Cadastrar funcionário'")
elif opcao == 2:
imprimir_tela("A opção escolhida foi 'Listar funcionários'")
elif opcao == 3:
imprimir_tela("A opção escolhida foi 'Editar funcionário'")
elif opcao == 4:
imprimir_tela("A opção escolhida foi 'Deletar funcionário'")
elif opcao == 5:
imprimir_tela("A opção escolhida foi 'Sair'")
else:
pass | 27.241379 | 70 | 0.655696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 350 | 0.433168 |
16d47d0537155255ce27cd3c3479b098ca6ecf13 | 665 | py | Python | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | [
"MIT"
] | 1 | 2020-04-29T21:30:11.000Z | 2020-04-29T21:30:11.000Z | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | [
"MIT"
] | 10 | 2018-08-20T18:10:56.000Z | 2019-04-05T14:45:11.000Z | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | [
"MIT"
] | null | null | null | from node import Node
class BinOp(Node):
def eval(self, st):
a = self.children[0].eval(st)
b = self.children[1].eval(st)
if self.value == "MINUS":
return a - b
elif self.value == "PLUS":
return a + b
elif self.value == "MULT":
return a * b
elif self.value == "DIV":
return a // b
elif self.value == "GREATER":
return a > b
elif self.value == "LESS":
return a < b
elif self.value == "GE":
return a >= b
elif self.value == "LE":
return a <= b
elif self.value == "EQUALS":
return a == b
elif self.value == "AND":
return a and b
elif self.value == "OR":
return a or b
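# Illustrative note (added, not part of the original file): eval() dispatches on self.value,
# so a BinOp with value "DIV" whose children evaluate to 7 and 2 returns 3 (floor division),
# while comparison values such as "GE" or "EQUALS" return booleans.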
| 19.558824 | 32 | 0.542857 | 628 | 0.944361 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.096241 |
16d53c81f0a6c59b031bb33f8b48778a56657258 | 7,180 | py | Python | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | [
"MIT"
] | null | null | null | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | [
"MIT"
] | null | null | null | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (C) 2018 Linus Jahn <[email protected]>
# Copyright (C) 2019,2020 Hiroshi Miura <[email protected]>
# Copyright (C) 2020, Aurélien Gâteau
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import concurrent.futures
import os
import pathlib
import subprocess
import sys
import time
from logging import getLogger
import py7zr
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from aqt.archives import QtPackage
from aqt.helper import altlink, versiontuple
from aqt.qtpatch import Updater
from aqt.settings import Settings
class ExtractionError(Exception):
pass
class QtInstaller:
"""
    Installer class to download packages and extract them.
"""
def __init__(self, qt_archives, logging=None, command=None, target_dir=None):
self.qt_archives = qt_archives
if logging:
self.logger = logging
else:
self.logger = getLogger('aqt')
self.command = command
if target_dir is None:
self.base_dir = os.getcwd()
else:
self.base_dir = target_dir
self.settings = Settings()
def retrieve_archive(self, package: QtPackage):
archive = package.archive
url = package.url
name = package.name
start_time = time.perf_counter()
self.logger.info("Downloading {}...".format(name))
self.logger.debug("Download URL: {}".format(url))
session = requests.Session()
retry = Retry(connect=5, backoff_factor=0.5)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
try:
r = session.get(url, allow_redirects=False, stream=True)
if r.status_code == 302:
newurl = altlink(r.url, r.headers['Location'], logger=self.logger)
self.logger.info('Redirected URL: {}'.format(newurl))
r = session.get(newurl, stream=True)
except requests.exceptions.ConnectionError as e:
self.logger.error("Connection error: %s" % e.args)
raise e
else:
try:
with open(archive, 'wb') as fd:
for chunk in r.iter_content(chunk_size=8196):
fd.write(chunk)
fd.flush()
if self.command is None:
with open(archive, 'rb') as fd:
self.extract_archive(fd)
except Exception as e:
exc = sys.exc_info()
self.logger.error("Download error: %s" % exc[1])
raise e
else:
if self.command is not None:
self.extract_archive_ext(archive)
os.unlink(archive)
self.logger.info("Finish installation of {} in {}".format(archive, time.perf_counter() - start_time))
def extract_archive(self, archive):
szf = py7zr.SevenZipFile(archive)
szf.extractall(path=self.base_dir)
szf.close()
def extract_archive_ext(self, archive):
if self.base_dir is not None:
command_args = [self.command, 'x', '-aoa', '-bd', '-y', '-o{}'.format(self.base_dir), archive]
else:
command_args = [self.command, 'x', '-aoa', '-bd', '-y', archive]
try:
proc = subprocess.run(command_args, stdout=subprocess.PIPE, check=True)
self.logger.debug(proc.stdout)
except subprocess.CalledProcessError as cpe:
self.logger.error("Extraction error: %d" % cpe.returncode)
if cpe.stdout is not None:
self.logger.error(cpe.stdout)
if cpe.stderr is not None:
self.logger.error(cpe.stderr)
raise cpe
def get_arch_dir(self, arch):
if arch.startswith('win64_mingw'):
arch_dir = arch[6:] + '_64'
elif arch.startswith('win32_mingw'):
arch_dir = arch[6:] + '_32'
elif arch.startswith('win'):
arch_dir = arch[6:]
else:
arch_dir = arch
return arch_dir
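    # Illustration (added note, example arch strings are hypothetical):
    #   get_arch_dir("win64_mingw73")    -> "mingw73_64"
    #   get_arch_dir("win32_mingw73")    -> "mingw73_32"
    #   get_arch_dir("win64_msvc2017_64") -> "msvc2017_64"
    # and any non-Windows arch string is returned unchanged.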
def make_conf_files(self, qt_version, arch):
"""Make Qt configuration files, qt.conf and qtconfig.pri"""
arch_dir = self.get_arch_dir(arch)
try:
# prepare qt.conf
with open(os.path.join(self.base_dir, qt_version, arch_dir, 'bin', 'qt.conf'), 'w') as f:
f.write("[Paths]\n")
f.write("Prefix=..\n")
# update qtconfig.pri only as OpenSource
with open(os.path.join(self.base_dir, qt_version, arch_dir, 'mkspecs', 'qconfig.pri'), 'r+') as f:
lines = f.readlines()
f.seek(0)
f.truncate()
for line in lines:
if line.startswith('QT_EDITION ='):
line = 'QT_EDITION = OpenSource\n'
if line.startswith('QT_LICHECK ='):
line = 'QT_LICHECK =\n'
f.write(line)
except IOError as e:
self.logger.error("Configuration file generation error: %s\n", e.args, exc_info=True)
raise e
def install(self):
with concurrent.futures.ThreadPoolExecutor(self.settings.concurrency) as executor:
futures = [executor.submit(self.retrieve_archive, ar) for ar in self.qt_archives.get_archives()]
done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_EXCEPTION)
if len(not_done) > 0:
self.logger.error("Installation error detected.")
exit(1)
try:
for feature in done:
feature.result()
except Exception:
exit(1)
def finalize(self):
target = self.qt_archives.get_target_config()
self.make_conf_files(target.version, target.arch)
prefix = pathlib.Path(self.base_dir) / target.version / target.arch
updater = Updater(prefix, self.logger)
if versiontuple(target.version) < (5, 14, 2):
updater.patch_qt(target)
| 40.111732 | 110 | 0.608635 | 5,569 | 0.775411 | 0 | 0 | 0 | 0 | 0 | 0 | 1,880 | 0.261766 |
16d55202daea41a875b382f2393a76063d29376b | 4,865 | py | Python | lib/django-0.96/django/views/generic/list_detail.py | MiCHiLU/google_appengine_sdk | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | [
"Apache-2.0"
] | 790 | 2015-01-03T02:13:39.000Z | 2020-05-10T19:53:57.000Z | AppServer/lib/django-0.96/django/views/generic/list_detail.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | [
"Apache-2.0"
] | 1,361 | 2015-01-08T23:09:40.000Z | 2020-04-14T00:03:04.000Z | AppServer/lib/django-0.96/django/views/generic/list_detail.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | [
"Apache-2.0"
] | 155 | 2015-01-08T22:59:31.000Z | 2020-04-08T08:01:53.000Z | from django.template import loader, RequestContext
from django.http import Http404, HttpResponse
from django.core.xheaders import populate_xheaders
from django.core.paginator import ObjectPaginator, InvalidPage
from django.core.exceptions import ObjectDoesNotExist
def object_list(request, queryset, paginate_by=None, page=None,
allow_empty=False, template_name=None, template_loader=loader,
extra_context=None, context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic list of objects.
Templates: ``<app_label>/<model_name>_list.html``
Context:
object_list
list of objects
is_paginated
are the results paginated?
results_per_page
number of objects per page (if paginated)
has_next
is there a next page?
has_previous
is there a prev page?
page
the current page
next
the next page
previous
the previous page
pages
number of pages, total
hits
number of objects, total
last_on_page
the result number of the last of object in the
object_list (1-indexed)
first_on_page
the result number of the first object in the
object_list (1-indexed)
"""
if extra_context is None: extra_context = {}
queryset = queryset._clone()
if paginate_by:
paginator = ObjectPaginator(queryset, paginate_by)
if not page:
page = request.GET.get('page', 1)
try:
page = int(page)
object_list = paginator.get_page(page - 1)
except (InvalidPage, ValueError):
if page == 1 and allow_empty:
object_list = []
else:
raise Http404
c = RequestContext(request, {
'%s_list' % template_object_name: object_list,
'is_paginated': paginator.pages > 1,
'results_per_page': paginate_by,
'has_next': paginator.has_next_page(page - 1),
'has_previous': paginator.has_previous_page(page - 1),
'page': page,
'next': page + 1,
'previous': page - 1,
'last_on_page': paginator.last_on_page(page - 1),
'first_on_page': paginator.first_on_page(page - 1),
'pages': paginator.pages,
'hits' : paginator.hits,
}, context_processors)
else:
c = RequestContext(request, {
'%s_list' % template_object_name: queryset,
'is_paginated': False
}, context_processors)
if not allow_empty and len(queryset) == 0:
raise Http404
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
if not template_name:
model = queryset.model
template_name = "%s/%s_list.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
return HttpResponse(t.render(c), mimetype=mimetype)
def object_detail(request, queryset, object_id=None, slug=None,
slug_field=None, template_name=None, template_name_field=None,
template_loader=loader, extra_context=None,
context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic detail of an object.
Templates: ``<app_label>/<model_name>_detail.html``
Context:
object
the object
"""
if extra_context is None: extra_context = {}
model = queryset.model
if object_id:
queryset = queryset.filter(pk=object_id)
elif slug and slug_field:
queryset = queryset.filter(**{slug_field: slug})
else:
raise AttributeError, "Generic detail view must be called with either an object_id or a slug/slug_field."
try:
obj = queryset.get()
except ObjectDoesNotExist:
raise Http404, "No %s found matching the query" % (model._meta.verbose_name)
if not template_name:
template_name = "%s/%s_detail.html" % (model._meta.app_label, model._meta.object_name.lower())
if template_name_field:
template_name_list = [getattr(obj, template_name_field), template_name]
t = template_loader.select_template(template_name_list)
else:
t = template_loader.get_template(template_name)
c = RequestContext(request, {
template_object_name: obj,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
response = HttpResponse(t.render(c), mimetype=mimetype)
populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.name))
return response
| 36.856061 | 113 | 0.623227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,336 | 0.274615 |
16d68949a023a20451569c4bd42476cab180bd99 | 5,398 | py | Python | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | [
"MIT"
] | 11 | 2021-08-28T17:45:38.000Z | 2022-01-26T17:50:03.000Z | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | [
"MIT"
] | 1 | 2021-09-13T17:29:33.000Z | 2021-09-13T21:50:34.000Z | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | [
"MIT"
] | null | null | null | """Utility Modules."""
from typing import Any, Callable, Dict, List, Optional, Sequence, TypeVar, Union
import jax
import jax.numpy as jnp
from .module import Module, parameters_method
T = TypeVar("T", bound=Module)
O = TypeVar("O")
class ParameterModule(Module):
"""A PAX module that registers attributes as parameters by default."""
def parameters(self):
return self.apply_submodules(lambda x: x.parameters())
class StateModule(Module):
"""A PAX module that registers attributes as states by default."""
parameters = parameters_method()
class LazyModule(Module):
"""A lazy module is a module that only creates submodules when needed.
Example:
>>> from dataclasses import dataclass
>>> @dataclass
... class MLP(pax.experimental.LazyModule):
... features: list
...
... def __call__(self, x):
... sizes = zip(self.features[:-1], self.features[1:])
... for i, (in_dim, out_dim) in enumerate(sizes):
... fc = self.get_or_create(f"fc_{i}", lambda: pax.Linear(in_dim, out_dim))
... x = jax.nn.relu(fc(x))
... return x
...
...
>>> mlp, _ = MLP([1, 2, 3]) % jnp.ones((1, 1))
>>> print(mlp.summary())
MLP(features=[1, 2, 3])
├── Linear(in_dim=1, out_dim=2, with_bias=True)
└── Linear(in_dim=2, out_dim=3, with_bias=True)
"""
def get_or_create(self, name, create_fn: Callable[[], T]) -> T:
"""Create and register a new attribute when it is not exist.
Return the attribute.
"""
if hasattr(self, name):
value = getattr(self, name)
else:
assert callable(create_fn), "Expect a callable function"
value = create_fn()
setattr(self, name, value)
return value
class Lambda(Module):
"""Convert a function to a module.
Example:
>>> net = pax.Lambda(jax.nn.relu)
>>> print(net.summary())
x => relu(x)
>>> y = net(jnp.array(-1))
>>> y
DeviceArray(0, dtype=int32, weak_type=True)
"""
func: Callable
def __init__(self, func: Callable, name: Optional[str] = None):
super().__init__(name=name)
self.func = func
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
def __repr__(self) -> str:
if self.name is not None:
return super().__repr__()
else:
return f"{self.__class__.__qualname__}({self.func.__name__})"
def summary(self, return_list: bool = False) -> Union[str, List[str]]:
if self.name is not None:
name = self.name
elif isinstance(self.func, jax.custom_jvp) and hasattr(self.func, "fun"):
if hasattr(self.func.fun, "__name__"):
name = self.func.fun.__name__
else:
name = f"{self.func.fun}"
elif hasattr(self.func, "__name__"):
name = self.func.__name__
else:
name = f"{self.func}"
output = f"x => {name}(x)"
return [output] if return_list else output
class Flattener(Module):
"""Flatten PAX modules for better performance.
Example:
>>> net = pax.Linear(3, 3)
>>> opt = opax.adam(1e-3)(net.parameters())
>>> flat_mods = pax.experimental.Flattener(model=net, optimizer=opt)
>>> net, opt = flat_mods.model, flat_mods.optimizer
>>> print(net.summary())
Linear(in_dim=3, out_dim=3, with_bias=True)
>>> print(opt.summary())
chain.<locals>.Chain
├── scale_by_adam.<locals>.ScaleByAdam
│ ├── Linear(in_dim=3, out_dim=3, with_bias=True)
│ └── Linear(in_dim=3, out_dim=3, with_bias=True)
└── scale.<locals>.Scale
"""
treedef_dict: Dict[str, Any]
leaves_dict: Dict[str, Sequence[jnp.ndarray]]
def __init__(self, **kwargs):
"""Create a new flattener."""
super().__init__()
self.treedef_dict = {}
self.leaves_dict = {}
for name, value in kwargs.items():
leaves, treedef = jax.tree_flatten(value)
self.treedef_dict[name] = treedef
self.leaves_dict[name] = leaves
def __getattr__(self, name: str) -> Any:
if name in self.treedef_dict:
treedef = self.treedef_dict[name]
leaves = self.leaves_dict[name]
value = jax.tree_unflatten(treedef, leaves)
return value
else:
raise AttributeError()
def update(self: T, **kwargs) -> T:
"""Update the flattener.
Example:
>>> net = pax.Linear(3, 3)
>>> flats = pax.experimental.Flattener(net=net)
>>> flats = flats.update(net=pax.Linear(4, 4))
>>> print(flats.net.summary())
Linear(in_dim=4, out_dim=4, with_bias=True)
"""
new_self = self.copy()
for name, value in kwargs.items():
leaves, treedef = jax.tree_flatten(value)
new_self.treedef_dict[name] = treedef
new_self.leaves_dict[name] = leaves
return new_self
def parameters(self: T) -> T:
"""Raise an error.
Need to reconstruct the original module before getting parameters.
"""
raise ValueError(
"A flattener only stores ndarray leaves as non-trainable states.\n"
"Reconstruct the original module before getting parameters."
)
| 29.659341 | 91 | 0.582994 | 5,185 | 0.953476 | 0 | 0 | 0 | 0 | 0 | 0 | 2,629 | 0.48345 |
16d79dca474781cfacdcca9ed1544b5e9e33234c | 2,612 | py | Python | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | [
"MIT"
] | null | null | null | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | [
"MIT"
] | null | null | null | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | [
"MIT"
] | null | null | null | """
Backend to connect Open edX richie with an LMS
"""
import logging
import re
import requests
from requests.auth import AuthBase
from ..serializers import SyncCourseRunSerializer
from .base import BaseLMSBackend
logger = logging.getLogger(__name__)
def split_course_key(key):
"""Split an OpenEdX course key by organization, course and course run codes.
We first try splitting the key as a version 1 key (course-v1:org+course+run)
    and fall back to the old version (org/course/run).
"""
if key.startswith("course-v1:"):
organization, course, run = key[10:].split("+")
else:
organization, course, run = key.split("/")
return organization, course, run
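# Usage sketch (added for illustration; the example key is hypothetical): both key styles
# split into the same triple, e.g.
#   split_course_key("course-v1:edX+DemoX+Demo_2020")  ->  ("edX", "DemoX", "Demo_2020")
#   split_course_key("edX/DemoX/Demo_2020")            ->  ("edX", "DemoX", "Demo_2020")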
class EdXTokenAuth(AuthBase):
"""Attach HTTP token authentication to the given Request object."""
def __init__(self, token):
"""Set-up token value in the instance."""
self.token = token
def __call__(self, request):
"""Modify and return the request."""
request.headers.update(
{"X-Edx-Api-Key": self.token, "Content-Type": "application/json"}
)
return request
class TokenAPIClient(requests.Session):
"""
A :class:`requests.Session` that automatically authenticates against edX's preferred
authentication method up to Dogwood, given a secret token.
For more usage details, see documentation of the :class:`requests.Session` object:
https://requests.readthedocs.io/en/master/user/advanced/#session-objects
"""
def __init__(self, token, *args, **kwargs):
"""Extending the session object by setting the authentication token."""
super().__init__(*args, **kwargs)
self.auth = EdXTokenAuth(token)
class EdXLMSBackend(BaseLMSBackend):
"""LMS backend for Richie tested with Open EdX Dogwood to Hawthorn."""
@property
def api_client(self):
"""Instantiate and return an edx token API client."""
return TokenAPIClient(self.configuration["API_TOKEN"])
def extract_course_id(self, url):
"""Extract the LMS course id from the course run url."""
return re.match(self.configuration["COURSE_REGEX"], url).group("course_id")
def extract_course_number(self, data):
"""Extract the LMS course number from data dictionary."""
course_id = self.extract_course_id(data.get("resource_link"))
return split_course_key(course_id)[1]
@staticmethod
def get_course_run_serializer(data, partial=False):
"""Prepare data and return a bound serializer."""
return SyncCourseRunSerializer(data=data, partial=partial)
| 32.246914 | 88 | 0.68683 | 1,906 | 0.729709 | 0 | 0 | 354 | 0.135528 | 0 | 0 | 1,215 | 0.465161 |
16d7b7c1c6e2def8cf0c9ec10f6916a0a8cf367f | 4,106 | py | Python | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | [
"MIT"
] | 4 | 2016-04-26T03:43:54.000Z | 2016-11-17T08:09:04.000Z | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | [
"MIT"
] | 17 | 2015-01-05T21:06:22.000Z | 2015-12-07T20:45:44.000Z | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | [
"MIT"
] | 7 | 2015-07-28T09:17:17.000Z | 2021-11-07T02:29:41.000Z | # by Greg Hazel
import xmlrpclib
from xmlrpclib2 import *
from BTL import brpc
old_PyCurlTransport = PyCurlTransport
class PyCurlTransport(old_PyCurlTransport):
def set_connection_params(self, h):
h.add_header('User-Agent', "brpclib.py/1.0")
h.add_header('Connection', "Keep-Alive")
h.add_header('Content-Type', "application/octet-stream")
def _parse_response(self, response):
# read response from input file/socket, and parse it
return brpc.loads(response.getvalue())[0]
# --------------------------------------------------------------------
# request dispatcher
class _Method:
    # some magic to bind a B-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args, **kwargs):
args = (args, kwargs)
return self.__send(self.__name, args)
    # ARG! prevent repr(_Method()) from submitting an RPC call!
def __repr__(self):
return "<%s instance at 0x%08X>" % (self.__class__, id(self))
# Double underscore is BAD!
class BRPC_ServerProxy(xmlrpclib.ServerProxy):
"""uri [,options] -> a logical connection to an B-RPC server
uri is the connection point on the server, given as
scheme://host/target.
The standard implementation always supports the "http" scheme. If
SSL socket support is available (Python 2.0), it also supports
"https".
If the target part and the slash preceding it are both omitted,
"/RPC2" is assumed.
The following options can be given as keyword arguments:
transport: a transport factory
encoding: the request encoding (default is UTF-8)
All 8-bit strings passed to the server proxy are assumed to use
the given encoding.
"""
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=0):
# establish a "logical" server connection
# get the url
import urllib
type, uri = urllib.splittype(uri)
if type not in ("http", "https"):
raise IOError, "unsupported B-RPC protocol"
self.__host, self.__handler = urllib.splithost(uri)
if not self.__handler:
self.__handler = "/RPC2"
if transport is None:
if type == "https":
transport = xmlrpclib.SafeTransport()
else:
transport = xmlrpclib.Transport()
self.__transport = transport
self.__encoding = encoding
self.__verbose = verbose
self.__allow_none = allow_none
def __request(self, methodname, params):
# call a method on the remote server
request = brpc.dumps(params, methodname, encoding=self.__encoding,
allow_none=self.__allow_none)
response = self.__transport.request(
self.__host,
self.__handler,
request,
verbose=self.__verbose
)
if len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<ServerProxy for %s%s>" %
(self.__host, self.__handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self.__request, name)
def new_server_proxy(url):
c = cache_set.get_cache(PyCURL_Cache, url)
t = PyCurlTransport(c)
return BRPC_ServerProxy(url, transport=t)
ServerProxy = new_server_proxy
if __name__ == '__main__':
s = ServerProxy('https://greg.mitte.bittorrent.com:7080/')
def ping(*a, **kw):
(a2, kw2) = s.ping(*a, **kw)
        assert a2 == list(a), '%s list is not %s' % (a2, list(a))
assert kw2 == dict(kw), '%s dict is not %s' % (kw2, dict(kw))
ping(0, 1, 1, name="potato")
ping(0, 1, 1, name="anime")
ping("phish", 0, 1, 1)
ping("games", 0, 1, 1)
| 30.641791 | 74 | 0.605212 | 3,264 | 0.794934 | 0 | 0 | 0 | 0 | 0 | 0 | 1,437 | 0.349976 |
16d7e9187801937282012d38f8b28fd55938bd25 | 1,207 | py | Python | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | [
"BSD-3-Clause"
] | 8 | 2016-12-11T02:24:21.000Z | 2020-08-07T10:02:32.000Z | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | [
"BSD-3-Clause"
] | 41 | 2015-08-11T16:57:21.000Z | 2022-01-18T19:19:41.000Z | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | [
"BSD-3-Clause"
] | 7 | 2015-08-02T05:32:41.000Z | 2019-06-17T11:53:14.000Z | import base64
import mimetypes
import os
from django.conf import settings
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_control
from django.views.static import serve as django_serve
from database_files.models import File
@cache_control(max_age=86400)
def serve(request, name):
"""
Retrieves the file from the database.
"""
f = get_object_or_404(File, name=name)
f.dump()
mimetype = mimetypes.guess_type(name)[0] or 'application/octet-stream'
response = HttpResponse(f.content, content_type=mimetype)
response['Content-Length'] = f.size
return response
def serve_mixed(request, *args, **kwargs):
"""
First attempts to serve the file from the filesystem,
then tries the database.
"""
name = kwargs.get('name') or kwargs.get('path')
document_root = kwargs.get('document_root')
document_root = document_root or settings.MEDIA_ROOT
try:
# First attempt to serve from filesystem.
return django_serve(request, name, document_root)
except Http404:
# Then try serving from database.
return serve(request, name)
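# Usage sketch (added for illustration; the URL pattern is hypothetical): wire the view into
# a URLconf so media requests fall back to the database when the file is missing on disk, e.g.
#   url(r'^media/(?P<name>.+)$', 'database_files.views.serve_mixed')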
| 29.439024 | 74 | 0.724109 | 0 | 0 | 0 | 0 | 366 | 0.303231 | 0 | 0 | 294 | 0.243579 |
16d80d08df5b20660db28d091611ed67b6dfa076 | 2,026 | py | Python | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | [
"MIT"
] | 6 | 2019-11-20T19:32:41.000Z | 2021-06-25T19:47:26.000Z | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | [
"MIT"
] | null | null | null | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | [
"MIT"
] | 1 | 2021-06-25T19:47:34.000Z | 2021-06-25T19:47:34.000Z | import numpy as np
import pandas as pd
from abc import ABC
class Injector(ABC):
"""Base class for the injectors of artificial noise.
Attributes
----------
    labels : :obj:`pandas.DataFrame`
        Target attributes (y) of the dataset; injected noise is written back here.
    noise_indx : :obj:`List`
        Indexes (rows) of the examples selected to receive artificial noise.
"""
def __init__(self, attributes, labels, rate: float = 0.1) -> None:
self._new_noise = []
if not isinstance(attributes, pd.DataFrame):
self._attrs = pd.DataFrame(attributes)
else:
self._attrs = attributes
if not isinstance(labels, pd.DataFrame):
self._labels = pd.DataFrame(labels)
else:
self._labels = labels
self._rate = rate
self.verify()
self._num_noise = int(self._rate * self._attrs.shape[0])
self._label_types = set(self.labels[0].unique())
@property
def labels(self):
return self._labels
@property
def noise_indx(self):
return self._new_noise
def verify(self) -> None:
if min(self._labels.value_counts()) < 2:
raise ValueError("Number of examples in the minority class must be >= 2.")
if self._attrs.shape[0] != self.labels.shape[0]:
raise ValueError("Attributes and classes must have the sime size.")
if self._rate < 0 or self._rate > 1:
raise ValueError("")
def _gen_random(self, seed: int = None):
"""[summary]
Args:
seed (int, optional): [description]. Defaults to 123.
"""
rng = np.random.default_rng(seed)
for example in self._new_noise:
self._labels.iloc[example] = rng.choice(list(self._label_types - set(self._labels.iloc[example])))
| 30.238806 | 110 | 0.600197 | 1,959 | 0.96693 | 0 | 0 | 125 | 0.061698 | 0 | 0 | 675 | 0.333169 |
16d81711460bcdde5df04988352f117c180dbed8 | 19,516 | py | Python | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | [
"MIT"
] | 2 | 2016-11-13T21:32:51.000Z | 2017-03-22T02:50:26.000Z | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | [
"MIT"
] | 1 | 2021-02-08T20:18:59.000Z | 2021-02-08T20:18:59.000Z | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | [
"MIT"
] | null | null | null | from wtforms import Form, TextField, PasswordField, SelectField, TextAreaField, BooleanField, validators, ValidationError, RadioField
import re
phone_regex = "(\+\d+-?)?((\(?\d{3}\)?)|(\d{3}))-?\d{3}-?\d{4}$"
gender_choices = [
("", "Gender"),
("male", "Male"),
("female", "Female"),
("other", "Other"),
("rns", "Rather Not Say")
]
beginner_choices = [
("", "Are you a beginner?"),
("yes", "Yes"),
("no", "No")
]
ethnicity_choices = [
("", "Ethnicity"),
("white", "White"),
("african_american", "African American"),
("asian_pacific", "Asian or Pacific Islander"),
("american_indian_alaskan_native", "American Indian or Alaskan Native"),
("multiracial", "Multiracial"),
("hispanic", "Hispanic origin"),
("other", "Other"),
("rns", "Rather Not Say")
]
num_hackathons_choices = [
("", "How many hackathons have you been to?"),
("0", "0"),
("1", "1"),
("2", "2"),
("3", "3"),
("4", "4"),
("5", "5+")
]
num_hackathons_choices_mentor = [
("", "How many hackathons have you mentored at?"),
("0", "0"),
("1", "1"),
("2", "2"),
("3", "3"),
("4", "4"),
("5", "5+")
]
grade_choices = [
("", "What grade are you in?"),
("9", "9th"),
("10", "10th"),
("11", "11th"),
("12", "12th")
]
shirt_sizes = [
("", "What is your shirt size?"),
("XS", "Extra Small"),
("S", "Small"),
("M", "Medium"),
("L", "Large"),
("XL", "Extra Large")
]
type_account_choices = [
("hacker", "Hacker"),
("mentor", "Mentor")
]
free_response1_prompt = "Why do you want to come to hackBCA?"
free_response1_prompt_mentor = "Please list languages/frameworks/technologies that you would like to mentor students in."
free_response2_prompt_mentor = "Would you like to run a workshop? If so, please briefly describe your ideas."
class HackerRegistrationForm(Form):
email = TextField("Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address.")
], render_kw={"class": 'text'}, description = "Email")
first_name = TextField("First Name", [
validators.Required(message = "You must enter a first name.")
], render_kw={"class": 'text'}, description = "First Name")
last_name = TextField("Last Name", [
validators.Required(message = "You must enter a last name.")
], render_kw={"class": 'text'}, description = "Last Name")
school = TextField("School Name", [
validators.Required(message = "Enter your school's name.")
], render_kw={"class": 'text'}, description = "School Name")
gender = SelectField("Gender", [validators.Required(message = "You must select an option.")], choices = gender_choices, render_kw={"class": 'text'}, description = "Gender")
beginner = SelectField("Are you a beginner?", [validators.Required(message = "You must select an option.")], choices = beginner_choices, render_kw={"class": 'text'}, description = "Are you a beginner?")
ethnicity = SelectField("Ethnicity", [validators.Required(message = "You must select an option.")], choices = ethnicity_choices, render_kw={"class": 'text'}, description = "Ethnicity")
grade = SelectField("Grade", [validators.Required(message = "You must select an option.")], choices = grade_choices, render_kw={"class": 'text'}, description = "Grade")
age = TextField("Age", [
validators.Required(message = "Enter your age")
], render_kw={"class": 'text'}, description = "Age")
num_hackathons = SelectField("How many hackathons have you attended?", [validators.Required(message = "You must select an option.")], choices = num_hackathons_choices, render_kw={"class": 'text'}, description = "How many hackathons have you attended?")
free_response1 = TextAreaField(free_response1_prompt, [
validators.Required(message = "You must answer this question."),
validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
], render_kw={"class": 'text'}, description = "1500 characters maximum.")
link1 = TextField("Link #1", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Link #1 (Optional)")
link2 = TextField("Link #2", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Link #2 (Optional)")
link3 = TextField("Link #3", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Link #3 (Optional)")
password = PasswordField("Password", [
validators.Required(message = "You must enter a password."),
validators.Length(min = 8, message = "Password must be at least 8 characters.")
], render_kw={"class": 'text'}, description = "Password")
confirm_password = PasswordField("Confirm Password", render_kw={"class": 'text'}, description = "Confirm Password")
mlh_coc = BooleanField("I agree", [
validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
], description = "I have read & agree to the MLH Code of Conduct.", default = False)
mlh_terms = BooleanField("I agree", [
validators.Required(message = "Please read and agree to the MLH Terms and Conditions.")
], description = "I agree to the MLH Contest Terms and Conditions and the MLH Privacy Policy.", default = False)
def validate_confirm_password(form, field):
password = form['password'].data
if len(password) >= 8 and password != field.data:
raise ValidationError("Passwords must match.")
def validate(self): #Man I love validators.URL
links = ["link1", "link2", "link3"]
originalValues = {}
for link in links: #Temporarily prefix all links with http:// if they are missing it
attr = getattr(self, link)
val = attr.data
originalValues[link] = val
if re.match("^(http|https)://", val) is None:
val = "http://" + val
attr.data = val
setattr(self, link, attr)
rv = Form.validate(self)
for link in links: #Revert link values back to actual values
attr = getattr(self, link)
attr.data = originalValues[link]
setattr(self, link, attr)
if not rv:
return False
return True
def validate_other_gender(form, field):
if form['gender'].data == 'other' and field.data == "":
raise ValidationError("Enter your gender.")
class MentorRegistrationForm(Form):
email = TextField("Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address.")
], render_kw={"class": 'text'}, description = "Email")
first_name = TextField("First Name", [
validators.Required(message = "You must enter a first name.")
], render_kw={"class": 'text'}, description = "First Name")
last_name = TextField("Last Name", [
validators.Required(message = "You must enter a last name.")
], render_kw={"class": 'text'}, description = "Last Name")
school = TextField("Company/School Name", [
validators.Required(message = "Enter your company/schools's name.")
], render_kw={"class": 'text'}, description = "Company/School Name")
phone = TextField("Phone Number", [
validators.Required(message = "Enter your preferred contact number."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "Phone Number")
num_hackathons = SelectField("How many hackathons have you mentored at?", [validators.Required(message = "You must select an option.")], choices = num_hackathons_choices_mentor, render_kw={"class": 'text'}, description = "How many hackathons have you mentored at?")
mentor_free_response1 = TextAreaField(free_response1_prompt_mentor, [
validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
], render_kw={"class": 'text'}, description = "1500 characters maximum.")
mentor_free_response2 = TextAreaField(free_response2_prompt_mentor, [
validators.Length(max = 1500, message = "Response must be less than 1500 characters long.")
], render_kw={"class": 'text'}, description = "1500 characters maximum.")
github_link = TextField("Github Link", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Github Link (Optional)")
linkedin_link = TextField("LinkedIn", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "LinkedIn Link (Optional)")
site_link = TextField("Personal Site", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Personal Site Link (Optional)")
other_link = TextField("other", [
validators.optional(),
validators.URL(message = "Invalid URL.")
], render_kw={"class": 'text'}, description = "Other Link (Optional)")
password = PasswordField("Password", [
validators.Required(message = "You must enter a password."),
validators.Length(min = 8, message = "Password must be at least 8 characters.")
], render_kw={"class": 'text'}, description = "Password")
confirm_password = PasswordField("Confirm Password", render_kw={"class": 'text'}, description = "Confirm Password")
mlh_coc = BooleanField("I agree", [
validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
], description = "I have read & agree to the MLH Code of Conduct.", default = False)
mlh_terms = BooleanField("I agree", [
validators.Required(message = "Please read and agree to the MLH Terms and Conditions.")
], description = "I agree to the MLH Contest Terms and Conditions and the MLH Privacy Policy.", default = False)
def validate(self):
links = ["github_link", "linkedin_link", "site_link", "other_link"]
originalValues = {}
for link in links: #Temporarily prefix all links with http:// if they are missing it
attr = getattr(self, link)
val = attr.data
originalValues[link] = val
if re.match("^(http|https)://", val) is None:
val = "http://" + val
attr.data = val
setattr(self, link, attr)
rv = Form.validate(self)
for link in links: #Revert link values back to actual values
attr = getattr(self, link)
attr.data = originalValues[link]
setattr(self, link, attr)
if not rv:
return False
return True
class LoginForm(Form):
email = TextField("Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'},description = "Email")
password = PasswordField("Password", [], render_kw={"class": 'text'}, description = "Password")
class EmailForm(Form):
email = TextField("Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'}, description = "Email")
class RecoverForm(Form):
password = PasswordField("Password", [
validators.Required(message = "You must enter a password."),
validators.Length(min = 8, message = "Password must be at least 8 characters.")
], render_kw={"class": 'text'}, description = "Password")
confirm_password = PasswordField("Confirm Password", render_kw={"class": 'text'}, description = "Confirm Password")
def validate_confirm_password(form, field):
password = form['password'].data
if len(password) >= 8 and password != field.data:
raise ValidationError("Passwords must match.")
class ChangeNameForm(Form):
first_name = TextField("First Name", [
validators.Required(message = "You must enter a first name.")
], render_kw={"class": 'text'}, description = "First Name")
last_name = TextField("Last Name", [
validators.Required(message = "You must enter a last name.")
], render_kw={"class": 'text'}, description = "Last Name")
class ChangePasswordForm(Form):
password = PasswordField("Password", [
validators.Required(message = "You must enter your current password."),
validators.Length(min = 8, message = "Password must be at least 8 characters.")
], render_kw={"class": 'text'}, description = "Current Password")
new_password = PasswordField("New Password", [
validators.Required(message = "You must choose a new password."),
validators.Length(min = 8, message = "Password must be at least 8 characters.")
], render_kw={"class": 'text'}, description = "New Password")
confirm_password = PasswordField("Confirm New Password", render_kw={"class": 'text'}, description = "Confirm New Password")
def validate_confirm_password(form, field):
password = form['new_password'].data
if len(password) >= 8 and password != field.data:
raise ValidationError("Passwords must match.")
attending_choices = [
("Attending", "Yes, I will!"),
("Not Attending", "No, I won't.")
]
class RsvpForm(Form):
# attending = RadioField("Are you attending hackBCA III?", [validators.Required(message = "Please tell us if you are attending hackBCA III.")], render_kw={"class": 'text'}, choices = attending_choices, description = "Will you be at hackBCA?")
# t_shirt_size = SelectField("What is your shirt size?", [validators.Required(message = "You must select an option.")], choices = shirt_sizes, description = "What is your shirt size?")
dietary_restrictions = TextField("Dietary Restrictions", [
validators.optional(),
], render_kw={"class": 'text'}, description = "Do you have any dietary restrictions?")
guardian_name = TextField("Guardian Full Name", [
validators.Required(message = "You must enter a name.")
], render_kw={"class": 'text'}, description = "Guardian Name")
guardian_home_num = TextField("Guardian Home Number", [
validators.Required(message = "Enter your guardian's home number."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "Guardian Home Number")
guardian_cell_num = TextField("Guardian Cellphone", [
validators.Required(message = "Enter your guardian's cellphone number."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "Guardian Cellphone")
guardian_email = TextField("Guardian Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'}, description = "Guardian Email")
emergency_name = TextField("Emergency Contact Full Name", [
validators.Required(message = "You must enter a name.")
], render_kw={"class": 'text'}, description = "Emergency Contact Name")
emergency_home_num = TextField("Emergency Contact Home Number", [
validators.Required(message = "Enter your emergency contact's home number."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "Emergency Contact Home Number")
emergency_cell_num = TextField("Emergency Contact Cellphone", [
validators.Required(message = "Enter your emergency contact's cellphone."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "Emergency Contact Cellphone")
emergency_email = TextField("Emergency Contact Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'}, description = "Emergency Contact Email")
school_address = TextField("School Address", [
validators.Required(message = "Enter your school address."),
], render_kw={"class": 'text'}, description = "School Address")
school_town = TextField("School Town", [
validators.Required(message = "Enter your school town."),
], render_kw={"class": 'text'}, description = "School Town")
school_state = TextField("School State", [
validators.Required(message = "Enter your school state."),
], render_kw={"class": 'text'}, description = "School State")
school_phone_num = TextField("School Phone Number", [
validators.Required(message = "Enter school's home number."),
validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
], render_kw={"class": 'text'}, description = "School Phone Number")
school_principal_name = TextField("Principal Name", [
validators.Required(message = "You must enter a name."),
], render_kw={"class": 'text'}, description = "Principal Name")
school_principal_email = TextField("Principal Email", [
validators.Required(message = "Enter an email."),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'}, description = "Principal Email")
cs_teacher_name = TextField("CS Teacher Name", [
validators.optional(),
], render_kw={"class": 'text'}, description = "CS Teacher Name (if applicable)")
cs_teacher_email = TextField("CS Teacher Email", [
validators.optional(),
validators.Email(message = "Invalid email address."
)], render_kw={"class": 'text'}, description = "CS Teacher Email (if applicable)")
# class MentorRsvpForm(Form):
# attending = RadioField("Are you attending hackBCA III?", [validators.Required(message = "Please tell us if you are attending hackBCA III.")], choices = attending_choices)
# phone = TextField("Phone Number", [
# validators.Required(message = "Confirm your preferred contact number."),
# validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
# ], description = "Phone Number Confirmation")
# t_shirt_size = SelectField("What is your shirt size?", [validators.Required(message = "You must select an option.")], choices = shirt_sizes, description = "What is your shirt size?")
# food_allergies = TextAreaField("Allergies", [
# validators.optional(),
# ], description = "Do you have any allergies?")
# medical_information = TextAreaField("Medical Information", [
# validators.optional(),
# ], description = "Are there any other medical issues that we should know about? (ex. Other allergies, illnesses, etc.)")
# hackbca_rules = BooleanField("I agree",[
# validators.Required(message = "Please read and agree to our rules.")
# ], description = "I agree to the rules set forth by hackBCA.", default = False)
# mlh_terms = BooleanField("I agree",[
# validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
# ], description = "I agree to the MLH Code of Conduct.", default = False)
| 45.071594 | 269 | 0.645522 | 16,057 | 0.822761 | 0 | 0 | 0 | 0 | 0 | 0 | 8,509 | 0.436001 |
16d86786252483bb0df3775ba6255b1dd3edd2a1 | 2,181 | py | Python | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | [
"MIT"
] | null | null | null | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | [
"MIT"
] | 2 | 2017-11-17T03:14:45.000Z | 2019-10-19T07:17:22.000Z | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | [
"MIT"
] | 1 | 2017-11-16T09:33:38.000Z | 2017-11-16T09:33:38.000Z | # coding: utf-8
'''Provides the front controller.
'''
from math import ceil
import os
from flask import json
from flask import Flask
from flask import request
from flask import send_from_directory
from flask import render_template
# from json_loader import load_locations
# from json_loader import prepare_locations
from models import Location
# Number of filming locations to display per page
LOCATION_ITEMS_PER_PAGE = 20
app = Flask(__name__)
app.config['GOOGLE_API_KEY'] = os.environ['GOOGLE_API_KEY']
app.config['ROOT'] = (app.config['APPLICATION_ROOT']
if app.config['APPLICATION_ROOT'] else '')
@app.route('/static/<path:path>')
def send_js(path):
return send_from_directory('static', path)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/location')
def location():
req_title = request.args.get('title', None)
try:
req_page = int(request.args.get('page', 1))
except ValueError as e:
req_page = 1
query = Location.selectbase()
if req_title:
query = query.where(Location.title ** '%{}%'.format(req_title))
total_items = query.count()
total_pages = ceil(total_items / LOCATION_ITEMS_PER_PAGE)
current_page = req_page if req_page <= total_pages else total_pages
query = query.paginate(current_page, LOCATION_ITEMS_PER_PAGE)
locations = [l.as_dict() for l in query]
return json.jsonify({
'meta': {
'pager_data': {
'totalItems': total_items,
'totalPages': total_pages,
'currentItems': len(locations),
'currentPage': current_page,
'itemsPerPage': LOCATION_ITEMS_PER_PAGE,
},
},
'entities': {
'locations': locations,
},
})
@app.route('/movie')
def movie():
req_title = request.args.get('title', None)
if not req_title:
return json.jsonify([])
query = (Location.select(Location.title)
.distinct()
.where(Location.title ** '%{}%'.format(req_title)))
movies = [{'id': index, 'title': l.title} for index, l in enumerate(query)]
return json.jsonify(movies)
| 23.706522 | 79 | 0.629069 | 0 | 0 | 0 | 0 | 1,588 | 0.710515 | 0 | 0 | 474 | 0.212081 |
16d86a94620baf9944e6bd338662eefcd3ab573e | 2,180 | py | Python | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | [
"MIT"
] | 5 | 2021-09-10T14:20:15.000Z | 2022-01-09T11:27:49.000Z | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | [
"MIT"
] | 11 | 2021-08-15T09:39:09.000Z | 2022-01-12T14:11:24.000Z | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | [
"MIT"
] | 2 | 2021-12-01T23:33:14.000Z | 2022-01-12T11:08:18.000Z | from __future__ import annotations
from .base import CorkusBase
from enum import Enum
class DungeonType(Enum):
REMOVED = "REMOVED"
"""Dungeons that were removed from the game in version ``1.14.1`` like ``Skeleton`` or ``Spider``"""
REMOVED_MINI = "REMOVED_MINI"
"""Minidungeons that were reworked in version ``1.17`` like ``Ice`` or ``Ocean``"""
STANDARD = "STANDARD"
"""Generic dungeons like ``Galleon's Graveyard`` or ``Fallen Factory``"""
CORRUPTED = "CORRUPTED"
"""Harder variant of standard dungeons like ``Corrupted Decrepit Sewers`` or ``Corrupted Sand-Swept Tomb``"""
class Dungeon(CorkusBase):
"""Represents a `Dungeon <https://wynncraft.fandom.com/wiki/Dungeons>`_ completed by a :py:class:`Player`"""
@property
def name(self) -> str:
"""Name of the dungeon like ``Decrepit Sewers``, ``Galleon's Graveyard`` or ``Fallen Factory``."""
return self._attributes.get("name", "")
@property
def type(self) -> DungeonType:
"""Type of the dungeon."""
if self.name.startswith("Corrupted"):
return DungeonType.CORRUPTED
elif self.name in (
"Zombie",
"Animal",
"Skeleton",
"Spider",
"Silverfish",):
return DungeonType.REMOVED
elif self.name in (
"Jungle",
"Ice",
"Ocean"):
return DungeonType.REMOVED_MINI
elif self.name in (
"Decrepit Sewers",
"Infested Pit",
"Ice Barrows",
"Lost Sanctuary",
"Sand-Swept Tomb",
"Underworld Crypt",
"Undergrowth Ruins",
"Eldritch Outlook",
"Galleon's Graveyard",
"Fallen Factory"):
return DungeonType.STANDARD
else:
raise ValueError(f"Invalid dungeon: {self.name}")
@property
def completed(self) -> int:
"""Total runs completed by the player. Failed runs are not counted."""
return self._attributes.get("completed", 0)
def __repr__(self) -> str:
return f"<Dungeon name={self.name!r} completed={self.completed}>"
| 34.0625 | 113 | 0.580734 | 2,090 | 0.958716 | 0 | 0 | 1,304 | 0.598165 | 0 | 0 | 1,066 | 0.488991 |
16d935b63ca1c52fcdad82da9c168df67d096ff5 | 527 | py | Python | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | [
"BSD-3-Clause"
] | 18 | 2015-02-05T00:44:24.000Z | 2018-11-30T03:20:51.000Z | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | [
"BSD-3-Clause"
] | 4 | 2016-06-25T20:04:59.000Z | 2019-01-29T19:34:24.000Z | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | [
"BSD-3-Clause"
] | 14 | 2015-11-15T05:20:28.000Z | 2019-01-02T12:50:44.000Z |
import pybrisk
class Brisk:
def __init__(self, thresh=60, octaves=4):
self.thresh = thresh
self.octaves = octaves
self.descriptor_extractor = pybrisk.create()
def __del__(self):
pybrisk.destroy(self.descriptor_extractor)
def detect(self, img):
return pybrisk.detect(self.descriptor_extractor,
img, self.thresh, self.octaves)
def compute(self, img, keypoints):
return pybrisk.compute(self.descriptor_extractor,
img, keypoints)
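# Usage sketch (added for illustration; assumes the compiled `pybrisk` extension is importable
# and `img` is a grayscale image array, e.g. loaded with OpenCV):
#   brisk = Brisk(thresh=60, octaves=4)
#   keypoints = brisk.detect(img)
#   descriptors = brisk.compute(img, keypoints)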
| 26.35 | 57 | 0.648956 | 509 | 0.965844 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
16dc5aa7f7c7413a9e340c8bb600ebd849d60e67 | 2,897 | py | Python | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | [
"MIT"
] | null | null | null | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | [
"MIT"
] | null | null | null | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | [
"MIT"
] | null | null | null | import serial
import serial.tools.list_ports
from hale_hub.constants import STARTING_OUTLET_COMMAND, SERIAL_BAUD_RATE, SERIAL_TIMEOUT
from hale_hub.ifttt_logger import send_ifttt_log
class _Outlet:
def __init__(self, name):
self.state = 0
self.name = name
class _OutletInterface:
def __init__(self):
self.outlets = [_Outlet('Outlet 0'), _Outlet('Outlet 1'), _Outlet('Outlet 2')]
self.serial_interface = None
self.serial_interface_string = None
def set_outlet_name(self, name, outlet_id):
if outlet_id < len(self.outlets):
self.outlets[outlet_id].name = name
def set_serial_interface(self, serial_interface_string):
try:
print('Setting serial interface with description: {}'.format(serial_interface_string))
self.serial_interface_string = serial_interface_string
ports = [p.device for p in serial.tools.list_ports.comports() if self.serial_interface_string in p.description]
self.serial_interface = serial.Serial(ports[0], SERIAL_BAUD_RATE, timeout=SERIAL_TIMEOUT)
except IndexError:
            send_ifttt_log(__name__, 'No serial ports could be opened!')
def _send_outlet_command(self, outlet_id, outlet_state):
try:
print('Changing outlet {0} to {1} state'.format(outlet_id, outlet_state))
            command = bytearray([STARTING_OUTLET_COMMAND + (outlet_id << 1) + outlet_state])  # pack one command byte: base opcode plus (outlet_id << 1) plus the on/off bit
print('Writing {0} to serial'.format(command))
self.serial_interface.write(command)
except (serial.SerialException, AttributeError):
send_ifttt_log(__name__, 'No serial bytes could be written')
            if self.serial_interface.is_open:  # is_open is a property in pyserial 3.x, not a callable
self.serial_interface.close()
self.set_serial_interface(self.serial_interface_string)
def toggle_outlet(self, outlet_id):
if outlet_id < len(self.outlets):
self.outlets[outlet_id].state ^= 1
self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)
def turn_on_outlet(self, outlet_id):
if outlet_id < len(self.outlets):
self.outlets[outlet_id].state = 1
self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)
def turn_off_outlet(self, outlet_id):
if outlet_id < len(self.outlets):
self.outlets[outlet_id].state = 0
self._send_outlet_command(outlet_id, self.outlets[outlet_id].state)
def get_outlets(self):
return self.outlets
_outlet_interface = _OutletInterface()
set_outlet_serial_interface = _outlet_interface.set_serial_interface
toggle_outlet = _outlet_interface.toggle_outlet
turn_on_outlet = _outlet_interface.turn_on_outlet
turn_off_outlet = _outlet_interface.turn_off_outlet
get_outlets = _outlet_interface.get_outlets
set_outlet_name = _outlet_interface.set_outlet_name
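# Usage sketch (added for illustration; the port description string is hypothetical):
#   set_outlet_serial_interface('Arduino Uno')  # open the first serial port whose description matches
#   turn_on_outlet(0)                           # switch outlet 0 on
#   toggle_outlet(1)                            # flip outlet 1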
| 41.385714 | 123 | 0.706593 | 2,352 | 0.811874 | 0 | 0 | 0 | 0 | 0 | 0 | 200 | 0.069037 |
16dcdf8ea3ba055a8650580e31092f4149c84a27 | 3,233 | py | Python | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | [
"MIT"
] | null | null | null | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | [
"MIT"
] | null | null | null | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
def remove_nan(xyzs): return xyzs[~np.isnan(xyzs).any(axis = 1)]
def measure_twocores(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical distance vector between the centers.
- Interhelical angle (0-90 degree)
'''
# Obtain the centers...
center_ref = np.nanmean(core_xyz_ref, axis = 0)
center_tar = np.nanmean(core_xyz_tar, axis = 0)
# Construct the interhelical distance vector...
ih_dvec = center_tar - center_ref
# Calculate the length of interhelical distance vector...
norm_ih_dvec = np.linalg.norm(ih_dvec)
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Calculate the interhelical angle...
core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
ih_ang = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )
return ih_dvec, norm_ih_dvec, core_vec_ref_unit, core_vec_tar_unit, ih_ang
def calc_interangle(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical angle (0-90 degree)
'''
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Calculate the interhelical angle...
core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
inter_angle = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )
if inter_angle > np.pi / 2.0: inter_angle = np.pi - inter_angle
return inter_angle
def calc_interdist(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical distance vector between the centers.
Refers to http://geomalgorithms.com/a07-_distance.html for the method.
Q is ref, P is tar.
'''
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Obtain the starting point...
q0 = core_xyz_ref_nonan[0]
p0 = core_xyz_tar_nonan[0]
w0 = p0 - q0
# Obtain the directional vector with magnitude...
v = core_vec_ref
u = core_vec_tar
# Math part...
a = np.dot(u, u)
b = np.dot(u, v)
c = np.dot(v, v)
d = np.dot(u, w0)
e = np.dot(v, w0)
de = a * c - b * b # Denominator
if de == 0: sc, tc = 0, d / b
else: sc, tc = (b * e - c * d) / de, (a * e - b * d) / de
# Calculate distance...
wc = w0 + sc * u - tc * v
inter_dist = np.linalg.norm(wc)
return inter_dist
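# --- Added usage sketch (illustration only, not part of the original module) ---
# A minimal, hypothetical example assuming each helical core is an (N, 3) array
# of coordinates in which missing residues appear as NaN rows.
if __name__ == "__main__":
    core_a = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 1.5], [0.0, 0.0, 3.0]])
    core_b = np.array([[5.0, 0.0, 0.0], [5.0, 1.0, 1.5], [np.nan, np.nan, np.nan]])
    print(calc_interangle(core_a, core_b))  # interhelical angle in radians (0 .. pi/2)
    print(calc_interdist(core_a, core_b))   # closest distance between the two core axes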
| 33.677083 | 78 | 0.683266 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 988 | 0.305599 |
16dd18d4c9d6b529392f25ddf3a0704445995def | 675 | py | Python | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | 2 | 2021-03-06T22:09:44.000Z | 2021-03-14T14:41:03.000Z | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | 1 | 2020-03-25T15:42:47.000Z | 2020-10-06T21:41:14.000Z | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | null | null | null | from typing import List
with open("dane/liczby.txt") as f:
nums: List[int] = []
nums_9_chars: List[int] = []
for line in f:
sline = line.strip()
num = int(sline, 2)
if len(sline) == 9:
nums_9_chars.append(num)
nums.append(num)
count_even = 0
max_num = 0
for num in nums:
if num % 2 == 0:
count_even += 1
if num > max_num:
max_num = num
print(f"{count_even=}")
print(f"max_num(10): {max_num}, max_num(2): {bin(max_num)[2:]}")
sum_9_chars = 0
for num in nums_9_chars:
sum_9_chars += num
print(f"count of numbers with 9 digits: {len(nums_9_chars)}, their sum: {bin(sum_9_chars)[2:]}")
| 20.454545 | 96 | 0.58963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.265185 |
16de03e641bb707c0257c647f4e57b0375e2b543 | 668 | py | Python | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | [
"BSD-2-Clause"
] | 5 | 2015-10-13T04:13:04.000Z | 2020-12-23T13:47:43.000Z | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | [
"BSD-2-Clause"
] | null | null | null | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | [
"BSD-2-Clause"
] | 8 | 2015-07-20T15:37:38.000Z | 2021-04-14T07:18:10.000Z | def fib(n):
if n < 2:
return n
else:
return fib(n-1) + fib(n-2)
def fib_fast(n):
from math import sqrt
s5 = sqrt(5)
x = (1 + s5) ** n
y = (1 - s5) ** n
return int((x - y)/(s5 * 2**n))
def print_fib(n):
for i in range(n):
print fib(i),
print
for i in range(n):
print fib_fast(i),
def print_fib2(n):
fibs = [0, 1]
a, b = 0, 1
if n == 0:
print a
elif n == 1:
print a, b
else:
print 0, 1,
for i in range(2, n):
a, b = b, a + b
print b,
if __name__ == "__main__":
print_fib(10)
print
print_fib2(10)
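    # Added note: each printed row should read 0 1 1 2 3 5 8 13 21 34, so the
    # closed-form fib_fast (Binet's formula) can be checked against the
    # recursive fib and the iterative print_fib2.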
| 15.904762 | 35 | 0.438623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.01497 |
16de052924f6b7a0503a267b4aaeda1587303cff | 3,681 | py | Python | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | [
"MIT"
] | null | null | null | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | [
"MIT"
] | null | null | null | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | [
"MIT"
] | null | null | null | import os
from typing import List
from src.model.Etape import Etape
from src.model.Grille import Grille
from src.model.ItemCase import ItemCase
from src.model.PointMontage import PointMontage
from src.model.Robot import Robot
from src.model.Tache import Tache
class ParseInput:
"""Parser qui permet de lire le fichier texte d'input fourni par Google.
Va transformer ce fichier en données et classes exploitables pour nous
"""
grille: Grille
def __init__(self):
pass
def parse(self, file_path: str) -> Grille:
"""parse le fichier google et retourne la Grille correspondante
:rtype: Grille
"""
        # check that file_path points to an existing file
assert os.path.isfile(file_path)
with open(file_path, 'r') as file:
index: int = 0
            # read all the lines of the file
lines: List = file.readlines()
            # Convert the lines into lists of integers
for index_line in range(len(lines)):
lines[index_line] = lines[index_line].split(' ')
for index_val in range(len(lines[index_line])):
lines[index_line][index_val] = int(lines[index_line][index_val])
            # create a Grille instance
grille = Grille(lines[0][0], lines[0][1])
            # create in the grid the corresponding number of robots
            # create the robots
for idx_robot in range(lines[0][2]):
grille.robots.append(Robot())
            # Create the mounting points and place them in the grid
for idx_point_montage in range(lines[0][3]):
index += 1
grille.add_point_montage(PointMontage(lines[index][0], lines[index][1]))
            # Get the number of allowed clock ticks
grille.step_simulation = lines[0][5]
            # Get the information of each task
            # instantiate the corresponding tasks in the grid
            # if a step (assembly point) has not been created yet in the grid at the corresponding coordinates,
            # instantiate it and put it in the grid (and do not forget to link it to the task)
            # Create the Tache and Etape instances
for index_tache in range(lines[0][4]):
index += 1
tache_tampon: Tache = Tache(lines[index][0], index_tache)
index += 1
g_x = 0
g_y = 0
for index_etape in range(lines[index-1][1]):
                    # add the steps
etape = Etape(lines[index][index_etape*2+0], lines[index][index_etape*2+1])
tache_tampon.add_etape(etape)
g_x += (etape.x - g_x)/len(tache_tampon.etapes)
g_y += (etape.y - g_y)/len(tache_tampon.etapes)
                # store the computed parameters in the Tache instance
tache_tampon.centre_gravite = ItemCase(int(g_x), int(g_y))
tache_tampon.distance_centre_gravite = max(tache_tampon.etapes,
key=lambda etape: tache_tampon.centre_gravite.distance(etape)) \
.distance(tache_tampon.centre_gravite)
grille.add_tache(tache_tampon)
                # compute the approximate distance and surface between consecutive steps
for etape_from, etape_to in zip(tache_tampon.etapes[0::1], tache_tampon.etapes[1::1]):
tache_tampon.distance += etape_from.distance(etape_to)
tache_tampon.surface += etape_from.distance(etape_to)
return grille | 40.9 | 123 | 0.594947 | 3,441 | 0.928996 | 0 | 0 | 0 | 0 | 0 | 0 | 1,056 | 0.285097 |
16df196ac8b1d19487d9f38ab432516956acf44f | 13,440 | py | Python | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | [
"MIT"
] | null | null | null | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | [
"MIT"
] | null | null | null | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | [
"MIT"
] | null | null | null | import os
import sys
import time
import traceback
import project1_Copy as p1
import numpy as np
verbose = False
def green(s):
return '\033[1;32m%s\033[m' % s
def yellow(s):
return '\033[1;33m%s\033[m' % s
def red(s):
return '\033[1;31m%s\033[m' % s
def log(*m):
print(" ".join(map(str, m)))
def log_exit(*m):
log(red("ERROR:"), *m)
exit(1)
def check_real(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not np.isreal(res):
log(red("FAIL"), ex_name, ": does not return a real number, type: ", type(res))
return True
if res != exp_res:
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def equals(x, y):
if type(y) == np.ndarray:
return (x == y).all()
return x == y
def check_tuple(ex_name, f, exp_res, *args, **kwargs):
try:
res = f(*args, **kwargs)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == tuple:
log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected a tuple of size ", len(exp_res), " but got tuple of size", len(res))
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_array(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == np.ndarray:
log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_list(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == list:
log(red("FAIL"), ex_name, ": does not return a list, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected a list of size ", len(exp_res), " but got list of size", len(res))
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_get_order():
ex_name = "Get order"
if check_list(
ex_name, p1.get_order,
[0], 1):
log("You should revert `get_order` to its original implementation for this test to pass")
return
if check_list(
ex_name, p1.get_order,
[1, 0], 2):
log("You should revert `get_order` to its original implementation for this test to pass")
return
log(green("PASS"), ex_name, "")
def check_hinge_loss_single():
ex_name = "Hinge loss single"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -0.2
exp_res = 1 - 0.8
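    # Added note: theta . x + theta_0 = -1*1 + 1*2 - 0.2 = 0.8; with label +1 the
    # margin is 0.8, so the expected hinge loss is max(0, 1 - 0.8) = 0.2.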
if check_real(
ex_name, p1.hinge_loss_single,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_hinge_loss_full():
ex_name = "Hinge loss full"
feature_vector = np.array([[1, 2], [1, 2]])
label, theta, theta_0 = np.array([1, 1]), np.array([-1, 1]), -0.2
exp_res = 1 - 0.8
if check_real(
ex_name, p1.hinge_loss_full,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_perceptron_single_update():
ex_name = "Perceptron single update"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
exp_res = (np.array([0, 3]), -0.5)
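    # Added note: the prediction -1*1 + 1*2 - 1.5 = -0.5 is not positive, so the
    # perceptron update adds label*x to theta ([-1, 1] -> [0, 3]) and label to
    # theta_0 (-1.5 -> -0.5), matching exp_res.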
if check_tuple(
ex_name, p1.perceptron_single_step_update,
exp_res, feature_vector, label, theta, theta_0):
return
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1
exp_res = (np.array([0, 3]), 0)
if check_tuple(
ex_name + " (boundary case)", p1.perceptron_single_step_update,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_perceptron():
ex_name = "Perceptron"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 1
exp_res = (np.array([0, 2]), 2)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 2
exp_res = (np.array([0, 2]), 2)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
log(green("PASS"), ex_name, "")
def check_average_perceptron():
ex_name = "Average perceptron"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 1
exp_res = (np.array([-0.5, 1]), 1.5)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 2
exp_res = (np.array([-0.25, 1.5]), 1.75)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
log(green("PASS"), ex_name, "")
def check_pegasos_single_update():
ex_name = "Pegasos single update"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.18]), -1.4)
if check_tuple(
ex_name, p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
feature_vector = np.array([1, 1])
label, theta, theta_0 = 1, np.array([-1, 1]), 1
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.08]), 1.1)
if check_tuple(
ex_name + " (boundary case)", p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -2
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.18]), -1.9)
if check_tuple(
ex_name, p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_pegasos():
ex_name = "Pegasos"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
L = 0.2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.pegasos,
exp_res, feature_matrix, labels, T, L):
return
feature_matrix = np.array([[1, 1], [1, 1]])
labels = np.array([1, 1])
T = 1
L = 1
exp_res = (np.array([1-1/np.sqrt(2), 1-1/np.sqrt(2)]), 1)
if check_tuple(
ex_name, p1.pegasos,
exp_res, feature_matrix, labels, T, L):
return
log(green("PASS"), ex_name, "")
def check_classify():
ex_name = "Classify"
feature_matrix = np.array([[1, 1], [1, 1], [1, 1]])
theta = np.array([1, 1])
theta_0 = 0
exp_res = np.array([1, 1, 1])
if check_array(
ex_name, p1.classify,
exp_res, feature_matrix, theta, theta_0):
return
feature_matrix = np.array([[-1, 1]])
theta = np.array([1, 1])
theta_0 = 0
exp_res = np.array([-1])
if check_array(
ex_name + " (boundary case)", p1.classify,
exp_res, feature_matrix, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_classifier_accuracy():
ex_name = "Classifier accuracy"
train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
val_feature_matrix = np.array([[1, 1], [2, -1]])
train_labels = np.array([1, -1, 1])
val_labels = np.array([-1, 1])
exp_res = 1, 0
T=1
if check_tuple(
ex_name, p1.classifier_accuracy,
exp_res,
p1.perceptron,
train_feature_matrix, val_feature_matrix,
train_labels, val_labels,
T=T):
return
train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
val_feature_matrix = np.array([[1, 1], [2, -1]])
train_labels = np.array([1, -1, 1])
val_labels = np.array([-1, 1])
exp_res = 1, 0
T=1
L=0.2
if check_tuple(
ex_name, p1.classifier_accuracy,
exp_res,
p1.pegasos,
train_feature_matrix, val_feature_matrix,
train_labels, val_labels,
T=T, L=L):
return
log(green("PASS"), ex_name, "")
def check_bag_of_words():
ex_name = "Bag of words"
texts = [
"He loves to walk on the beach",
"There is nothing better"]
try:
res = p1.bag_of_words(texts)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return
if not type(res) == dict:
log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
return
vals = sorted(res.values())
exp_vals = list(range(len(res.keys())))
if not vals == exp_vals:
log(red("FAIL"), ex_name, ": wrong set of indices. Expected: ", exp_vals, " got ", vals)
return
log(green("PASS"), ex_name, "")
keys = sorted(res.keys())
exp_keys = ['beach', 'better', 'he', 'is', 'loves', 'nothing', 'on', 'the', 'there', 'to', 'walk']
stop_keys = ['beach', 'better', 'loves', 'nothing', 'walk']
if keys == exp_keys:
log(yellow("WARN"), ex_name, ": does not remove stopwords:", [k for k in keys if k not in stop_keys])
elif keys == stop_keys:
log(green("PASS"), ex_name, " stopwords removed")
else:
log(red("FAIL"), ex_name, ": keys are missing:", [k for k in stop_keys if k not in keys], " or are not unexpected:", [k for k in keys if k not in stop_keys])
def check_extract_bow_feature_vectors():
ex_name = "Extract bow feature vectors"
texts = [
"He loves her ",
"He really really loves her"]
keys = ["he", "loves", "her", "really"]
dictionary = {k:i for i, k in enumerate(keys)}
exp_res = np.array(
[[1, 1, 1, 0],
[1, 1, 1, 1]])
non_bin_res = np.array(
[[1, 1, 1, 0],
[1, 1, 1, 2]])
try:
res = p1.extract_bow_feature_vectors(texts, dictionary)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return
if not type(res) == np.ndarray:
log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
return
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
return
log(green("PASS"), ex_name)
if (res == exp_res).all():
log(yellow("WARN"), ex_name, ": uses binary indicators as features")
elif (res == non_bin_res).all():
log(green("PASS"), ex_name, ": correct non binary features")
else:
log(red("FAIL"), ex_name, ": unexpected feature matrix")
return
def main():
log(green("PASS"), "Import project1")
try:
check_get_order()
check_hinge_loss_single()
check_hinge_loss_full()
check_perceptron_single_update()
check_perceptron()
check_average_perceptron()
check_pegasos_single_update()
check_pegasos()
check_classify()
check_classifier_accuracy()
check_bag_of_words()
check_extract_bow_feature_vectors()
except Exception:
log_exit(traceback.format_exc())
if __name__ == "__main__":
main()
| 29.154013 | 165 | 0.564658 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,968 | 0.146429 |
16e1f96bcb5b1ba1faf14b289b7309040c63b043 | 1,619 | py | Python | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | [
"MIT"
] | null | null | null | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | [
"MIT"
] | null | null | null | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | [
"MIT"
] | null | null | null | """
Unit tests for module `homework_1.tasks.task_3`.
"""
from tempfile import NamedTemporaryFile
from typing import Tuple
import pytest
from homework_1.tasks.task_3 import find_maximum_and_minimum
@pytest.mark.parametrize(
["file_content", "expected_result"],
[
pytest.param(
"0\n",
(0, 0),
id="'0\n', result is (0, 0).",
),
pytest.param(
"1\n2\n3\n4\n5\n",
(1, 5),
id="'1\n2\n3\n4\n5\n', result is (1, 5).",
),
pytest.param(
"1\n-2\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n11\n-12\n",
(-12, 11),
id="'1\n-2\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n11\n-12\n', result: (11,-12).",
),
pytest.param(
"11\n-12\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n1\n-2\n",
(-12, 11),
id="'11\n-12\n3\n-4\n5\n-6\n7\n-8\n9\n-10\n1\n-2\n', result: (11,-12).",
),
pytest.param(
"\n".join(str(num) for num in range(0, 667000)),
(0, 666999),
id="Integers from 0 to 666999 delimited by '\n'.",
),
],
)
def test_find_maximum_and_minimum(file_content: str, expected_result: Tuple[int, int]):
"""
Mocks file using `NamedTemporaryFile` instance with writed
`file_content` inside, where `file_name` == `file.name`.
Passes test if `find_maximum_and_minimum`(`file.name`)
is equal to `expected_result`.
"""
with NamedTemporaryFile(mode="wt") as file:
file.write(file_content)
file.seek(0)
assert find_maximum_and_minimum(file.name) == expected_result
| 29.436364 | 87 | 0.542928 | 0 | 0 | 0 | 0 | 1,416 | 0.874614 | 0 | 0 | 687 | 0.424336 |
16e3d9c0e2f0128dd26f1a69eb5d1f88d973387a | 16,524 | py | Python | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = ['BlobContainerArgs', 'BlobContainer']
@pulumi.input_type
class BlobContainerArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
container_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
public_access: Optional[pulumi.Input['PublicAccess']] = None):
"""
The set of arguments for constructing a BlobContainer resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param pulumi.Input[str] container_name: The name of the blob container within the specified storage account. Blob container names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: A name-value pair to associate with the container as metadata.
:param pulumi.Input['PublicAccess'] public_access: Specifies whether data in the container may be accessed publicly and the level of access.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if container_name is not None:
pulumi.set(__self__, "container_name", container_name)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if public_access is not None:
pulumi.set(__self__, "public_access", public_access)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group within the user's subscription. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the blob container within the specified storage account. Blob container names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
"""
return pulumi.get(self, "container_name")
@container_name.setter
def container_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "container_name", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A name-value pair to associate with the container as metadata.
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter(name="publicAccess")
def public_access(self) -> Optional[pulumi.Input['PublicAccess']]:
"""
Specifies whether data in the container may be accessed publicly and the level of access.
"""
return pulumi.get(self, "public_access")
@public_access.setter
def public_access(self, value: Optional[pulumi.Input['PublicAccess']]):
pulumi.set(self, "public_access", value)
class BlobContainer(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
public_access: Optional[pulumi.Input['PublicAccess']] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Properties of the blob container, including Id, resource name, resource type, Etag.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param pulumi.Input[str] container_name: The name of the blob container within the specified storage account. Blob container names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: A name-value pair to associate with the container as metadata.
:param pulumi.Input['PublicAccess'] public_access: Specifies whether data in the container may be accessed publicly and the level of access.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BlobContainerArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Properties of the blob container, including Id, resource name, resource type, Etag.
:param str resource_name: The name of the resource.
:param BlobContainerArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BlobContainerArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
public_access: Optional[pulumi.Input['PublicAccess']] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BlobContainerArgs.__new__(BlobContainerArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["container_name"] = container_name
__props__.__dict__["metadata"] = metadata
__props__.__dict__["public_access"] = public_access
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["etag"] = None
__props__.__dict__["has_immutability_policy"] = None
__props__.__dict__["has_legal_hold"] = None
__props__.__dict__["immutability_policy"] = None
__props__.__dict__["last_modified_time"] = None
__props__.__dict__["lease_duration"] = None
__props__.__dict__["lease_state"] = None
__props__.__dict__["lease_status"] = None
__props__.__dict__["legal_hold"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage/v20181101:BlobContainer"), pulumi.Alias(type_="azure-native:storage:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180201:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180201:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180301preview:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180301preview:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180701:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180701:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20190401:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20190401:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20190601:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20200801preview:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20200801preview:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20210101:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20210101:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20210201:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20210201:BlobContainer")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(BlobContainer, __self__).__init__(
'azure-native:storage/v20181101:BlobContainer',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'BlobContainer':
"""
Get an existing BlobContainer resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = BlobContainerArgs.__new__(BlobContainerArgs)
__props__.__dict__["etag"] = None
__props__.__dict__["has_immutability_policy"] = None
__props__.__dict__["has_legal_hold"] = None
__props__.__dict__["immutability_policy"] = None
__props__.__dict__["last_modified_time"] = None
__props__.__dict__["lease_duration"] = None
__props__.__dict__["lease_state"] = None
__props__.__dict__["lease_status"] = None
__props__.__dict__["legal_hold"] = None
__props__.__dict__["metadata"] = None
__props__.__dict__["name"] = None
__props__.__dict__["public_access"] = None
__props__.__dict__["type"] = None
return BlobContainer(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
Resource Etag.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="hasImmutabilityPolicy")
def has_immutability_policy(self) -> pulumi.Output[bool]:
"""
The hasImmutabilityPolicy public property is set to true by SRP if ImmutabilityPolicy has been created for this container. The hasImmutabilityPolicy public property is set to false by SRP if ImmutabilityPolicy has not been created for this container.
"""
return pulumi.get(self, "has_immutability_policy")
@property
@pulumi.getter(name="hasLegalHold")
def has_legal_hold(self) -> pulumi.Output[bool]:
"""
The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. There can be a maximum of 1000 blob containers with hasLegalHold=true for a given account.
"""
return pulumi.get(self, "has_legal_hold")
@property
@pulumi.getter(name="immutabilityPolicy")
def immutability_policy(self) -> pulumi.Output['outputs.ImmutabilityPolicyPropertiesResponse']:
"""
The ImmutabilityPolicy property of the container.
"""
return pulumi.get(self, "immutability_policy")
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> pulumi.Output[str]:
"""
Returns the date and time the container was last modified.
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter(name="leaseDuration")
def lease_duration(self) -> pulumi.Output[str]:
"""
Specifies whether the lease on a container is of infinite or fixed duration, only when the container is leased.
"""
return pulumi.get(self, "lease_duration")
@property
@pulumi.getter(name="leaseState")
def lease_state(self) -> pulumi.Output[str]:
"""
Lease state of the container.
"""
return pulumi.get(self, "lease_state")
@property
@pulumi.getter(name="leaseStatus")
def lease_status(self) -> pulumi.Output[str]:
"""
The lease status of the container.
"""
return pulumi.get(self, "lease_status")
@property
@pulumi.getter(name="legalHold")
def legal_hold(self) -> pulumi.Output['outputs.LegalHoldPropertiesResponse']:
"""
The LegalHold property of the container.
"""
return pulumi.get(self, "legal_hold")
@property
@pulumi.getter
def metadata(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A name-value pair to associate with the container as metadata.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="publicAccess")
def public_access(self) -> pulumi.Output[Optional[str]]:
"""
Specifies whether data in the container may be accessed publicly and the level of access.
"""
return pulumi.get(self, "public_access")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
| 50.53211 | 1,363 | 0.676834 | 16,088 | 0.973614 | 0 | 0 | 11,663 | 0.705822 | 0 | 0 | 7,905 | 0.478395 |
16e4af35a62847ccd702cb32c6b8a27f27bee59d | 129 | py | Python | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | [
"BSD-3-Clause"
] | null | null | null | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | [
"BSD-3-Clause"
] | 8 | 2021-03-19T03:28:32.000Z | 2022-03-11T23:59:00.000Z | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | [
"BSD-3-Clause"
] | null | null | null | # coding:utf-8
import app.admin.views.start
import app.admin.views.book
import app.admin.views.user
import app.admin.views.site
| 18.428571 | 28 | 0.79845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.108527 |
16e4dfbf8bd61eccd8ee52165a28c0666d169326 | 840 | py | Python | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | [
"MIT"
] | null | null | null | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | [
"MIT"
] | null | null | null | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | [
"MIT"
] | null | null | null | import os
import chainer
import chainer.links as L
from net import SiameseNetwork
import numpy as np
import matplotlib.pyplot as plt
# Load the trained model
model = SiameseNetwork()
chainer.serializers.load_npz(os.path.join('result', 'model.npz'), model)
# Load the test data
_, test = chainer.datasets.get_mnist(ndim=3)
test_data, test_label = test._datasets
# Map the test data into the learned low-dimensional (2D) space
y = model.forward_once(test_data)
feat = y.data
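# Added note: forward_once is assumed to embed the whole test set at once, so feat
# should be an (N, 2) array of 2-D coordinates, one row per test digit.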
# Plot each label
c = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff',
'#ff00ff', '#990000', '#999900', '#009900', '#009999']
# Plot each label in a different color
# Success if instances of the same class gather close together
# and instances of different classes are far apart
for i in range(10):
f = feat[np.where(test_label == i)]
plt.plot(f[:, 0], f[:, 1], '.', c=c[i])
plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
plt.savefig(os.path.join('result', 'result.png'))
| 24.705882 | 72 | 0.667857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 510 | 0.480226 |
16e5abfcca6728651310e1b9d7d20815d0685476 | 5,535 | py | Python | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | [
"MIT"
] | null | null | null | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | [
"MIT"
] | null | null | null | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | [
"MIT"
] | null | null | null | from random import randrange, random
from time import time
import logging
from TwisterTempoGUI import TwisterTempoGUI
class TwoFeetTempoMove(object):
COLORS_ALPHA = {0: 'RED', 1: 'BLUE', 2: 'YELLOW', 3: 'GREEN'}
COLORS_RGB = {0: (255, 0, 0), 1: (0, 0, 255), 2: (255, 255, 0), 3: (0, 255, 0)}
FOOT_CHANGE_PERC = 0.3
FOOT_ON_AIR_PERC = 0.08
FEET_ON_SAME_CIRCLE_PERC = 0.05
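    # Added note: on each beat, beat_found() first rolls an 8% chance of a
    # foot-on-air move, then a 5% chance of putting both feet on the same circle;
    # otherwise it rolls a 30% chance of switching which foot moves before
    # stepping that foot to the next color.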
def __init__(self, min_delay=0, max_delay=100):
assert min_delay >= 0
assert max_delay > 0
self.min_delay = min_delay
self.max_delay = max_delay
self._last_beat_millis = 0
self._left_color = randrange(0, len(TwoFeetTempoMove.COLORS_ALPHA))
self._right_color = randrange(0, len(TwoFeetTempoMove.COLORS_ALPHA))
self._left_direction = "FW"
self._right_direction = "FW"
self._next_foot = 'RIGHT'
logging.info("Starting with LEFT: %s, RIGHT: %s" %
(TwoFeetTempoMove.COLORS_ALPHA[self._left_color],
TwoFeetTempoMove.COLORS_ALPHA[self._right_color]))
self.tt_gui = TwisterTempoGUI()
self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
self._starting_millis = time() * 1000
def get_colors_alpha(self):
return {'RIGHT': TwoFeetTempoMove.COLORS_ALPHA[self._right_color],
'LEFT': TwoFeetTempoMove.COLORS_ALPHA[self._left_color]}
def get_colors_rgb(self):
return {'RIGHT': TwoFeetTempoMove.COLORS_RGB[self._right_color],
'LEFT': TwoFeetTempoMove.COLORS_RGB[self._left_color]}
def increase_speed(self):
self.min_delay = self.min_delay - 10
def decrease_speed(self):
self.min_delay = self.min_delay + 10
def tempo_found_callback(self, seconds, millis, confidence):
act_delay = millis - self._last_beat_millis + randrange(0, self.max_delay)
if act_delay >= self.min_delay:
self._last_beat_millis = millis
self.beat_found()
def beat_found(self):
millis = self._last_beat_millis
logging.debug("Randomized beat found at: %d:%d.%d" %
(millis / 60000, millis / 1000, millis % 1000))
act_millis = time() * 1000 - self._starting_millis
logging.debug("\tActual: %d:%d.%d" %
(act_millis / 60000, act_millis / 1000, act_millis % 1000))
# special moves
if random() < TwoFeetTempoMove.FOOT_ON_AIR_PERC: # randomized next foot on air move
if self._next_foot == 'RIGHT':
self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color], on_air=True)
else:
self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color], on_air=True)
logging.debug("\tmove next foot On Air")
elif random() < TwoFeetTempoMove.FEET_ON_SAME_CIRCLE_PERC: # randomized both feet on same circle
if self._next_foot == 'RIGHT':
self._right_color = self._left_color
self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
else:
self._left_color = self._right_color
self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
logging.debug("\tmove both feet on same circle")
# end special moves
else:
if random() < TwoFeetTempoMove.FOOT_CHANGE_PERC: # randomize at 30% the switch on foot
if self._next_foot == 'RIGHT':
self._next_foot = 'LEFT'
else:
self._next_foot = 'RIGHT'
if self._next_foot == 'RIGHT':
if self._right_direction == "FW":
if self._right_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
self._right_color = self._right_color - 1
self._right_direction = "BW"
else:
self._right_color = self._right_color + 1
else:
if self._right_color == 0:
self._right_color = self._right_color + 1
self._right_direction = "FW"
else:
self._right_color = self._right_color - 1
self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
logging.debug("\tmove RIGHT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
self._next_foot = 'LEFT'
else:
if self._left_direction == "FW":
if self._left_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
self._left_color = self._left_color - 1
self._left_direction = "BW"
else:
self._left_color = self._left_color + 1
else:
if self._left_color == 0:
self._left_color = self._left_color + 1
self._left_direction = "FW"
else:
self._left_color = self._left_color - 1
self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
logging.debug("\tmove LEFT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
self._next_foot = 'RIGHT'
| 45.368852 | 106 | 0.592954 | 5,413 | 0.977958 | 0 | 0 | 0 | 0 | 0 | 0 | 481 | 0.086902 |
16e6fb3c075a8554e7e6d5fe5397106b44ef9bf3 | 311 | py | Python | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | [
"MIT"
] | 13 | 2020-01-27T09:02:25.000Z | 2022-01-20T07:45:26.000Z | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | [
"MIT"
] | 29 | 2020-03-22T06:57:57.000Z | 2022-01-24T22:46:42.000Z | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | [
"MIT"
] | 6 | 2020-07-10T00:13:30.000Z | 2022-01-26T08:22:33.000Z | # -*- coding: utf-8 -*-
"""
Panorama is a Pelican plugin to generate statistics from blog posts
(number of posts per month, categories and so on) and display them as beautiful charts.
Project location: https://github.com/romainx/panorama
"""
__version__ = "0.2.0"
__author__ = "romainx"
from .panorama import *
| 22.214286 | 83 | 0.726688 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 253 | 0.813505 |
16e747bd6febb0a03dbe8fb17268efc47ff0c0ee | 7,999 | py | Python | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | [
"Apache-2.0"
] | 9 | 2015-07-21T17:41:25.000Z | 2020-08-26T13:37:08.000Z | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | [
"Apache-2.0"
] | 4 | 2015-06-11T18:40:16.000Z | 2020-04-03T20:31:40.000Z | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | [
"Apache-2.0"
] | 4 | 2016-02-09T21:45:50.000Z | 2020-07-30T21:52:50.000Z | #!/usr/bin/python2.5
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gtfsobjectbase import GtfsObjectBase
import problems as problems_module
import util
class Transfer(GtfsObjectBase):
"""Represents a transfer in a schedule"""
_REQUIRED_FIELD_NAMES = ['from_stop_id', 'to_stop_id', 'transfer_type']
_FIELD_NAMES = _REQUIRED_FIELD_NAMES + ['min_transfer_time']
_TABLE_NAME = 'transfers'
_ID_COLUMNS = ['from_stop_id', 'to_stop_id']
def __init__(self, schedule=None, from_stop_id=None, to_stop_id=None, transfer_type=None,
min_transfer_time=None, field_dict=None):
self._schedule = None
if field_dict:
self.__dict__.update(field_dict)
else:
self.from_stop_id = from_stop_id
self.to_stop_id = to_stop_id
self.transfer_type = transfer_type
self.min_transfer_time = min_transfer_time
if getattr(self, 'transfer_type', None) in ("", None):
# Use the default, recommended transfer, if attribute is not set or blank
self.transfer_type = 0
else:
try:
self.transfer_type = util.NonNegIntStringToInt(self.transfer_type)
except (TypeError, ValueError):
pass
if hasattr(self, 'min_transfer_time'):
try:
self.min_transfer_time = util.NonNegIntStringToInt(self.min_transfer_time)
except (TypeError, ValueError):
pass
else:
self.min_transfer_time = None
if schedule is not None:
# Note from Tom, Nov 25, 2009: Maybe calling __init__ with a schedule
# should output a DeprecationWarning. A schedule factory probably won't
# use it and other GenericGTFSObject subclasses don't support it.
schedule.AddTransferObject(self)
def ValidateFromStopIdIsPresent(self, problems):
if util.IsEmpty(self.from_stop_id):
problems.MissingValue('from_stop_id')
return False
return True
def ValidateToStopIdIsPresent(self, problems):
if util.IsEmpty(self.to_stop_id):
problems.MissingValue('to_stop_id')
return False
return True
def ValidateTransferType(self, problems):
if not util.IsEmpty(self.transfer_type):
if (not isinstance(self.transfer_type, int)) or \
(self.transfer_type not in range(0, 4)):
problems.InvalidValue('transfer_type', self.transfer_type)
return False
return True
def ValidateMinimumTransferTime(self, problems):
if not util.IsEmpty(self.min_transfer_time):
if self.transfer_type != 2:
problems.MinimumTransferTimeSetWithInvalidTransferType(
self.transfer_type)
# If min_transfer_time is negative, equal to or bigger than 24h, issue
# an error. If smaller than 24h but bigger than 3h issue a warning.
# These errors are not blocking, and should not prevent the transfer
# from being added to the schedule.
if (isinstance(self.min_transfer_time, int)):
if self.min_transfer_time < 0:
problems.InvalidValue('min_transfer_time', self.min_transfer_time,
reason="This field cannot contain a negative " \
"value.")
elif self.min_transfer_time >= 24*3600:
problems.InvalidValue('min_transfer_time', self.min_transfer_time,
reason="The value is very large for a " \
"transfer time and most likely " \
"indicates an error.")
elif self.min_transfer_time >= 3*3600:
problems.InvalidValue('min_transfer_time', self.min_transfer_time,
type=problems_module.TYPE_WARNING,
reason="The value is large for a transfer " \
"time and most likely indicates " \
"an error.")
else:
# It has a value, but it is not an integer
problems.InvalidValue('min_transfer_time', self.min_transfer_time,
reason="If present, this field should contain " \
"an integer value.")
return False
return True
def GetTransferDistance(self):
from_stop = self._schedule.stops[self.from_stop_id]
to_stop = self._schedule.stops[self.to_stop_id]
distance = util.ApproximateDistanceBetweenStops(from_stop, to_stop)
return distance
def ValidateFromStopIdIsValid(self, problems):
if self.from_stop_id not in self._schedule.stops.keys():
problems.InvalidValue('from_stop_id', self.from_stop_id)
return False
return True
def ValidateToStopIdIsValid(self, problems):
if self.to_stop_id not in self._schedule.stops.keys():
problems.InvalidValue('to_stop_id', self.to_stop_id)
return False
return True
def ValidateTransferDistance(self, problems):
distance = self.GetTransferDistance()
if distance > 10000:
problems.TransferDistanceTooBig(self.from_stop_id,
self.to_stop_id,
distance)
elif distance > 2000:
problems.TransferDistanceTooBig(self.from_stop_id,
self.to_stop_id,
distance,
type=problems_module.TYPE_WARNING)
def ValidateTransferWalkingTime(self, problems):
if util.IsEmpty(self.min_transfer_time):
return
if self.min_transfer_time < 0:
# Error has already been reported, and it does not make sense
# to calculate walking speed with negative times.
return
distance = self.GetTransferDistance()
# If min_transfer_time + 120s isn't enough for someone walking very fast
# (2m/s) then issue a warning.
#
    # Stops that are close together (less than 240m apart) never trigger this
# warning, regardless of min_transfer_time.
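    #
    # Added worked example: stops 600m apart need 600 / 2 = 300s at fast walking
    # speed, so min_transfer_time = 150 (150 + 120 = 270 < 300) triggers the
    # warning while min_transfer_time = 200 (200 + 120 = 320) does not.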
FAST_WALKING_SPEED= 2 # 2m/s
if self.min_transfer_time + 120 < distance / FAST_WALKING_SPEED:
problems.TransferWalkingSpeedTooFast(from_stop_id=self.from_stop_id,
to_stop_id=self.to_stop_id,
transfer_time=self.min_transfer_time,
distance=distance)
def ValidateBeforeAdd(self, problems):
result = True
result = self.ValidateFromStopIdIsPresent(problems) and result
result = self.ValidateToStopIdIsPresent(problems) and result
result = self.ValidateTransferType(problems) and result
result = self.ValidateMinimumTransferTime(problems) and result
return result
def ValidateAfterAdd(self, problems):
valid_stop_ids = True
valid_stop_ids = self.ValidateFromStopIdIsValid(problems) and valid_stop_ids
valid_stop_ids = self.ValidateToStopIdIsValid(problems) and valid_stop_ids
# We need both stop IDs to be valid to able to validate their distance and
# the walking time between them
if valid_stop_ids:
self.ValidateTransferDistance(problems)
self.ValidateTransferWalkingTime(problems)
def Validate(self,
problems=problems_module.default_problem_reporter):
if self.ValidateBeforeAdd(problems) and self._schedule:
self.ValidateAfterAdd(problems)
def _ID(self):
return tuple(self[i] for i in self._ID_COLUMNS)
def AddToSchedule(self, schedule, problems):
schedule.AddTransferObject(self, problems)
| 40.811224 | 91 | 0.665833 | 7,305 | 0.913239 | 0 | 0 | 0 | 0 | 0 | 0 | 2,176 | 0.272034 |
16e7d64f5a23705a73ced1fae75f2e7697ae34b2 | 2,067 | py | Python | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | [
"MIT"
] | null | null | null | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | [
"MIT"
] | null | null | null | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
from . views import UserPostListView, PostDetailView, PostDeleteview, PostCreateView, PostUpdateView,CommentUpdateView, VideoCreateView, video_update
urlpatterns = [
path('',views.base, name='base'),
path('login',views.login, name='login'),
path('register',views.register, name='register'),
path('index',views.index, name='index'),
path('logout',views.logout, name='logout'),
path('like_post', views.like_post, name='like_post'),
path('find_friends',views.find_friends, name='find_friends'),
path('profile',views.profile, name='profile'),
path('profile_update', views.profile_update, name='profile_update'),
path('user/<str:username>', UserPostListView.as_view(), name='user_posts'),
path('post/<int:pk>/',PostDetailView.as_view(), name='post_details' ),
path('post/<int:pk>/delete/',PostDeleteview.as_view(), name='post_delete' ),
path('profile_posts',views.profile_posts, name='profile_posts'),
path('results',views.results, name='results'),
path('post/new/',PostCreateView.as_view(), name='post-create' ),
path('post_update',views.post_update, name='post_update'),
path('post/<int:pk>/update',PostUpdateView.as_view(), name='post-update' ),
path('profile_photos',views.profile_photos, name='profile_photos'),
path('comment_update/<int:id>',views.comment_update, name='comment_update'),
path('comment/<int:pk>/update',CommentUpdateView.as_view(), name='comment-update' ),
path('delete/<int:id>',views.delete, name='delete'),
path('favourite',views.favourite, name='favourite'),
path('favourite_posts',views.favourite_posts, name='favourite_posts'),
path('video/new/',VideoCreateView.as_view(), name='video-create' ),
path('post/<int:pk>/video',video_update.as_view(), name='video_update' ),
# path('<str:username>',views.userprofile, name='userprofile'),
path('video_posts',views.video_posts, name='video_posts'),
path('user_videos',views.user_videos,name='user_videos'),
]
| 43.0625 | 149 | 0.701016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 775 | 0.37494 |
16e8783047883ecc17068c1f63c87b161a271a5f | 1,054 | py | Python | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | [
"MIT"
] | 4 | 2020-07-30T02:38:29.000Z | 2021-09-12T14:30:18.000Z | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | [
"MIT"
] | null | null | null | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | [
"MIT"
] | null | null | null | '''
compute_collision() will compute the collision of all the entities with
a Point while compute_first_collision() will always return its first entry.
Especially if a point is on an element edge this can be tricky.
You may also want to compare with the Cell.contains(Point) tool.
'''
# Script by Rudy at https://fenicsproject.discourse.group/t/
# any-function-to-determine-if-the-point-is-in-the-mesh/275/3
import dolfin
from vtkplotter.dolfin import shapes, plot, printc
n = 4
Px = 0.5
Py = 0.5
mesh = dolfin.UnitSquareMesh(n, n)
bbt = mesh.bounding_box_tree()
collisions = bbt.compute_collisions(dolfin.Point(Px, Py))
collisions1st = bbt.compute_first_entity_collision(dolfin.Point(Px, Py))
printc("collisions : ", collisions)
printc("collisions 1st: ", collisions1st)
for cell in dolfin.cells(mesh):
contains = cell.contains(dolfin.Point(Px, Py))
printc("Cell", cell.index(), "contains P:", contains, c=contains)
###########################################
pt = shapes.Point([Px, Py], c='blue')
plot(mesh, pt, text=__doc__)
| 35.133333 | 75 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 519 | 0.49241 |
16e8943240219eac91364d8b6c27599e32680763 | 622 | py | Python | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | [
"MIT"
] | null | null | null | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | [
"MIT"
] | null | null | null | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | [
"MIT"
] | null | null | null | import datetime
import os
from alice_check_train.main import rasp_to_text
from alice_check_train.rasp_api import get_rasp, filter_rasp
def main():
key = os.getenv('RASP_KEY')
station_from = os.getenv('STATION_FROM')
station_to = os.getenv('STATION_TO')
date = datetime.date.today().strftime('%Y-%m-%d')
js = get_rasp(key, station_from, station_to, date)
filtered = filter_rasp(js['segments'], 300)
message = rasp_to_text(filtered, 1000)
if len(message) > 1024:
print('Too long message: {} > 1024'.format(len(message)))
print(message)
if __name__ == '__main__':
main()
| 25.916667 | 65 | 0.688103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 95 | 0.152733 |
16e89821c774aa40fe5b74ea387488fc99280078 | 7,309 | py | Python | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | [
"Apache-2.0"
] | 1 | 2021-11-16T13:22:09.000Z | 2021-11-16T13:22:09.000Z | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | [
"Apache-2.0"
] | null | null | null | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | [
"Apache-2.0"
] | null | null | null | # https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb
# https://docs.aws.amazon.com/opensearch-service/latest/developerguide/knn.html
import sys
import requests
import h5py
import numpy as np
import json
import aiohttp
import asyncio
import time
import httpx
from requests.auth import HTTPBasicAuth
from statistics import mean
# if len(sys.argv) != 2:
# print("Type in the efSearch!")
# sys.exit()
# path = '/tmp/sift-128-euclidean.hdf5.1M' # float dataset
# path = '/tmp/sift-128-euclidean.hdf5' # float dataset
path = '/home/ubuntu/sift-128-euclidean.hdf5' # float dataset
output_csv = '/tmp/sift-es.csv'
# url = 'http://127.0.0.1:9200/sift-index/'
host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # single node
# host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # two nodes
url = host + 'sift-index/'
requestHeaders = {'content-type': 'application/json'} # https://stackoverflow.com/questions/51378099/content-type-header-not-supported
auth = HTTPBasicAuth('admin', 'I#vu7bTAHB')
# Build an index
#https://stackoverflow.com/questions/17301938/making-a-request-to-a-restful-api-using-python
# PUT sift-index
data = '''{
"settings": {
"index": {
"knn": true,
"knn.space_type": "l2",
"knn.algo_param.m": 6,
"knn.algo_param.ef_construction": 50,
"knn.algo_param.ef_search": 50,
"refresh_interval": -1,
"translog.flush_threshold_size": "10gb",
"number_of_replicas": 0
}
},
"mappings": {
"properties": {
"sift_vector": {
"type": "knn_vector",
"dimension": 128
}
}
}
}'''
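# m, ef_construction and ef_search are HNSW graph parameters; refresh is disabled
# (refresh_interval: -1) and replicas are set to 0 to speed up the bulk load below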
# https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb
response = requests.put(url, data=data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
# response = requests.put(url, data=data, verify=False, headers=requestHeaders, auth=auth)
assert response.status_code==requests.codes.ok
# cluster_url = 'http://127.0.0.1:9200/_cluster/settings'
cluster_url = host + '_cluster/settings'
cluster_data = '''{
"persistent" : {
"knn.algo_param.index_thread_qty": 16
}
}
'''
response = requests.put(cluster_url, data=cluster_data, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), headers=requestHeaders)
assert response.status_code==requests.codes.ok
# Bulkload into index
bulk_template = '{ "index": { "_index": "sift-index", "_id": "%s" } }\n{ "sift_vector": [%s] }\n'
hf = h5py.File(path, 'r')
for key in hf.keys():
print("A key of hf is %s" % key) #Names of the groups in HDF5 file.
vectors = np.array(hf["train"][:])
num_vectors, dim = vectors.shape
print("num_vectors: %d" % num_vectors)
print("dim: %d" % dim)
bulk_data = ""
start = time.time()
for (id,vector) in enumerate(vectors):
assert len(vector)==dim
vector_str = ""
for num in vector:
vector_str += str(num) + ','
vector_str = vector_str[:-1]
id_str = str(id)
single_bulk_done = bulk_template % (id_str, vector_str)
bulk_data += single_bulk_done
if (id+1) % 100000 == 0:
print(str(id+1))
# POST _bulk
response = requests.put(url + '_bulk', data=bulk_data, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), headers=requestHeaders)
assert response.status_code==requests.codes.ok
bulk_data = ""
end = time.time()
print("Insert Time: %d mins" % ((end - start) / 60.0)) # Unit: min
# refresh_url = 'http://127.0.0.1:9200/sift-index/_settings'
refresh_url = host + 'sift-index/_settings'
refresh_data = '''{
"index" : {
"refresh_interval": "1s"
}
}
'''
response = requests.put(refresh_url, data=refresh_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert response.status_code==requests.codes.ok
# response = requests.post('http://127.0.0.1:9200/sift-index/_refresh', verify=False, headers=requestHeaders)
# assert response.status_code==requests.codes.ok
# merge_url = 'http://127.0.0.1:9200/sift-index/_forcemerge?max_num_segments=1'
merge_url = host + 'sift-index/_forcemerge?max_num_segments=1'
merge_response = requests.post(merge_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), timeout=600)
assert merge_response.status_code==requests.codes.ok
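# warm up the k-NN index: this loads the HNSW graphs into memory so the first searches are not penalised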
# warmup_url = 'http://127.0.0.1:9200/_opendistro/_knn/warmup/sift-index'
warmup_url = host + '_opendistro/_knn/warmup/sift-index'
warmup_response = requests.get(warmup_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert warmup_response.status_code==requests.codes.ok
# Send queries
total_time = 0 # in ms
hits = 0 # for recall calculation
query_template = '''
{
"size": 50,
"query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}}
}
'''
queries = np.array(hf["test"][:])
nq = len(queries)
neighbors = np.array(hf["neighbors"][:])
# distances = np.array(hf["distances"][:])
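# 'neighbors' holds the ground-truth nearest-neighbour ids for each test query;
# the top 50 per query are compared with the search results below to compute recall@50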
num_queries, q_dim = queries.shape
print("num_queries: %d" % num_queries)
print("q_dim: %d" % q_dim)
assert q_dim==dim
ef_search_list = [50, 100, 150, 200, 250, 300]
for ef_search in ef_search_list:
ef_data = '''{
"index": {
"knn.algo_param.ef_search": %d
}
}'''
ef_data = ef_data % ef_search
### Update Index Setting: efSearch
response = requests.put(url + '_settings', data=ef_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert response.status_code==requests.codes.ok
total_time_list = []
hits_list = []
for count in range(5):
total_time = 0 # in ms
hits = 0 # for recall calculation
query_template = '''
'''
single_query = '''{}\n{"size": 50, "query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}}}\n'''
for (id,query) in enumerate(queries):
assert len(query)==dim
query_str = ""
for num in query:
query_str += str(num) + ','
query_str = query_str[:-1]
# GET sift-index/_search
single_query_done = single_query % (query_str)
query_template += single_query_done
query_data = query_template
# print(query_data)
response = requests.get(url + '_msearch', data=query_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), stream=True)
assert response.status_code==requests.codes.ok
# print(response.text)
result = json.loads(response.text)
# QPS
total_time = result['took']
# tooks = []
# for i in range(len(queries)):
# for ele in result['responses']:
# tooks.append(int(ele['took']))
for id in range(len(queries)):
# Recall
neighbor_id_from_result = []
for ele in result['responses'][id]['hits']['hits']:
neighbor_id_from_result.append(int(ele['_id']))
assert len(neighbor_id_from_result)==50
# print("neighbor_id_from_result: ")
# print(neighbor_id_from_result)
neighbor_id_gt = neighbors[id][0:50] # topK=50
# print("neighbor_id_gt")
# print(neighbor_id_gt)
hits_q = len(list(set(neighbor_id_from_result) & set(neighbor_id_gt)))
# print("# hits of this query with topk=50: %d" % hits_q)
hits += hits_q
total_time_list.append(total_time)
hits_list.append(hits)
print(total_time_list)
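    # average the msearch 'took' time while discarding the first runs as warm-up
    # (note: the [2:-1] slice also drops the last run); QPS and recall@50 are derived below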
total_time_avg = mean(total_time_list[2:-1])
hits_avg = mean(hits_list)
QPS = 1.0 * nq / (total_time_avg / 1000.0)
recall = 1.0 * hits_avg / (nq * 50)
print(ef_search, QPS, recall)
| 33.374429 | 142 | 0.675332 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,478 | 0.475852 |
16ea2d8be166b5650aea4af33dbde9040a41f768 | 1,438 | py | Python | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | [
"BSD-2-Clause"
] | null | null | null | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | [
"BSD-2-Clause"
] | null | null | null | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | [
"BSD-2-Clause"
] | null | null | null | import platform
import textwrap
import pytest
from . import test_projects, utils
dockcross_only_project = test_projects.new_c_project(
setup_py_add=textwrap.dedent(r'''
import os, sys
# check that we're running in the correct docker image as specified in the
# environment options CIBW_MANYLINUX1_*_IMAGE
if "linux" in sys.platform and not os.path.exists("/dockcross"):
raise Exception(
"/dockcross directory not found. Is this test running in the correct docker image?"
)
''')
)
def test(tmp_path):
if utils.platform != 'linux':
pytest.skip('the test is only relevant to the linux build')
if platform.machine() not in ['x86_64', 'i686']:
pytest.skip('this test is currently only possible on x86_64/i686 due to availability of alternative images')
project_dir = tmp_path / 'project'
dockcross_only_project.generate(project_dir)
actual_wheels = utils.cibuildwheel_run(project_dir, add_env={
'CIBW_MANYLINUX_X86_64_IMAGE': 'dockcross/manylinux2010-x64',
'CIBW_MANYLINUX_I686_IMAGE': 'dockcross/manylinux2010-x86',
'CIBW_SKIP': 'pp* cp39-*',
})
# also check that we got the right wheels built
expected_wheels = [w for w in utils.expected_wheels('spam', '0.1.0')
if '-pp' not in w and '-cp39-' not in w]
assert set(actual_wheels) == set(expected_wheels)
| 35.073171 | 116 | 0.672462 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 770 | 0.535466 |
16eb07b6e691db19202917b717c2ccb87df9fd9d | 32,556 | py | Python | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | [
"BSD-3-Clause"
] | 3 | 2018-05-22T22:45:23.000Z | 2020-02-13T16:45:03.000Z | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | [
"BSD-3-Clause"
] | null | null | null | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
import math
import dateutil
import dateutil.parser
import json
from ChartBars import Chart
from ChartUpdaterByCCWebsocket import ChartUpdaterByCoincheckWS
from Util import BitcoinUtil
def adjust_price_to_tick(price, tick):
return price - math.fmod(price, tick)
def adjust_amount_to_tick(amount, tick):
return amount - math.fmod(amount, tick)
# a class for one position
class OnePositionTrader(object):
def __init__(self, price_decide_algorithm, api, pair="btc_jpy", use_leverage = True):
self.max_total_position_price_base = 0 # total maximum position size in base currency
self.positioned_price_base = 0 # total position price in base currency (actually paired currency)
self.positioned_value_in_qty = 0 # used only for genbutsu
self.max_free_margin_of_base_currency = 0 # max free margin. we cannot use orders that exceed this margin
self.positions = []
self.position_id_to_sellids = {}
self.got_all_order_ids = []
self.got_close_order_ids = []
self.exist_order_info_list = None
self.exist_close_order_info_list = None
self.last_checked_transaction_id = 0
self.api = api # api: e.g. instance of CoinCheck
self.use_leverage = use_leverage
        self.timelimit_to_grouping_transaction = 2  # fills whose execution times are within this many seconds are treated as the same position (only when use_leverage == False)
self.__pair = pair
self.price_decide_algorithm = price_decide_algorithm
print("PositionTrader: inst=" + str(self) + ", pair=" + str(pair))
@property
def pair(self):
return self.__pair
def get_base_currency(self):
return self.pair.split("_")[1].lower()
def get_qty_currency(self):
return self.pair.split("_")[0].lower()
# set usable jpy (available_margin + reserved_margin + (positioned))
def set_max_total_position_price_base(self, p):
self.set_max_total_position_price_of_base_currency(p)
def set_max_total_position_price_of_base_currency(self, p):
self.max_total_position_price_base = p
def set_max_free_margin_of_base_currency(self, p):
self.max_free_margin_of_base_currency = p
def get_max_total_position_price_base(self):
return self.get_max_total_position_price_of_base_currency()
def get_max_total_position_price_of_base_currency(self):
return self.max_total_position_price_base
def get_positioned_price_base(self):
return self.positioned_price_base
def set_timelimit_to_grouping_transaction(self, timelimit_to_grouping_transaction):
self.timelimit_to_grouping_transaction = timelimit_to_grouping_transaction
# check current status and make new positions according to algorithm
# notice: this method should be called after update_status
def update_new_orders(self, chart, do_not_create_new_order=False):
assert (self.price_decide_algorithm is not None)
position_type = None
target_value = None
stoploss_rate = None
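        # the price-decide algorithm returns (position_type, target_price) or (position_type, target_price, stoploss_rate)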
decide_make_ret = self.price_decide_algorithm.decide_make_position_order(chart)
if len(decide_make_ret) == 3:
(position_type, target_value, stoploss_rate) = decide_make_ret
else:
(position_type, target_value) = decide_make_ret
if target_value is None or position_type is None:
# algorithm says this instance should not make order. cancel all
if self.exist_order_info_list is not None:
for exist_order_info in self.exist_order_info_list:
self._cancel_order(exist_order_info["id"])
self.exist_order_info_list = None
return False
# round to possible price
tick = self.api.order.tick_price(self.pair)
target_value = adjust_price_to_tick(target_value, tick)
if stoploss_rate is not None:
stoploss_rate = adjust_price_to_tick(stoploss_rate, tick)
# !!round to possible amount
possible_make_total_price_base_cur = self.get_max_total_position_price_of_base_currency() - self.positioned_price_base
possible_make_total_price_base_cur = min(possible_make_total_price_base_cur, self.max_free_margin_of_base_currency)
amount_tick = self.api.order.tick_amount(self.pair)
possible_amount = 1.0 * possible_make_total_price_base_cur / target_value
possible_amount = adjust_amount_to_tick(possible_amount,amount_tick)
print("possible_create_in_base = %f, want to make amount in base = %f, possible amount = %f" %
(self.get_max_total_position_price_of_base_currency() - self.positioned_price_base,
possible_make_total_price_base_cur, possible_amount))
#print("base_cur = %f, positioned = %f, others = %f" % (self.get_max_total_position_price_of_base_currency(), self.positioned_price_base, self.other_reserved_base,))
#print("target_value = %f, possible_base = %f" % (target_value, possible_make_total_price_base_cur,))
if possible_amount <= 0.000001:
# too few btc
print("want to make (price,amount) = (%f,%f) but too few amount" % (target_value, possible_amount))
return False
if not do_not_create_new_order:
success, new_order_created = self._update_or_create_order(position_type, target_value, possible_amount, stop_loss_rate=stoploss_rate)
return new_order_created
else:
self._cancel_exist_all_buy_orders()
print("algorithm wants to create a new order but DO_NOT_CREATE_NEW flag = true")
return False
# update close orders according to current positions
# this class should be called after update_status
def update_close_orders(self, chart, current_time_timezone_aware):
for position in self.positions:
open_rate = float(position["open_rate"])
amount = float(position["amount"])
created_time = position["created_at_datetime"]
target_value = None
if self.price_decide_algorithm.market_sell_decide_algorithm(chart, open_rate, created_time, current_time_timezone_aware) is True:
# market order close
pass
else:
target_value = self.price_decide_algorithm.sell_price_decide_algorithm(open_rate)
target_value = adjust_price_to_tick(target_value, self.api.order.tick_price(self.pair))
self._update_or_create_close_order(position, target_value)
# interface to update internal position & order status
def update_status(self, valid_position_info, valid_transaction_info, valid_order_info):
# update position/order status (assume: pagenations are already cleared)
self._update_order_id_status(valid_order_info)
if self.use_leverage:
self._update_position_status(valid_position_info)
else:
self._update_transaction_status(valid_transaction_info)
def _update_position_status(self, valid_position_info):
# apply real positions status to this instance
        # leveraged (margin) trading only
if not self.use_leverage:
return
"""
position example (array of "data" will be passed)
{
"data": [
{
"id": 10,
"pair": "btc_jpy",
"status": "open",
"created_at": "2015-12-02T05:27:53.000Z",
"closed_at": null,
"open_rate": "43553.0",
"closed_rate": null,
"amount": "1.51347797",
"all_amount": "1.51045705",
"side": "sell",
"pl": "-8490.81029287",
"new_order": {
"id": 23104033,
"side": "sell",
"rate": null,
"amount": null,
"pending_amount": "0",
"status": "complete",
"created_at": "2015-12-02T05:27:52.000Z"
},
"close_orders": [
{
"id": 23755132,
"side": "buy",
"rate": "10000.0",
"amount": "1.0",
"pending_amount": "0.0",
"status": "cancel",
"created_at": "2015-12-05T05:03:56.000Z"
}
]
}
]
}
"""
####
# parse positions
####
self.positions = []
self.position_id_to_sellids = {}
all_positions = valid_position_info
positioned_value_in_base = 0
for position in all_positions:
status = position["status"]
if status != "open":
continue
pair = position["pair"]
if pair != self.pair:
continue
position_id = position["id"]
# check position that is created by the new_order that is self.order_id:
new_order = position["new_order"]
if new_order["status"] == "cancel":
print("new order: " + str(new_order["id"]) + " state is 'cancel'. probably partially contracted and remain is canceled. this position is not ignored")
#continue
new_order_id = new_order["id"]
if new_order_id in self.got_all_order_ids:
# this position is created by this class's order
created_time = dateutil.parser.parse(position["created_at"])
position["created_at_datetime"] = created_time
amount = position["amount"]
all_amount = position["all_amount"]
if all_amount is not None and all_amount < amount:
amount = all_amount
position["amount"] = position["all_amount"] = amount
self.positions.append(position)
open_rate = position["open_rate"]
positioned_value_in_base += float(amount) * float(open_rate)
# check close orders
self.position_id_to_sellids[position_id] = \
list(map(lambda x:x["id"], filter(lambda x:x["status"] != "cancel", position["close_orders"])))
self.positioned_price_base = positioned_value_in_base
def _update_transaction_status(self, valid_transaction_info):
if self.use_leverage:
return
        # for spot trading: derives the position state from transaction results. Essentially behaves like
        # update_position_status, but parses different JSON.
        # * unlike update_position_status it carries information over from the previous frame (spot trading has no concept of a "position")
positions = self.positions
position_id_to_sellids = self.position_id_to_sellids
close_transactions = []
all_transactions = valid_transaction_info
positioned_value_in_qty = self.positioned_value_in_qty
qty_cur = self.get_qty_currency()
base_cur = self.get_base_currency()
last_transaction_id_in_this_frame = self.last_checked_transaction_id
for transaction in all_transactions:
transaction_id = int(transaction["id"]) # transaction_id means position_id
transaction["id"] = transaction_id
# check only new id
if self.last_checked_transaction_id >= transaction_id:
continue
last_transaction_id_in_this_frame = max(last_transaction_id_in_this_frame, transaction_id)
# check pair
this_pair = transaction["pair"]
if this_pair != self.pair:
continue
# check position that is created by the new_order that is self.order_id:
new_order_id = int(transaction["order_id"])
transaction["order_id"] = new_order_id
is_position_transaction = new_order_id in self.got_all_order_ids
is_close_transaction = new_order_id in self.got_close_order_ids
if not is_position_transaction and not is_close_transaction:
continue
# other pair
if qty_cur not in transaction["funds"] or base_cur not in transaction["funds"]:
continue
# this position is created by this class's order
qty_amount = float(transaction["funds"][qty_cur])
transaction["amount"] = transaction["amount"] = qty_amount
transaction["open_rate"] = float(transaction["rate"])
open_rate = float(transaction["open_rate"])
positioned_value_in_qty += float(qty_amount)
created_time = dateutil.parser.parse(transaction["created_at"])
transaction["created_at_datetime"] = created_time
if is_position_transaction:
# check close orders
                # missing a link here would be bad (the close order would never be cancelled), so link every close order we know of for now
position_id_to_sellids[transaction_id] = []
transaction["close_orders"] = []
positions.append(transaction)
else:
close_transactions.append(transaction)
# in next frame, only transaction_id > self.last_checked_transaction_id will be checked
self.last_checked_transaction_id = last_transaction_id_in_this_frame
print("last_checked_transaction_id = ", self.last_checked_transaction_id)
print("self.exist_close_order_info_list", self.exist_close_order_info_list)
if self.exist_close_order_info_list is not None:
for pos_i, position in enumerate(positions):
transaction_id = position["id"]
position_id_to_sellids[transaction_id] = list(map(lambda x:x["id"], self.exist_close_order_info_list))
position["close_orders"] = self.exist_close_order_info_list
for i, order in enumerate(position["close_orders"]):
order["status"] = "open"
order["side"] = order["order_type"]
if "amount" not in order:
order["amount"] = float(order["pending_amount"])
position["close_orders"][i] = order
positions[pos_i] = position
# round very small value
if abs(positioned_value_in_qty) < self.api.order.min_create_amount(self.pair)*0.1:
positioned_value_in_qty = 0
positions = sorted(positions, key=lambda x:-x["id"]) # order by desc
# concat very near created_at transactions
grouped_positions = self._group_near_transactions(positions)
# remove closed position & update positioned_value_in_jpy
valid_positions, positioned_value_in_base = self._remain_non_closed_transactions(grouped_positions, positioned_value_in_qty)
if abs(positioned_value_in_base) < self.api.order.tick_price(self.pair) * self.api.order.min_create_amount(self.pair) * 0.1:
positioned_value_in_base = 0
# merge position_id_to_sellids
self.position_id_to_sellids = {}
for position in valid_positions:
pos_id = position["id"]
self.position_id_to_sellids[pos_id] = position_id_to_sellids[pos_id]
self.positioned_price_base = positioned_value_in_base
self.positioned_value_in_qty = positioned_value_in_qty
self.position_id_to_sellids = position_id_to_sellids
self.positions = valid_positions
print("position_count=%d, positioned_%s=%f, positioned_%s=%f" % (len(self.positions), base_cur, self.positioned_price_base, qty_cur, self.positioned_value_in_qty,))
        # walking the entire history to work out which positions are closed and how much remains is not realistic
        # instead, use the position state that can already be resolved at this point (if a close order id exists, the opposite trade has been filled)
        # and subtract the already-closed amounts from the positions accumulated above (including positions carried over from the previous frame);
        # whatever remains is treated as the live position and consolidated into one (this is spot trading, so keeping separate ids has no meaning)
        # keep the surviving position ids and the consumed close-order ids, and from the next frame on only apply ids newer than those
        # however, rounding errors accumulate over time, so when the total becomes very small in jpy or btc terms, round it down and treat it as flat
        # note: spot and leverage bookkeeping differ quite a bit, so handling both in one class has become messy
    # group positions whose execution times are close together
def _group_near_transactions(self, target_transactions):
grouped_positions = []
positions = target_transactions
if len(positions) > 0:
def grouping(desced_position_array):
ret_pos = dict(desced_position_array[0])
total_amount = 0
total_jpy = 0
for p in desced_position_array:
total_amount += p["amount"]
total_jpy += p["amount"] * p["open_rate"]
ret_pos["amount"] = total_amount
ret_pos["open_rate"] = total_jpy / total_amount
return ret_pos
concat_start_index = 0
prev_created_at = positions[0]["created_at_datetime"]
for idx, pos in enumerate(positions):
cur_created_at = pos["created_at_datetime"]
if abs((cur_created_at - prev_created_at).total_seconds()) <= self.timelimit_to_grouping_transaction:
# can group
prev_created_at = cur_created_at
continue
# this position cannot be grouped. make a new group from pos[start_index] - pos[idx-1]
grouped_positions.append(grouping(positions[concat_start_index:idx]))
#print(grouped_positions[-1])
concat_start_index = idx
prev_created_at = cur_created_at
# remain positioned not be grouped
grouped_positions.append(grouping(positions[concat_start_index:]))
return grouped_positions
    # keep only the transactions that have not been closed yet
def _remain_non_closed_transactions(self, target_transactions, positioned_value_in_qty):
valid_positions = []
remain_qty = positioned_value_in_qty
total_base = 0
for position in target_transactions:
if remain_qty <= 0: break
amount = position["amount"]
if remain_qty >= amount:
remain_qty -= amount
else:
position["amount"] = remain_qty
remain_qty = 0
valid_positions.append(position)
total_base += position["amount"] * position["open_rate"]
return valid_positions, total_base
def _update_order_id_status(self, valid_order_info):
####
# parse orders
####
"""
orders example (array of "orders" will be passed)
{
"success": true,
"orders": [
{
"id": 202835,
"order_type": "buy",
"rate": 26890,
"pair": "btc_jpy",
"pending_amount": "0.5527",
"pending_market_buy_amount": null,
"stop_loss_rate": null,
"created_at": "2015-01-10T05:55:38.000Z"
},
{
"id": 202836,
"order_type": "sell",
"rate": 26990,
"pair": "btc_jpy",
"pending_amount": "0.77",
"pending_market_buy_amount": null,
"stop_loss_rate": null,
"created_at": "2015-01-10T05:55:38.000Z"
},
{
"id": 38632107,
"order_type": "buy",
"rate": null,
"pair": "btc_jpy",
"pending_amount": null,
"pending_market_buy_amount": "10000.0",
"stop_loss_rate": "50000.0",
"created_at": "2016-02-23T12:14:50.000Z"
}
]
}
"""
#exist_order_ids = list(map(lambda x:x["id"], valid_order_info))
exist_orders = []
exist_close_orders = []
other_orders = []
for idx, order in enumerate(valid_order_info):
order_id = order["id"]
order_pair = order["pair"]
is_added = False
if order_pair == self.pair:
if order_id in self.got_all_order_ids:
is_added = True
exist_orders.append(order)
elif order_id in self.got_close_order_ids:
is_added = True
exist_close_orders.append(order)
if not is_added:
other_orders.append(order)
print("exist_create_orders", exist_orders)
print("exist_close_orders", exist_close_orders)
self.exist_order_info_list = exist_orders if len(exist_orders) > 0 else None
self.exist_close_order_info_list = exist_close_orders if len(exist_close_orders) > 0 else None
#self.other_reserved_base = 0
#if not self.use_leverage:
# for o in other_orders:
# if o["order_type"] == "buy":
# self.other_reserved_base += float(o["pending_amount"]) * float(o["rate"])
# returns: (is_success, is_new_order_created)
def _update_or_create_order(self, position_type, target_value, possible_qty, stop_loss_rate = None):
assert (self.api is not None)
        # the open-order list works the same way for spot and leverage trading
if self.exist_order_info_list is not None:
# check the same value or not
if len(self.exist_order_info_list) == 1:
exist_order_info = self.exist_order_info_list[0]
cur_rate = exist_order_info["rate"] if "rate" in exist_order_info else None
# get current stoploss
cur_stoploss = exist_order_info["stop_loss_rate"] if "stop_loss_rate" in exist_order_info else None
cur_stoploss_float_or_none = None
if cur_stoploss is not None:
cur_stoploss_float_or_none = float(cur_stoploss)
target_stoploss_float_or_none = None
if stop_loss_rate is not None:
target_stoploss_float_or_none = float(stop_loss_rate)
cur_amount = None
if "amount" in exist_order_info:
cur_amount = exist_order_info["amount"]
elif "pending_amount" in exist_order_info:
cur_amount = exist_order_info["pending_amount"]
order_type = None
if "order_type" in exist_order_info:
if exist_order_info["order_type"] == "buy" or\
exist_order_info["order_type"] == "leverage_buy":
order_type = "long"
if exist_order_info["order_type"] == "sell" or \
exist_order_info["order_type"] == "leverage_sell":
order_type = "short"
if cur_rate is not None and cur_amount is not None and order_type is not None:
if abs(float(cur_rate)-float(target_value)) < 0.00001 and \
abs(float(cur_amount)-float(possible_qty)) < 0.00001 and \
cur_stoploss_float_or_none == target_stoploss_float_or_none and \
order_type == position_type:
# same order. do nothing
print("You already ordered this order: rate=%.1f, amount=%f, stoploss_rate=%s, position_type=%s" % (target_value, possible_qty, str(stop_loss_rate), position_type,))
return True, False
# cancel all exist orders
if not self._cancel_exist_all_buy_orders():
return False, False
# check minimum btc
min_qty = self.api.order.min_create_amount(self.pair)
if possible_qty < min_qty:
print("Minimum order btc = %f, you requested = %f" % (min_qty, possible_qty,))
return False, False
# make new order
"""
ret val example
"success": true,
"id": 12345,
"rate": "30010.0",
"amount": "1.3",
"order_type": "sell",
"stop_loss_rate": null,
"pair": "btc_jpy",
"created_at": "2015-01-10T05:55:38.000Z"
"""
is_long = position_type == "long"
order_type = 'leverage_buy' if is_long else 'leverage_sell'
if not self.use_leverage:
order_type = 'buy' if is_long else 'sell'
order = {
'rate': "%.8f" % target_value,
'amount': "%.8f" % possible_qty,
'order_type': order_type,
'pair': self.pair
}
# not correct
# this "stop_loss_rate" means: if a value >= stop_loss_rate, sashine will be placed at "rate"
if stop_loss_rate is not None:
order["stop_loss_rate"] = stop_loss_rate
ret_str = self.api.order.create(order)
ret = None
if ret_str is not None:
try:
ret = json.loads(ret_str)
except:
print("failed to parse api.order.create result")
try:
print(ret_str)
except Exception as e:
print("failed to show returned json str")
print(e)
if ret is None or ret["success"] is not True or "id" not in ret:
print("Failed to create order!!")
try:
print(ret_str)
except Exception as e:
print("failed to show returned json str")
print(e)
return False, False
self.exist_order_info_list = [ret]
self.got_all_order_ids.append(ret["id"])
# remove very old orders
if len(self.got_all_order_ids) > 500:
self.got_all_order_ids = self.got_all_order_ids[-500:]
print("order success!", ret_str)
return True, True
def _cancel_exist_all_buy_orders(self):
failed_to_cancel = False
exist_order_i = 0
while exist_order_i < len(self.exist_order_info_list):
exist_order_info = self.exist_order_info_list[exist_order_i]
if self._cancel_order(exist_order_info["id"]) is False:
# something error happened!!
print("order cancel failed %d even if there is a valid order in internal state" % (exist_order_info["id"],))
failed_to_cancel = True
del self.exist_order_info_list[exist_order_i]
else:
exist_order_i += 1
if len(self.exist_order_info_list) == 0:
self.exist_order_info_list = None
if failed_to_cancel:
return False
return True
# target_value: sashine value. if None, market-make
def _update_or_create_close_order(self, position, target_value):
position_id = position["id"]
if position_id not in self.position_id_to_sellids:
return False
sell_qty = float(position["amount"])
sell_ids = self.position_id_to_sellids[position_id]
position_type = position["side"]
# convert position type name
if position_type == "buy": position_type = "long"
if position_type == "sell": position_type = "short"
is_close_long = True
if position_type == "long": is_close_long = True
if position_type == "short": is_close_long = False
# check exist sell-orders. if target value and amount are completely same, do not pass new order
valid_close_orders = list(filter(lambda x:x["status"] != "cancel" and x["id"] in sell_ids, position["close_orders"]))
print("valid_close_order count = %d" % len(valid_close_orders))
if len(valid_close_orders) == 1 and target_value is not None:
# check the order is already created on exchanger
valid_close_order = valid_close_orders[0]
print("your order: rate=%f, amount=%f" % (target_value, sell_qty,))
print("valid_close_order[0]:")
print(valid_close_order)
rate = None
if "rate" in valid_close_order:
rate = float(valid_close_order["rate"])
amount = valid_close_order["amount"]
is_cur_close_long = False
if "side" in valid_close_order:
is_cur_close_long = valid_close_order["side"] == "sell"
elif "order_type" in valid_close_order:
is_cur_close_long = valid_close_order["order_type"] == "sell"
if abs(float(rate)-float(target_value)) < 0.00001 and \
abs(float(amount)-float(sell_qty)) < 0.00001 and \
is_close_long == is_cur_close_long:
# completely same!!
print("requested close order is already ordered on server:")
print(" position id:%s, target_value:%s, amount:%s, close_long:%s" % (str(position_id), str(target_value), str(amount), str(is_cur_close_long),))
return True
min_qty = self.api.order.min_create_amount(self.pair)
if sell_qty < min_qty:
qty_cur = self.get_qty_currency()
print("Minimum order %s = %f, you requested = %f" % (qty_cur, min_qty, sell_qty,))
return False
# cancel all
for sell_id in sell_ids:
self._cancel_order(sell_id)
self.position_id_to_sellids[position_id] = []
# make new order
order = {}
if self.use_leverage:
order = {
'amount': '%.8f' % BitcoinUtil.roundBTCby1satoshi(sell_qty),
'position_id': position_id,
'order_type': 'close_long' if is_close_long else 'close_short',
'pair': 'btc_jpy',
}
if target_value is not None:
order['rate'] = target_value
else:
# if not leverage order, close order is always "sell"
if not is_close_long:
print("normal order cannot make short position!")
print("you passed close 'short' for normal order")
return False
order = {
'amount': '%.8f' % BitcoinUtil.roundBTCby1satoshi(sell_qty),
'order_type': 'sell',
'pair': self.pair,
}
if target_value is None:
# market_sell
order['order_type'] = "market_sell"
else:
order['rate'] = target_value
ret = self.api.order.create(order)
ret_str = ret
if ret is not None:
try:
ret = json.loads(ret)
except:
print("failed to parse close_long order result")
try:
print(ret_str)
except Exception as e:
print("failed to print error")
print(e)
if ret is None or ret["success"] is not True or "id" not in ret or ret["id"] is None:
print("sell order canceled but failed to create new sell order!!: position id: %s" % (str(position_id),))
try:
print(ret_str)
except Exception as e:
print("failed to print error")
print(e)
return False
sell_ids = [ret["id"]]
self.position_id_to_sellids[position_id] = sell_ids
self.got_close_order_ids.append(ret["id"])
if len(self.got_close_order_ids) > 500:
self.got_close_order_ids = self.got_close_order_ids[-500:]
return True
def _cancel_order(self, order_id):
# call apis for current orders
if order_id is None:
print("order is already canceled")
return True
# do something
ret_str = self.api.order.cancel({"id": order_id, "pair": self.pair})
ret = None
if ret_str is not None:
try:
ret = json.loads(ret_str)
except:
print("failed to parse cancel order ret str")
try:
print(ret_str)
except Exception as e:
print("failed to print returned error json")
print(e)
if ret is None or ret["success"] is not True or "id" not in ret:
print("Failed to cancel order %s: %s" % (str(order_id), str(ret_str),))
return False
return True
| 40.593516 | 189 | 0.593593 | 33,184 | 0.98809 | 0 | 0 | 56 | 0.001667 | 0 | 0 | 10,325 | 0.307438 |
16ebc077aad6a4dd684131dc7271bbdbd5696af9 | 743 | py | Python | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | [
"MIT"
] | 2 | 2021-09-07T03:25:00.000Z | 2021-09-07T17:28:46.000Z | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | [
"MIT"
] | null | null | null | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | [
"MIT"
] | null | null | null | from PiRelay8 import Relay
import time
r1 = Relay("RELAY1")
r2 = Relay("RELAY2")
r3 = Relay("RELAY3")
r4 = Relay("RELAY4")
r5 = Relay("RELAY5")
r6 = Relay("RELAY6")
r7 = Relay("RELAY7")
r8 = Relay("RELAY8")
r1.off()
r2.off()
r3.off()
r4.off()
r5.off()
r6.off()
r7.off()
r8.off()
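# pulse each relay in turn: on for 0.5 s, then off for 0.5 s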
r1.on()
time.sleep(0.5)
r1.off()
time.sleep(0.5)
r2.on()
time.sleep(0.5)
r2.off()
time.sleep(0.5)
r3.on()
time.sleep(0.5)
r3.off()
time.sleep(0.5)
r4.on()
time.sleep(0.5)
r4.off()
time.sleep(0.5)
r5.on()
time.sleep(0.5)
r5.off()
time.sleep(0.5)
r6.on()
time.sleep(0.5)
r6.off()
time.sleep(0.5)
r7.on()
time.sleep(0.5)
r7.off()
time.sleep(0.5)
r8.on()
time.sleep(0.5)
r8.off()
time.sleep(0.5)
| 11.983871 | 27 | 0.572005 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.086137 |
16ebce5b29644a3fdd8bee60c8ef43a322219b10 | 9,086 | py | Python | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | [
"MIT"
] | null | null | null | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | [
"MIT"
] | 5 | 2018-09-28T18:01:28.000Z | 2019-02-12T18:49:06.000Z | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | [
"MIT"
] | 1 | 2018-10-15T22:41:47.000Z | 2018-10-15T22:41:47.000Z | import rs3clans
import discord
from discord.ext import commands
from bot.bot_client import Bot
from bot.utils.tools import separator
from bot.utils.context import Context
class Clan(commands.Cog):
def __init__(self, bot: Bot):
self.bot = bot
@commands.cooldown(1, 5, commands.BucketType.user)
@commands.bot_has_permissions(embed_links=True)
@commands.command(aliases=['clan'])
async def clan_detail_info(self, ctx: Context, *, clan_name: str):
try:
clan = rs3clans.Clan(name=clan_name, set_exp=True)
except ConnectionError:
return await ctx.send(f"Houve um erro ao tentar conectar a API da Jagex. Tente novamente mais tarde.")
except rs3clans.ClanNotFoundError:
return await ctx.send(f"O clã '{clan_name}' não existe.")
clan_leader = None
for member in clan:
if member.rank == 'Owner':
clan_leader = member.name
clan_url = clan.name.replace(' ', '%20')
clan_embed = discord.Embed(
title=clan.name,
color=discord.Color.green(),
url=f'http://services.runescape.com/m=clan-home/clan/{clan_url}'
)
clan_embed.set_author(name='RuneClan', url=f'https://runeclan.com/clan/{clan_url}')
clan_embed.set_thumbnail(url=f'http://services.runescape.com/m=avatar-rs/{clan_url}/clanmotif.png')
clan_embed.add_field(name="Exp Total", value=f'{clan.exp:,}')
clan_embed.add_field(name="Membros", value=str(clan.count))
clan_embed.add_field(name="Líder", value=clan_leader)
clan_embed.add_field(name="Exp Média por Membro", value=f'{clan.avg_exp:,.0f}')
return await ctx.send(embed=clan_embed)
@commands.cooldown(1, 5, commands.BucketType.user)
@commands.bot_has_permissions(embed_links=True)
@commands.command(aliases=['claninfo', 'clanexp', 'claexp', 'clainfo', 'clãexp', 'clãinfo'])
async def clan_user_info(self, ctx: Context, *, username: str):
try:
player = rs3clans.Player(name=username, runemetrics=True)
except ConnectionError:
return await ctx.send(f"Houve um erro ao tentar conectar a API da Jagex. Tente novamente mais tarde.")
if not player.exists:
return await ctx.send(f"Jogador '{player.name}' não existe.")
if not player.clan:
return await ctx.send(f"Jogador '{player.name}' não está em um clã.")
user_clan = rs3clans.Clan(name=player.clan)
member = user_clan.get_member(username)
user_clan_exp = member.exp
user_rank = member.rank
display_username = player.name
if self.bot.setting.show_titles:
if player.suffix:
display_username = f"{player.name} {player.title}"
else:
display_username = f"{player.title} {player.name}"
user_url_name = player.name.replace(" ", "%20")
user_url_clan = player.clan.replace(" ", "%20")
icon_url = f"https://secure.runescape.com/m=avatar-rs/{user_url_name}/chat.png"
runeclan_url = f"https://runeclan.com/user/{user_url_name}"
clan_banner_url = f"http://services.runescape.com/m=avatar-rs/l=3/a=869/{user_url_clan}/clanmotif.png"
embed_title = "RuneClan"
rank_header = "__Rank__"
clan_header = "__Clã__"
exp_header = "__Exp no Clã__"
total_exp_header = "__Exp Total__"
private_profile_header = "Indisponível - Perfil Privado"
rank_emoji = self.bot.setting.clan_settings[user_rank]['Emoji']
user_rank = self.bot.setting.clan_settings[user_rank]['Translation']
clan_info_embed = discord.Embed(
title=embed_title,
description="",
color=discord.Colour.dark_blue(),
url=runeclan_url,
)
clan_info_embed.set_author(
icon_url=icon_url, name=display_username
)
clan_info_embed.set_thumbnail(
url=clan_banner_url
)
clan_info_embed.add_field(
name=clan_header,
value=player.clan
)
clan_info_embed.add_field(
name=rank_header,
value=f"{user_rank} {rank_emoji}"
)
clan_info_embed.add_field(
name=exp_header,
value=f"{user_clan_exp:,}"
)
if player.private_profile:
clan_info_embed.add_field(
name=total_exp_header,
value=private_profile_header,
inline=False
)
else:
clan_info_embed.add_field(
name=total_exp_header,
value=f"{player.exp:,}"
)
return await ctx.send(content=None, embed=clan_info_embed)
@commands.cooldown(1, 5, commands.BucketType.user)
@commands.bot_has_permissions(embed_links=True)
@commands.command(aliases=['ranksupdate', 'upranks', 'rank'])
async def ranks(self, ctx: Context, *, clan: str = 'Atlantis'):
if clan.lower() == 'atlantis argus':
return await ctx.send('`!rank argus` irmão')
elif clan.lower() == 'atlantis':
exp_general = 2_000_000_000
exp_captain = 1_000_000_000
exp_lieutenant = 500_000_000
exp_seargent = 250_000_000
exp_corporal = 125_000_000
elif clan.lower() == 'argus':
exp_general = 500_000_000
exp_captain = 250_000_000
exp_lieutenant = 125_000_000
exp_seargent = 60_000_000
exp_corporal = 30_000_000
clan = 'Atlantis Argus'
else:
return await ctx.send('Clã não reconhecido.')
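        # map each in-game clan rank to the emoji configured in the bot settings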
rank_emoji = {
'Recruit': self.bot.setting.clan_settings['Recruit']['Emoji'],
'Corporal': self.bot.setting.clan_settings['Corporal']['Emoji'],
'Sergeant': self.bot.setting.clan_settings['Sergeant']['Emoji'],
'Lieutenant': self.bot.setting.clan_settings['Lieutenant']['Emoji'],
'Captain': self.bot.setting.clan_settings['Captain']['Emoji'],
'General': self.bot.setting.clan_settings['General']['Emoji'],
}
ranks_embed = discord.Embed(
title="__Ranks a Atualizar__",
description=" ",
)
found = False
clan = rs3clans.Clan(clan, set_exp=False)
clan_members = reversed([member for member in clan])
member: rs3clans.ClanMember
for member in clan_members:
if len(ranks_embed.fields) >= 20:
await ctx.send('Muitos ranks a serem atualizados, enviando apenas os 20 primeiros.')
break
if member.exp >= exp_corporal and member.rank == 'Recruit':
ranks_embed.add_field(
name=member.name,
value=f"Recruta {rank_emoji['Recruit']} ❯ Cabo {rank_emoji['Corporal']}\n"
f"**__Exp:__** {member.exp:,}\n{separator}",
inline=False)
found = True
elif member.exp >= exp_general and member.rank == 'Captain':
ranks_embed.add_field(
name=member.name,
value=f"Capitão {rank_emoji['Captain']} ❯ General {rank_emoji['General']}\n"
f"**__Exp:__** {member.exp:,}\n{separator}",
inline=False)
found = True
elif member.exp >= exp_captain and member.rank == 'Lieutenant':
ranks_embed.add_field(
name=member.name,
value=f"Tenente {rank_emoji['Lieutenant']} ❯ Capitão {rank_emoji['Captain']}\n"
f"**__Exp:__** {member.exp:,}\n{separator}",
inline=False)
found = True
elif member.exp >= exp_lieutenant and member.rank == 'Sergeant':
ranks_embed.add_field(
name=member.name,
value=f"Sargento {rank_emoji['Sergeant']} ❯ Tenente {rank_emoji['Lieutenant']}\n"
f"**__Exp:__** {member.exp:,}\n{separator}",
inline=False)
found = True
elif member.exp >= exp_seargent and member.rank == 'Corporal':
ranks_embed.add_field(
name=member.name,
value=f"Cabo {rank_emoji['Corporal']} ❯ Sargento {rank_emoji['Sergeant']}\n"
f"**__Exp:__** {member.exp:,}\n{separator}",
inline=False)
found = True
if not found:
ranks_embed.add_field(
name="Nenhum Rank a ser atualizado no momento :)",
value=separator,
inline=False
)
return await ctx.send(embed=ranks_embed)
def setup(bot):
bot.add_cog(Clan(bot))
| 42.064815 | 115 | 0.569117 | 8,880 | 0.974325 | 0 | 0 | 8,770 | 0.962256 | 8,235 | 0.903555 | 2,142 | 0.235023 |
16ec4bab280bd7d838f873bdb4d147f41ca2f107 | 2,539 | py | Python | otcextensions/tests/functional/osclient/vpc/v2/common.py | zsoltn/python-otcextensions | 4c0fa22f095ebd5f9636ae72acbae5048096822c | [
"Apache-2.0"
] | 10 | 2018-03-03T17:59:59.000Z | 2020-01-08T10:03:00.000Z | otcextensions/tests/functional/osclient/vpc/v2/common.py | zsoltn/python-otcextensions | 4c0fa22f095ebd5f9636ae72acbae5048096822c | [
"Apache-2.0"
] | 208 | 2020-02-10T08:27:46.000Z | 2022-03-29T15:24:21.000Z | otcextensions/tests/functional/osclient/vpc/v2/common.py | zsoltn/python-otcextensions | 4c0fa22f095ebd5f9636ae72acbae5048096822c | [
"Apache-2.0"
] | 15 | 2020-04-01T20:45:54.000Z | 2022-03-23T12:45:43.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import uuid
from datetime import datetime
from openstackclient.tests.functional import base
class VpcTestCase(base.TestCase):
"""Common functional test bits for VPC commands"""
CURR_TIME = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
def setUp(self):
super(VpcTestCase, self).setUp()
UUID = uuid.uuid4().hex[:8]
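        # suffix resource names with a random uuid so parallel test runs do not collide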
self.LOCAL_ROUTER_NAME = 'test-local-router-otce-cli' + UUID
self.PEER_ROUTER_NAME = 'test-peer-router-otce-cli' + UUID
self.PEERING_NAME = 'test-peering-otce-cli-' + UUID
self.LOCAL_ROUTER_ID = None
self.PEER_ROUTER_ID = None
self.PEERING_ID = None
def create_vpc_peering(self, name=None):
self._create_routers()
name = name or self.PEERING_NAME
json_output = json.loads(self.openstack(
'vpc peering create '
'{name} '
'--local-router-id "{local_router_id}" '
'--peer-router-id "{peer_router_id}" '
'-f json'.format(
name=name,
local_router_id=self.LOCAL_ROUTER_ID,
peer_router_id=self.PEER_ROUTER_ID)
))
self.assertIsNotNone(json_output)
self.PEERING_ID = json_output['id']
return json_output
def delete_vpc_peering(self):
self.addCleanup(self._delete_routers)
self.openstack('vpc peering delete {}'.format(self.PEERING_ID))
def _create_routers(self):
local_router = json.loads(self.openstack(
'router create -f json ' + self.LOCAL_ROUTER_NAME
))
self.LOCAL_ROUTER_ID = local_router['id']
peer_router = json.loads(self.openstack(
'router create -f json ' + self.PEER_ROUTER_NAME
))
self.PEER_ROUTER_ID = peer_router['id']
def _delete_routers(self):
self.openstack(
'router delete {} {}'.format(
self.LOCAL_ROUTER_ID, self.PEER_ROUTER_ID
))
| 33.853333 | 77 | 0.639228 | 1,864 | 0.734147 | 0 | 0 | 0 | 0 | 0 | 0 | 925 | 0.364317 |
16ef740b41f41832481d4956834bb037ddc3b7b6 | 2,614 | py | Python | tests/test_nested_structures_inside_structure_values.py | Robinson04/StructNoSQL | 335c63593025582336bb67ad0b0ed39d30800b74 | [
"MIT"
] | 3 | 2020-10-30T23:31:26.000Z | 2022-03-30T21:48:40.000Z | tests/test_nested_structures_inside_structure_values.py | Robinson04/StructNoSQL | 335c63593025582336bb67ad0b0ed39d30800b74 | [
"MIT"
] | 42 | 2020-09-16T15:23:11.000Z | 2021-09-20T13:00:50.000Z | tests/test_nested_structures_inside_structure_values.py | Robinson04/StructNoSQL | 335c63593025582336bb67ad0b0ed39d30800b74 | [
"MIT"
] | 2 | 2021-01-03T21:37:22.000Z | 2021-08-12T20:28:52.000Z | import unittest
from typing import Set, Optional, Dict, List
from uuid import uuid4
from StructNoSQL import BaseField, MapModel, TableDataModel
from tests.components.playground_table_clients import PlaygroundDynamoDBBasicTable, TEST_ACCOUNT_ID
class TableModel(TableDataModel):
accountId = BaseField(field_type=str, required=True)
nestedDictDictStructure = BaseField(field_type=Dict[str, Dict[str, bool]], required=False, key_name='itemKey')
# nestedDictListStructure = BaseField(field_type=Dict[str, List[str]], required=False)
# nestedDictSetStructure = BaseField(field_type=Dict[str, Set[str]], required=False)
class TestsNestedStructuresInsideStructureValues(unittest.TestCase):
def __init__(self, method_name: str):
super().__init__(methodName=method_name)
self.users_table = PlaygroundDynamoDBBasicTable(data_model=TableModel)
def test_nested_dict_dict_structure(self):
random_parent_key = f"parentKey_{uuid4()}"
random_child_key = f"childKey_{uuid4()}"
keys_fields_switch = list(self.users_table.fields_switch.keys())
self.assertIn('nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}', keys_fields_switch)
update_success = self.users_table.update_field(
key_value=TEST_ACCOUNT_ID,
field_path='nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}',
query_kwargs={'itemKey': random_parent_key, 'itemKeyChild': random_child_key},
value_to_set=True
)
self.assertTrue(update_success)
retrieved_item = self.users_table.get_field(
key_value=TEST_ACCOUNT_ID,
field_path='nestedDictDictStructure.{{itemKey}}',
query_kwargs={'itemKey': random_parent_key}
)
self.assertEqual(retrieved_item, {'itemKeyChild': True})
removed_item = self.users_table.remove_field(
key_value=TEST_ACCOUNT_ID,
field_path='nestedDictDictStructure.{{itemKey}}',
query_kwargs={'itemKey': random_parent_key}
)
self.assertEqual(removed_item, {'itemKeyChild': True})
retrieved_expected_none_item = self.users_table.get_field(
TEST_ACCOUNT_ID,
field_path='nestedDictDictStructure.{{itemKey}}',
query_kwargs={'itemKey': random_parent_key}
)
self.assertIsNone(retrieved_expected_none_item)
def test_nested_dict_list_structure(self):
# todo: implement
pass
def test_nested_dict_set_structure(self):
# todo: implement
pass
if __name__ == '__main__':
unittest.main()
| 38.441176 | 114 | 0.704285 | 2,314 | 0.885233 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.215379 |