| Column | Type | Range / Values |
| --- | --- | --- |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, nullable |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
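Each row below is one source file together with its repository and GitHub metadata, in the flattened layout produced by the dataset viewer. As a minimal, hypothetical sketch of how a dataset with this schema could be inspected with the Hugging Face `datasets` library (the dataset path is a placeholder, since this dump does not name the dataset; streaming avoids materializing `content` cells that can reach 10.2 MB):

```python
from datasets import load_dataset

# "org/dataset-name" is a placeholder; the dump above does not say which dataset this is.
ds = load_dataset("org/dataset-name", split="train", streaming=True)

for row in ds:
    # One row = one source file plus its repository/GitHub metadata.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])  # the file text itself lives in the `content` column
    break
```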
ce40ecd136466000b6cd713b9b978e417d2d76d2 | ff2d0e396c2e277751fced5869975faa8260e1d9 | /BASIC/SplitDataset.py | 2e72f8d4b143288ee97d5f5781819cf26a72aea2 | []
| no_license | kongruksiamza/MachineLearning | 53207055090deaea0a44789cebfef01f5d395188 | b6843a5fb97af9d21fe13aee6c0d45f36ff99131 | refs/heads/master | 2023-08-02T11:17:53.359201 | 2023-07-23T16:15:38 | 2023-07-23T16:15:38 | 251,460,002 | 54 | 41 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
iris_dataset=load_iris()
x_train,x_test,y_train,y_test = train_test_split(iris_dataset["data"],iris_dataset["target"],test_size=0.2,random_state=0)
print(x_train.shape)
print(x_test.shape)
print(y_train.shape)
print(y_test.shape)
#150 samples total in iris
#train 80% = 120
#test 20% = 30 | [
"[email protected]"
]
| |
3dd5f7ff3422dda82d6a47010f7030a4e120e353 | ba9cb3bbc46faeea1edc01ef7e18131ae2dbf923 | /problem-046.py | 13d019dd467ac87655a99dcce72627471b56b455 | []
| no_license | beautytiger/project-euler | fb9908d35a82cd4b912a541282842adca03b17e2 | a8de90a2e5b98660505169afd9c8c27b1b3af28e | refs/heads/master | 2021-06-19T00:40:17.331130 | 2017-05-31T11:04:05 | 2017-05-31T11:04:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 834 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from tools.runningTime import runTime
from tools.common import is_prime
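# Goldbach's other conjecture: every odd composite should equal a prime plus twice a square.
# fit_conjecture walks pt through the twice-squares 2*n**2 (2, 8, 18, 32, ...); each step adds 4*n + 2.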
def fit_conjecture(n_odd):
n, pt = 1, 2
while pt<n_odd:
if is_prime(n_odd-pt):
return True
n, pt = n+1, pt+4*n+2
return False
@runTime
def bruteForce():
odd = 33
while True:
odd += 2
if is_prime(odd) or fit_conjecture(odd):
continue
print "Result: {}".format(odd)
break
@runTime
def newBruteForce():
n = 5
primes = set([2, 3])
while True:
if all(n%p for p in primes):
primes.add(n)
elif not any((n-2*i*i) in primes for i in range(1, int(n**0.5)+1)):
break
n += 2
print "Result: {}".format(n)
if __name__ == "__main__":
bruteForce()
newBruteForce()
| [
"[email protected]"
]
| |
0f4ed3f874c9e27895f523a94e8e2df3b9a707cc | ec827bd5df431c9400946e8d0593448814b5534b | /venv/bin/rst2s5.py | 0e39c10297f2eeb75cd100e23ad0e393e3c581d1 | []
| no_license | grantnicholas/pytone | 7acd70878de8090d06d7a2911a67b3dbb3b64256 | b89c688cc88588a3758fff288bc9b1364534b42e | refs/heads/master | 2021-01-23T06:19:47.203418 | 2014-09-21T21:52:27 | 2014-09-21T21:52:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | #!/home/grant/Desktop/pytone/venv/bin/python
# $Id: rst2s5.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Chris Liechti <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML slides using
the S5 template system.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates S5 (X)HTML slideshow documents from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='s5', description=description)
| [
"[email protected]"
]
| |
36e803ee78844708170f2dc1daa1eff208b5f8b0 | a003200e29c4ea64e9ef65a23eea2d1d8c4ad03b | /client/web/client_control_port.py | 6b54ac526ac9c42588bfaf40df72078e55dcbd25 | []
| no_license | jason12360/crazy_coder | 10b77ef69994ff11e7fde49ad28d928851388be9 | 7ba161ef89ffd2da66f430c4af04a53c2de1667f | refs/heads/master | 2020-03-20T22:10:35.884104 | 2018-06-29T10:06:24 | 2018-06-29T10:06:24 | 137,783,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,653 | py | from socket import *
import sys
import os
import time
import signal
from threading import Thread
import client_data_port
# Import model-related modules
from file import *
from file_folder import *
from database_handler import *
import my_protocol
# Import view-related modules
from login_view import Login_Page
from login_handler import Login_handler
from main_page import MainPage
from main_handler import Main_handler
# User directory path
file_path = '/home/tarena/ftp_web(2)/'
# Error code
# CODE_NUM=0
def run(ctrl_socket,child_pid,recv_queue,send_queue):
    # Pass myconnection to the handler to perform the login operations
    # Create the client request object
global _ctrl_socket
_ctrl_socket = ctrl_socket
c_ftp = MyFtp_Client(_ctrl_socket,recv_queue)
    # UI
login_page = Login_Page()
login_handler = Login_handler(login_page)
login_handler.bind(comment_handler,c_ftp)
login_page.register_handler(login_handler)
login_handler.setup(child_pid,send_queue)
login_page.run()
main_page = MainPage()
global main_handler
main_handler =Main_handler(main_page)
c_ftp.set_view_handler(main_handler)
main_handler.bind(comment_handler,c_ftp)
main_page.register_handler(main_handler)
    # Let the parent process listen for the child process's signal
signal.signal(40,main_handler.display_chat)
main_handler.setup(child_pid,send_queue)
main_page.run()
# Protocol structure: request type + attribute + content + terminator
# comment='list+'+str(client_add)+'+'+''+'+@end'
# Control side: functions invoked by the feature-selection UI
# Control thread: only control happens here; the actual transfer and feedback are handled by the parent thread
def comment_handler(comment, c_ftp):
data = comment.split('+')
print(data)
if data[0] == "list":
if not data[2]:
            # No folder was given, so send a plain list request
return c_ftp.list_request()
else:
            # data[2] is a folder; if given, send list data[2]
return c_ftp.list_request(data[2])
elif data[0] == "upld":
return c_ftp.upload_request(data[1],data[2])
elif data[0] == "dwld":
return c_ftp.download_request(data[1],data[2])
    # These two features use TCP only ---------------------------
elif data[0] == 'chat':
        # data[2] is the chat message
c_ftp.chat_request(data[2])
    # Log in
elif data[0] == "login":
        # data[1] is the account
        # data[2] is the password
return c_ftp.login_request(data[1], data[2])
#---------------------------------------------
elif data[0] == "reg":
        # data[1] is the account
        # data[2] is the password
return c_ftp.register_request(data[1], data[2])
#---------------------------------------------
elif data[0] == "quit":
c_ftp.quit_request()
return 0
else:
print("commond is not defined")
class MyFtp_Client():
def __init__(self, s,chat_queue):
self.s = s
self.chat_queue = chat_queue
def list_request(self, foldername=''):
my_protocol.list_bale_TCP(self.s, foldername)
        # Wait for the response
data = ''
while True:
_ = self.s.recv(2048).decode()
if _[-4:]=='@end':
data += _
break
data+=_
x = data.split('+')
if x[0] in ['list','upld','dwld','chat','quit','login','reg']:
if x[3]=='@end':
return x[2]
else:
                print('Data packet lost')
return -1
elif x[0]=='':
print("客户端意外退出")
return -1
else:
            print('Data packet lost')
return -1
    # Bind the view handler so the worker thread can call it
def set_view_handler(self,handler):
self.view_handler = handler
    # This thread only tells the server the name and attributes of the file to send; the server first checks for a duplicate name,
    # and reports a duplicate if the name exists; otherwise it adds the file's info to the database
def upload_request(self, file_property,filename):
        # Initialize the error code used to report the result
CODE_NUM='1'
        # Pack and send
my_protocol.upld_bale_TCP(self.s,file_property,filename)
        # Wait for the response
        # Build a File object to hand to the worker process
file = File()
file.unpack(file_property)
data = my_protocol.unpake_TCP(self.s)
if data != -1:
if data[2] == '3':
CODE_NUM='3'
elif data[2]=='go':
DATA_HOST = self.s.getsockname()[0]
DATA_PORT = 0
DATA_ADDR = (DATA_HOST, DATA_PORT)
data_socket = socket()
data_socket.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)
                # Client listening socket; binding to port 0 lets the OS pick a free ephemeral port
data_socket.bind(DATA_ADDR)
# data_socket.listen(10)
data_addr = data_socket.getsockname()
                # Send the data port number to the server
my_protocol.upld_bale_TCP(self.s,'',str(data_socket.getsockname()[1]))
                # Wait for the server to connect
data_socket.close()
                # Spawn a new thread to upload the file
t = Thread(target=client_data_port.run, args=(
'u', data_addr,file,self.view_handler))
t.setDaemon(True)
t.start()
return CODE_NUM
def download_request(self,download_path,filename):
CODE_NUM="1"
my_protocol.dwld_bale_TCP(self.s,'',filename)
        # Wait for the response
data = my_protocol.unpake_TCP(self.s)
if data != -1:
if data[2] == '2':
# print("文件在服务器里不存")
CODE_NUM='2'
elif data[2]=='go':
file_path = download_path +'/'+filename
DATA_HOST = self.s.getsockname()[0]
DATA_PORT = 0
DATA_ADDR = (DATA_HOST, DATA_PORT)
data_socket = socket()
data_socket.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)
                # Client listening socket; binding to port 0 lets the OS pick a free ephemeral port
data_socket.bind(DATA_ADDR)
# data_socket.listen(10)
data_addr = data_socket.getsockname()
                # Send the data port number to the server
my_protocol.upld_bale_TCP(self.s,'',str(data_socket.getsockname()[1]))
                # Wait for the server to connect
data_socket.close()
t = Thread(target=client_data_port.run, args=(
'd', data_addr,file_path,self.view_handler))
t.setDaemon(True)
t.start()
# if R==10:
# CODE_NUM=10
# else:
# CODE_NUM=11
return CODE_NUM
def chat_request(self, message):
# my_protocol.chat_bale_TCP(self.s,message)
pass
def login_request(self, admin, password):
        # Sent over TCP and compared against the user table in the server's database; log in on success
        # Note: admin and password must be strings
my_protocol.login_request(self.s, admin, password)
response = self.s.recv(1024).decode()
return response
def register_request(self, admin, password):
        # Note: admin and password must be strings
my_protocol.reg_request(self.s, admin, password)
response = self.s.recv(1024).decode()
return response
def get_chat_word(self):
return self.chat_queue.get()
def quit_request(self):
        # Pack and send via the protocol
my_protocol.quit_bale_TCP(self.s)
self.s.close()
print("已退出")
| [
"[email protected]"
]
| |
59b868c98b2088899c75c3b49e981d14738f1ed6 | 7ec5aa43d8f2e732189944391447a8551d24abaa | /backend/home/migrations/0002_load_initial_data.py | b7766e8a267c6d8282dbfff6aac063a6b6502fa2 | []
| no_license | crowdbotics-apps/louis-vesovski-19158 | e852e8158074704226a77ee9c564c7f29f39413e | 61f4460d77d9077363392820f719febdd8a30434 | refs/heads/master | 2022-11-26T04:53:16.880047 | 2020-07-25T00:53:05 | 2020-07-25T00:53:05 | 282,343,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,310 | py | from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "Louis Vesovski"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">Louis Vesovski</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "louis-vesovski-19158.botics.co"
site_params = {
"name": "Louis Vesovski",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
| [
"[email protected]"
]
| |
40b818e3e0c2744b716fced907cee2cd4eaad934 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/containerregistry/v20201101preview/private_endpoint_connection.py | 76733558caa78a0936790d6194e3b68d77e277d5 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,847 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['PrivateEndpointConnectionArgs', 'PrivateEndpointConnection']
@pulumi.input_type
class PrivateEndpointConnectionArgs:
def __init__(__self__, *,
registry_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
private_endpoint: Optional[pulumi.Input['PrivateEndpointArgs']] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']] = None):
"""
The set of arguments for constructing a PrivateEndpointConnection resource.
:param pulumi.Input[str] registry_name: The name of the container registry.
:param pulumi.Input[str] resource_group_name: The name of the resource group to which the container registry belongs.
:param pulumi.Input['PrivateEndpointArgs'] private_endpoint: The resource of private endpoint.
:param pulumi.Input[str] private_endpoint_connection_name: The name of the private endpoint connection.
:param pulumi.Input['PrivateLinkServiceConnectionStateArgs'] private_link_service_connection_state: A collection of information about the state of the connection between service consumer and provider.
"""
pulumi.set(__self__, "registry_name", registry_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if private_endpoint is not None:
pulumi.set(__self__, "private_endpoint", private_endpoint)
if private_endpoint_connection_name is not None:
pulumi.set(__self__, "private_endpoint_connection_name", private_endpoint_connection_name)
if private_link_service_connection_state is not None:
pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
@property
@pulumi.getter(name="registryName")
def registry_name(self) -> pulumi.Input[str]:
"""
The name of the container registry.
"""
return pulumi.get(self, "registry_name")
@registry_name.setter
def registry_name(self, value: pulumi.Input[str]):
pulumi.set(self, "registry_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group to which the container registry belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional[pulumi.Input['PrivateEndpointArgs']]:
"""
The resource of private endpoint.
"""
return pulumi.get(self, "private_endpoint")
@private_endpoint.setter
def private_endpoint(self, value: Optional[pulumi.Input['PrivateEndpointArgs']]):
pulumi.set(self, "private_endpoint", value)
@property
@pulumi.getter(name="privateEndpointConnectionName")
def private_endpoint_connection_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the private endpoint connection.
"""
return pulumi.get(self, "private_endpoint_connection_name")
@private_endpoint_connection_name.setter
def private_endpoint_connection_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_endpoint_connection_name", value)
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]:
"""
A collection of information about the state of the connection between service consumer and provider.
"""
return pulumi.get(self, "private_link_service_connection_state")
@private_link_service_connection_state.setter
def private_link_service_connection_state(self, value: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]):
pulumi.set(self, "private_link_service_connection_state", value)
class PrivateEndpointConnection(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
private_endpoint: Optional[pulumi.Input[pulumi.InputType['PrivateEndpointArgs']]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']]] = None,
registry_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
An object that represents a private endpoint connection for a container registry.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['PrivateEndpointArgs']] private_endpoint: The resource of private endpoint.
:param pulumi.Input[str] private_endpoint_connection_name: The name of the private endpoint connection.
:param pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']] private_link_service_connection_state: A collection of information about the state of the connection between service consumer and provider.
:param pulumi.Input[str] registry_name: The name of the container registry.
:param pulumi.Input[str] resource_group_name: The name of the resource group to which the container registry belongs.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PrivateEndpointConnectionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
An object that represents a private endpoint connection for a container registry.
:param str resource_name: The name of the resource.
:param PrivateEndpointConnectionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PrivateEndpointConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
private_endpoint: Optional[pulumi.Input[pulumi.InputType['PrivateEndpointArgs']]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']]] = None,
registry_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)
__props__.__dict__["private_endpoint"] = private_endpoint
__props__.__dict__["private_endpoint_connection_name"] = private_endpoint_connection_name
__props__.__dict__["private_link_service_connection_state"] = private_link_service_connection_state
if registry_name is None and not opts.urn:
raise TypeError("Missing required property 'registry_name'")
__props__.__dict__["registry_name"] = registry_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:containerregistry:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:containerregistry/v20191201preview:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:containerregistry/v20210601preview:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:containerregistry/v20210801preview:PrivateEndpointConnection")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(PrivateEndpointConnection, __self__).__init__(
'azure-native:containerregistry/v20201101preview:PrivateEndpointConnection',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PrivateEndpointConnection':
"""
Get an existing PrivateEndpointConnection resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)
__props__.__dict__["name"] = None
__props__.__dict__["private_endpoint"] = None
__props__.__dict__["private_link_service_connection_state"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return PrivateEndpointConnection(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> pulumi.Output[Optional['outputs.PrivateEndpointResponse']]:
"""
The resource of private endpoint.
"""
return pulumi.get(self, "private_endpoint")
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> pulumi.Output[Optional['outputs.PrivateLinkServiceConnectionStateResponse']]:
"""
A collection of information about the state of the connection between service consumer and provider.
"""
return pulumi.get(self, "private_link_service_connection_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of private endpoint connection resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
| [
"[email protected]"
]
| |
a541a023a5932b483d9c0793625a70dedb494f6b | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /G7m26EdX3AABCSQBv_16.py | 9a9f37cc926a9dcbbedbf54b6e1d21a544cebe38 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | """
Given a very long string of ASCII characters, split the string up into equal
sized groups of size `width`. To properly display the image, join up the
groups with the newline character `\n` and return the output string.
See the miniature examples below for clarity!
### Examples
format_ascii("0123456789", 2) ➞ "01\n23\n45\n67\n89"
format_ascii("................................", 8) ➞ "........\n........\n........\n........"
format_ascii("^^^^^^^^", 1) ➞ "^\n^\n^\n^\n^\n^\n^\n^"
### Notes
Enjoy the (somewhat oversized) art in the **Tests** tab.
"""
def format_ascii(txt, width):
result = ""
for i, letter in enumerate(txt):
        if (i % width == 0) and (i > 0):  # newline before each new row, but not before the first character
result += '\n'
result += letter
return result
| [
"[email protected]"
]
| |
59403b93978115731520c62a6f10b86aa0fa685f | 6915d6a20d82ecf2a2a3d3cd84ca22dab2491004 | /cbvproject3/testapp/admin.py | c1eeb35fe942b7f66b8b1bf1196933923b71ca4b | []
| no_license | iitian-gopu/django | bb4302d101f4434fb61ab374807e29699a432e42 | 31db982212bbb453cc4c56c7f5cfad9a00cd231d | refs/heads/master | 2023-05-14T07:22:35.176477 | 2021-06-04T04:43:26 | 2021-06-04T04:43:26 | 366,114,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | from django.contrib import admin
from testapp.models import Company
# Register your models here.
class CompanyAdmin(admin.ModelAdmin):
list_display=['name','location','ceo']
admin.site.register(Company,CompanyAdmin) | [
"[email protected]"
]
| |
a91ee7ae0112a2ba01ad8bf8c33a7d499b5605e0 | 475e2fe71fecddfdc9e4610603b2d94005038e94 | /Coding/listComprehension.py | b45dd7656d0b549f761c910df9a9a7ed7b98f52d | []
| no_license | sidhumeher/PyPractice | 770473c699aab9e25ad1f8b7b7cd8ad05991d254 | 2938c14c2e285af8f02e2cfc7b400ee4f8d4bfe0 | refs/heads/master | 2021-06-28T20:44:50.328453 | 2020-12-15T00:51:39 | 2020-12-15T00:51:39 | 204,987,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | '''
Created on Dec 27, 2018
@author: siddardha.teegela
'''
if __name__ == '__main__':
newList = []
oldList = [1,2,3,4,5]
'''
List Comprehension
[expression for item in list if condition]
'''
newList = [item*2 for item in oldList if item > 1]
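    # keeps items greater than 1 and doubles them; newList is now [4, 6, 8, 10]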
print (newList) | [
"[email protected]"
]
| |
70c89281004cff4894e4f6ebe0880822e29655e1 | 941babd1b7711c9e2935704db283568536f06306 | /app/members/backends.py | cf753ebab5f6ce66cf8c987be1950138f731b6b3 | []
| no_license | orca9s/eb-docker-deploy | 8006fc0bcc81f76137f92a11c417708bcc1acbd9 | f5f5ccc2ff8719160528f04c44f737acb39a9b00 | refs/heads/master | 2022-12-10T09:00:08.879297 | 2018-07-17T13:04:24 | 2018-07-17T13:04:24 | 140,647,708 | 0 | 0 | null | 2022-12-08T02:20:14 | 2018-07-12T02:00:52 | Python | UTF-8 | Python | false | false | 805 | py | from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import check_password
from members.management.commands.createsu import Command
User = get_user_model()
class SettingsBackend:
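    # Custom auth backend: checks credentials against ADMIN_USERNAME/ADMIN_PASSWORD in settings
    # and lazily creates a matching superuser record in the database on first login.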
def authenticate(self, request, username=None, password=None):
login_valid = (settings.ADMIN_USERNAME == username)
pwd_valid = check_password(password, settings.ADMIN_PASSWORD)
if login_valid and pwd_valid:
try:
user = User.objects.get(
username=username,
)
except User.DoesNotExist:
user = User(username=username)
user.is_staff = True
user.is_superuser = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None | [
"[email protected]"
]
| |
f351c542553963cd2645833531a8376989706d28 | 62d62fd3ee2f0717c7acbb0c2edfa1e53926f728 | /phoenix/wizard/views/catalogsearch.py | 6d0f2b3bbffa9f8ff7fc8771dcc7123800a75736 | [
"Apache-2.0"
]
| permissive | rmoorman/pyramid-phoenix | 911a9ef8dcca48889834cf46109321056cdbb35b | ed3ede4dbb80f00bcd647a5e4ae2afbedab94e09 | refs/heads/master | 2021-01-15T14:29:20.450828 | 2016-05-04T16:27:04 | 2016-05-04T16:27:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,893 | py | from pyramid.view import view_config
from phoenix.wizard.views import Wizard
import logging
logger = logging.getLogger(__name__)
import colander
class CatalogSearchSchema(colander.MappingSchema):
pass
class CatalogSearch(Wizard):
def __init__(self, request):
super(CatalogSearch, self).__init__(
request, name='wizard_csw', title="CSW Catalog Search")
self.description = self.wizard_state.get('wizard_complex_inputs')['identifier']
def schema(self):
return CatalogSearchSchema()
def next_success(self, appstruct):
#self.success(appstruct)
return self.next('wizard_done')
def search_csw(self, query=''):
keywords = [k for k in map(str.strip, str(query).split(' ')) if len(k)>0]
# TODO: search all formats
format = self.wizard_state.get('wizard_complex_inputs')['mime_types'][0]
from string import Template
cql_tmpl = Template("""\
dc:creator='${email}'\
and dc:format='${format}'
""")
cql = cql_tmpl.substitute({
'email': self.get_user().get('email'),
'format': format})
cql_keyword_tmpl = Template('and csw:AnyText like "%${keyword}%"')
for keyword in keywords:
cql += cql_keyword_tmpl.substitute({'keyword': keyword})
results = []
try:
self.csw.getrecords(esn="full", cql=cql)
logger.debug('csw results %s', self.csw.results)
for rec in self.csw.records:
myrec = self.csw.records[rec]
results.append(dict(
source = myrec.source,
identifier = myrec.identifier,
title = myrec.title,
abstract = myrec.abstract,
subjects = myrec.subjects,
format = myrec.format,
creator = myrec.creator,
modified = myrec.modified,
bbox = myrec.bbox,
references = myrec.references,
))
except:
logger.exception('could not get items for csw.')
return results
@view_config(route_name='wizard_csw_select', renderer='json')
def select_record(self):
recordid = self.request.matchdict.get('recordid')
# TODO: refactor this ... not efficient
appstruct = self.appstruct()
if recordid is not None:
selection = appstruct.get('selection', [])
if recordid in selection:
selection.remove(recordid)
else:
selection.append(recordid)
appstruct['selection'] = selection
self.success(appstruct)
return {}
def custom_view(self):
query = self.request.params.get('query', None)
checkbox = self.request.params.get('checkbox', None)
items = self.search_csw(query)
for item in items:
# TODO: refactor this
if item['identifier'] in self.appstruct().get('selection', []):
item['selected'] = True
else:
item['selected'] = False
grid = CatalogSearchGrid(
self.request,
items,
['title', 'format', 'selected'],
)
return dict(grid=grid, items=items)
@view_config(route_name='wizard_csw', renderer='../templates/wizard/csw.pt')
def view(self):
return super(CatalogSearch, self).view()
from phoenix.grid import MyGrid
class CatalogSearchGrid(MyGrid):
def __init__(self, request, *args, **kwargs):
super(CatalogSearchGrid, self).__init__(request, *args, **kwargs)
self.column_formats['selected'] = self.selected_td
self.column_formats['title'] = self.title_td
self.column_formats['format'] = self.format_td
self.column_formats['modified'] = self.modified_td
def title_td(self, col_num, i, item):
return self.render_title_td(item['title'], item['abstract'], item.get('subjects'))
def format_td(self, col_num, i, item):
return self.render_format_td(item['format'], item['source'])
def modified_td(self, col_num, i, item):
return self.render_timestamp_td(timestamp=item.get('modified'))
def selected_td(self, col_num, i, item):
from string import Template
from webhelpers2.html.builder import HTML
icon_class = "glyphicon glyphicon-thumbs-down"
if item.get('selected') == True:
icon_class = "glyphicon glyphicon-thumbs-up"
div = Template("""\
<a class="select" data-value="${recordid}" href="#"><i class="${icon_class}"></i></a>
""")
return HTML.td(HTML.literal(div.substitute({'recordid': item['identifier'],
'icon_class': icon_class} )))
| [
"[email protected]"
]
| |
232c7639ba3f954ee8f10d3f5f37d0a9a52dac8b | 381d5b981dbcff769297351467f4b3e994668a84 | /cmo_purchase_group/models/common.py | 6ff66c6dccda1e3a53ce1b8dc7782c690f1922c7 | []
| no_license | jutamatk/cmo_specific | f6c36da767f267b4c24a6933da54fa1536c4c309 | 14f5232dfde67f5d5dbeb4cf28538132954403cb | refs/heads/master | 2020-03-22T02:48:31.642768 | 2018-05-14T10:28:23 | 2018-05-14T10:28:23 | 139,394,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | # -*- coding: utf-8 -*-
from openerp import api
from lxml import etree
class Common(object):
@api.model
def set_right_readonly_group(self, res):
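        # Make the view read-only by switching off create/edit/delete in its arch XML.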
root = etree.fromstring(res['arch'])
root.set('create', 'false')
root.set('edit', 'false')
root.set('delete', 'false')
res['arch'] = etree.tostring(root)
return res
| [
"[email protected]"
]
| |
044fb07a14c80e7e229a65320964625ed26c6cab | 8f1d6f17d3bdad867518b7b0a164adfe6aeeed95 | /detection/retinaface/rcnn/PY_OP/rpn_fpn_ohem3.py | b8f7d462ec9aec245852338b392fb4d8afd3311c | [
"MIT",
"LicenseRef-scancode-proprietary-license"
]
| permissive | xwyangjshb/insightface | 2c7f030a5d1f5a24b18967bd0d775ee33933d37f | ae233babaf7614ef4ef28dac0171205835d78d64 | refs/heads/master | 2022-09-29T07:49:22.944700 | 2022-09-22T11:36:12 | 2022-09-22T11:36:12 | 221,020,460 | 1 | 0 | MIT | 2019-11-11T16:16:56 | 2019-11-11T16:16:55 | null | UTF-8 | Python | false | false | 7,624 | py | from __future__ import print_function
import sys
import mxnet as mx
import numpy as np
from distutils.util import strtobool
from ..config import config, generate_config
STAT = {0: 0}
STEP = 28800
class RPNFPNOHEM3Operator(mx.operator.CustomOp):
def __init__(self, stride=0, network='', dataset='', prefix=''):
super(RPNFPNOHEM3Operator, self).__init__()
self.stride = int(stride)
self.prefix = prefix
generate_config(network, dataset)
self.mode = config.TRAIN.OHEM_MODE #0 for random 10:245, 1 for 10:246, 2 for 10:30, mode 1 for default
global STAT
for k in config.RPN_FEAT_STRIDE:
STAT[k] = [0, 0, 0]
def forward(self, is_train, req, in_data, out_data, aux):
global STAT
cls_score = in_data[0].asnumpy() #BS, 2, ANCHORS
labels_raw = in_data[1].asnumpy() # BS, ANCHORS
A = config.NUM_ANCHORS
anchor_weight = np.zeros((labels_raw.shape[0], labels_raw.shape[1], 1),
dtype=np.float32)
valid_count = np.zeros((labels_raw.shape[0], 1), dtype=np.float32)
#print('anchor_weight', anchor_weight.shape)
#assert labels.shape[0]==1
#assert cls_score.shape[0]==1
#assert bbox_weight.shape[0]==1
#print('shape', cls_score.shape, labels.shape, file=sys.stderr)
#print('bbox_weight 0', bbox_weight.shape, file=sys.stderr)
#bbox_weight = np.zeros( (labels_raw.shape[0], labels_raw.shape[1], 4), dtype=np.float32)
_stat = [0, 0, 0]
for ibatch in range(labels_raw.shape[0]):
_anchor_weight = np.zeros((labels_raw.shape[1], 1),
dtype=np.float32)
labels = labels_raw[ibatch]
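            # fg/bg logit margin: larger values mean the classifier is more confident the anchor is foreground.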
fg_score = cls_score[ibatch, 1, :] - cls_score[ibatch, 0, :]
fg_inds = np.where(labels > 0)[0]
num_fg = int(config.TRAIN.RPN_FG_FRACTION *
config.TRAIN.RPN_BATCH_SIZE)
origin_num_fg = len(fg_inds)
#print(len(fg_inds), num_fg, file=sys.stderr)
if len(fg_inds) > num_fg:
if self.mode == 0:
disable_inds = np.random.choice(fg_inds,
size=(len(fg_inds) -
num_fg),
replace=False)
labels[disable_inds] = -1
else:
pos_ohem_scores = fg_score[fg_inds]
order_pos_ohem_scores = pos_ohem_scores.ravel().argsort()
sampled_inds = fg_inds[order_pos_ohem_scores[:num_fg]]
labels[fg_inds] = -1
labels[sampled_inds] = 1
n_fg = np.sum(labels > 0)
fg_inds = np.where(labels > 0)[0]
num_bg = config.TRAIN.RPN_BATCH_SIZE - n_fg
if self.mode == 2:
num_bg = max(
48, n_fg * int(1.0 / config.TRAIN.RPN_FG_FRACTION - 1))
bg_inds = np.where(labels == 0)[0]
origin_num_bg = len(bg_inds)
if num_bg == 0:
labels[bg_inds] = -1
elif len(bg_inds) > num_bg:
# sort ohem scores
if self.mode == 0:
disable_inds = np.random.choice(bg_inds,
size=(len(bg_inds) -
num_bg),
replace=False)
labels[disable_inds] = -1
else:
neg_ohem_scores = fg_score[bg_inds]
order_neg_ohem_scores = neg_ohem_scores.ravel().argsort(
)[::-1]
sampled_inds = bg_inds[order_neg_ohem_scores[:num_bg]]
#print('sampled_inds_bg', sampled_inds, file=sys.stderr)
labels[bg_inds] = -1
labels[sampled_inds] = 0
if n_fg > 0:
order0_labels = labels.reshape((1, A, -1)).transpose(
(0, 2, 1)).reshape((-1, ))
bbox_fg_inds = np.where(order0_labels > 0)[0]
#print('bbox_fg_inds, order0 ', bbox_fg_inds, file=sys.stderr)
_anchor_weight[bbox_fg_inds, :] = 1.0
anchor_weight[ibatch] = _anchor_weight
valid_count[ibatch][0] = n_fg
#if self.prefix=='face':
# #print('fg-bg', self.stride, n_fg, num_bg)
# STAT[0]+=1
# STAT[self.stride][0] += config.TRAIN.RPN_BATCH_SIZE
# STAT[self.stride][1] += n_fg
# STAT[self.stride][2] += np.sum(fg_score[fg_inds]>=0)
# #_stat[0] += config.TRAIN.RPN_BATCH_SIZE
# #_stat[1] += n_fg
# #_stat[2] += np.sum(fg_score[fg_inds]>=0)
# #print('stride num_fg', self.stride, n_fg, file=sys.stderr)
# #ACC[self.stride] += np.sum(fg_score[fg_inds]>=0)
# #x = float(labels_raw.shape[0]*len(config.RPN_FEAT_STRIDE))
# x = 1.0
# if STAT[0]%STEP==0:
# _str = ['STAT']
# STAT[0] = 0
# for k in config.RPN_FEAT_STRIDE:
# acc = float(STAT[k][2])/STAT[k][1]
# acc0 = float(STAT[k][1])/STAT[k][0]
# #_str.append("%d: all-fg(%d, %d, %.4f), fg-fgcorrect(%d, %d, %.4f)"%(k,STAT[k][0], STAT[k][1], acc0, STAT[k][1], STAT[k][2], acc))
# _str.append("%d: (%d, %d, %.4f)"%(k, STAT[k][1], STAT[k][2], acc))
# STAT[k] = [0,0,0]
# _str = ' | '.join(_str)
# print(_str, file=sys.stderr)
#if self.stride==4 and num_fg>0:
# print('_stat_', self.stride, num_fg, num_bg, file=sys.stderr)
#labels_ohem = mx.nd.array(labels_raw)
#anchor_weight = mx.nd.array(anchor_weight)
#print('valid_count', self.stride, np.sum(valid_count))
#print('_stat', _stat, valid_count)
for ind, val in enumerate([labels_raw, anchor_weight, valid_count]):
val = mx.nd.array(val)
self.assign(out_data[ind], req[ind], val)
def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
for i in range(len(in_grad)):
self.assign(in_grad[i], req[i], 0)
@mx.operator.register('rpn_fpn_ohem3')
class RPNFPNOHEM3Prop(mx.operator.CustomOpProp):
def __init__(self, stride=0, network='', dataset='', prefix=''):
super(RPNFPNOHEM3Prop, self).__init__(need_top_grad=False)
self.stride = stride
self.network = network
self.dataset = dataset
self.prefix = prefix
def list_arguments(self):
return ['cls_score', 'labels']
def list_outputs(self):
return ['labels_ohem', 'anchor_weight', 'valid_count']
def infer_shape(self, in_shape):
labels_shape = in_shape[1]
#print('in_rpn_ohem', in_shape[0], in_shape[1], in_shape[2], file=sys.stderr)
anchor_weight_shape = [labels_shape[0], labels_shape[1], 1]
#print('in_rpn_ohem', labels_shape, anchor_weight_shape)
return in_shape, \
[labels_shape, anchor_weight_shape, [labels_shape[0], 1]]
def create_operator(self, ctx, shapes, dtypes):
return RPNFPNOHEM3Operator(self.stride, self.network, self.dataset,
self.prefix)
def declare_backward_dependency(self, out_grad, in_data, out_data):
return []
| [
"[email protected]"
]
| |
16c92653ecd2a8eed34a9a224ebae4975ef7cc51 | 972c508bbd49cbb7800af2729328d5421fcbba8f | /flink-python/pyflink/table/tests/test_sort.py | 0d490c7120cbfaa869394ac96ddb3363bd2dd856 | [
"Apache-2.0",
"CC-BY-2.5",
"OFL-1.1",
"AGPL-3.0-only",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-jdom",
"GCC-exception-3.1",
"MIT-0",
"MPL-2.0-no-copyleft-exception",
"CDDL-1.1",
"CDDL-1.0",
"MIT",
"CC0-1.0",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.1-only",
"BSD-2-Clause-Views",
"EPL-1.0",
"Classpath-exception-2.0",
"LicenseRef-scancode-other-permissive",
"Python-2.0",
"MPL-2.0",
"CC-PDDC",
"GPL-2.0-only",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"ISC"
]
| permissive | Xeli/flink | 4e380cc4aa1b0a79d6eb1a1715a1b5e99993f937 | d5e74d6d4f3f6be6b8ebc63c09c42b3bc8eed0d0 | refs/heads/master | 2021-06-02T00:07:47.239600 | 2019-07-04T17:05:41 | 2019-07-04T17:07:45 | 108,726,889 | 0 | 1 | Apache-2.0 | 2019-07-04T17:05:42 | 2017-10-29T11:06:09 | Java | UTF-8 | Python | false | false | 1,766 | py | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.testing.test_case_utils import PyFlinkBatchTableTestCase
class BatchTableSortTests(PyFlinkBatchTableTestCase):
def test_order_by_offset_fetch(self):
t = self.t_env.from_elements([(1, "Hello")], ["a", "b"])
result = t.order_by("a.desc").offset(2).fetch(2)
query_operation = result._j_table.getQueryOperation()
self.assertEqual(2, query_operation.getOffset())
self.assertEqual(2, query_operation.getFetch())
self.assertEqual('[desc(a)]', query_operation.getOrder().toString())
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| [
"[email protected]"
]
| |
40e0e34dc1c0d39e08775b92089714d9db4af63e | 484b72139db4e2b21268c55e2f32cae242b5abae | /src/tools/adt/test_adt_sql.py | 4cd7abef19fab96d7bce42de488f3e32fcd25a9b | []
| no_license | PIWEEK/piweekr-back | a1b3887358e32d27f7a21913586f77e020c5eae0 | 375dbf17472270dcf34651ff8a86b7ed460eb311 | refs/heads/master | 2020-04-06T07:11:44.175583 | 2016-09-15T06:38:38 | 2016-09-15T06:38:38 | 62,820,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,759 | py | import pytest
from .types import ADT_WITH_ID, Field, IntField, StrField
from .relationships import Relationship1N, RoleSingle, RoleMulti, Context
from sqlalchemy.sql import select, outerjoin
from .adt_sql import SQLADTRepository
class Card(ADT_WITH_ID):
deck_id = IntField()
title = StrField()
strength = IntField()
defense = IntField()
class Deck(ADT_WITH_ID):
name = StrField()
class DeckHasCards(Relationship1N):
role_1 = RoleSingle(role_class=Deck, role_name="deck")
role_n = RoleMulti(role_class=Card, role_name="cards", role_fk="deck_id", required=False)
def test_sql_persistence():
repo = SQLADTRepository({
"DB_NAME": "test",
"ECHO": False
})
repo.add_adt_table(Card, "cards")
repo.add_adt_table(Deck, "decks")
repo.create_all_tables()
repo.truncate_all_tables()
with repo.context() as context:
deck = repo.insert_adt(context,
repo.decks,
Deck(
name="Test deck"
)
)
card_1 = repo.insert_adt(context,
repo.cards,
Card(
deck_id=deck.id,
title="Test card #1",
strength=10,
defense=1,
)
)
card_2 = repo.insert_adt(context,
repo.cards,
Card(
deck_id=deck.id,
title="Test card #2",
strength=8,
defense=7,
)
)
with repo.context() as context:
r_deck = repo.retrieve_single_adt(context,
Deck,
select([repo.decks])
.where(repo.decks.c.id == deck.id)
)
assert r_deck.id == deck.id
assert r_deck.name == deck.name
with repo.context() as context:
r_cards = repo.retrieve_adts(context,
Card,
select([repo.cards])
)
assert len(r_cards) == 2
assert card_1.id in [card.id for card in r_cards]
assert card_2.id in [card.id for card in r_cards]
with repo.context() as context:
r_decks = repo.retrieve_joined_adts(context,
Deck, {"decks": Deck, "cards": Card},
select([repo.decks, repo.cards], use_labels=True)
.select_from(outerjoin(
repo.decks, repo.cards, repo.decks.c.id == repo.cards.c.deck_id
))
.where(repo.decks.c.id == deck.id)
)
assert len(r_decks) == 1
r_deck = r_decks[0]
assert r_deck.id == deck.id
assert r_deck.name == deck.name
assert len(context.cards(r_deck)) == 2
assert card_1.id in [card.id for card in context.cards(r_deck)]
assert card_2.id in [card.id for card in context.cards(r_deck)]
| [
"[email protected]"
]
| |
70ac93f65865b3fb1662de2524dd00f377c1feea | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03474/s623958359.py | 5c4abb6865ad4a5abd0e9ef526ce516ab50152cb | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | A, B = input().split(" ")
S = input()
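# Valid iff S reads as A digits, then a hyphen, then B digits.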
if S[int(A)] == '-' and S[:int(A)].isdecimal() and S[int(A)+1:int(A)+int(B)+1].isdecimal():
print("Yes")
else:
print("No") | [
"[email protected]"
]
| |
27762e0a9409dd7def2603c56329ed23a8c33638 | a214e706c875e0af7221c0c9ae193d9d93ee20a7 | /vaast_2individuals.py | dc21ad72109ea2326e1ef142d06fa97715ba8039 | []
| no_license | inambioinfo/bioinformatics_scripts | fa2292e91ad4134204a09ace27c8a91ae70fa34c | 3a23611f382b7f3dd60e5e2abe841b84408c0d44 | refs/heads/master | 2020-03-20T21:17:10.163061 | 2017-03-28T23:41:39 | 2017-03-28T23:41:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,096 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#running program
#python ~/lgc/scripts/vaast_2individuals.py -i ~/projects/exome_analysis/input/boston/Boston_Exome_noAnnotation_auto_sex.vcf ~/projects/exome_analysis/input/boston/Exome_Brazil_auto_sex.vcf
from optparse import OptionParser
import os
__author__ = "Raony Guimarães"
__copyright__ = "Copyright 2011, The Exome Pipeline"
__credits__ = ["Raony Guimarães"]
__license__ = "GPL"
__version__ = "1.0.1"
__maintainer__ = "Raony Guimarães"
__email__ = "[email protected]"
__status__ = "Production"
#Options
parser = OptionParser()
parser.add_option('-i', help='vcf', dest='affecteds', nargs=2) #, nargs=2
(options, args) = parser.parse_args()
affecteds=options.affecteds
#VAAST ANALYSIS 1.0.3
vaast_dir="/projects/trio_analysis/bin/VAAST_Code_1.0.1/bin"
out_dir="output"
genes="/lgc/datasets/vaast_data2/hg19/Features/refGene_hg19.gff3"
reference="/lgc/datasets/vaast_data2/hg19/Fasta/chrAll.fasta"
background="/lgc/datasets/vaast_data2/hg19/Background_CDR/1KG_179-refGene-hg19-liftover.cdr"
command = "mkdir gvf"
os.system(command)
command = "mkdir simulations"
os.system(command)
#Convert vcf files to GVF Files
command = "perl %s/vaast_tools/vaast_converter --path gvf/ -b hg19 %s" % (vaast_dir, " ".join(affecteds))
os.system(command)
affected_path = os.getcwd()+"/gvf"
#get files in GVF Folter
affecteds = []
dirList=os.listdir(affected_path)
for fname in dirList:
affected = {}
affected['name'] = fname.split('.')[0]
affected['path'] = affected_path+'/'+fname
affecteds.append(affected)
##VAT
print "Start VAT"
#affecteds
for affected in affecteds:
#0277-P1_1
#Exome
#if affected['name'] == 'Exome'
command = "%s/VAT -f %s -a %s -b hg19 %s > %s.vat.gvf" % (vaast_dir, genes, reference, affected['path'], affected['name'])
#os.system(command)
print "Start VST"
vst_list = []
for affected in affecteds:
filename = affected['name']+'.vat.gvf'
vst_list.append(filename)
##VST Daugther
command = "%s/VST -o 'U(0..1)' -b hg19 %s > target.cdr" % (vaast_dir, " ".join(vst_list))
#os.system(command)
############## Finally Running VAAST !!!
print "Start VAAST Analysis"
simulations = []
#Simulation One - run without locus heterogeinety -lh n - look only for one gene
command = "time %s/VAAST -mp1 4 -e -iht d -lh y -fast_gp -m lrt --rate 0.001 --codon_bias --gp 10000 -o simulations/simulation_one %s %s target.cdr" % (vaast_dir, genes, background)
os.system(command)
command = "~/projects/trio_analysis/bin/VAAST_Code_1.0.1/bin/vaast_tools/simple_output.pl simulations/simulation_one.vaast > simulations/simulation_one.list"
os.system(command)
#Simulation Two - run with locus heterogeinety -lh y -look more than one gene
command = "time %s/VAAST -mp1 4 -e -iht d -lh y -fast_gp -d 1e5 -o simulations/simulation_two -r 0.00035 -m lrt -k %s %s target.cdr" % (vaast_dir, genes, background)
os.system(command)
command = "~/projects/trio_analysis/bin/VAAST_Code_1.0.1/bin/vaast_tools/simple_output.pl simulations/simulation_two.vaast > simulations/simulation_two.list"
os.system(command)
#Simulation Three -
simulation = {}
simulation['name'] = 'simulation_three'
simulation['parameters'] = '-iht d -lh y -fast_gp -d 1e7 -r 0.00035 -m lrt -k'
simulations.append(simulation)
#Simulation Four -
simulation = {}
simulation['name'] = 'simulation_four'
simulation['parameters'] = '-iht d -lh y -fast_gp -d 1e10 --significance 2.4e-6 -m lrt -k'
simulations.append(simulation)
#Simulation Five -
simulation = {}
simulation['name'] = 'simulation_five'
simulation['parameters'] = '-iht d -lh n --mode lrt --codon_bias --penetrance i --gp 10000 -k'
simulations.append(simulation)
#Simulation Six -
simulation = {}
simulation['name'] = 'simulation_six'
simulation['parameters'] = '-pnt y -iht d -lh n -fast_gp -d 1e10 --significance 2.4e-6 -r 0.00035 -m lrt -k'
simulations.append(simulation)
for simulation in simulations:
command = "time %s/VAAST -mp1 4 -e %s -o simulations/%s %s %s target.cdr && /projects/trio_analysis/bin/VAAST_Code_1.0.1/bin/vaast_tools/simple_output.pl simulations/%s.vaast > simulations/%s.genelist" % (vaast_dir, simulation['parameters'], simulation['name'], genes, background, simulation['name'], simulation['name'])
os.system(command)
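# die() is undefined; the NameError it raises halts the script here (evidently a deliberate stop).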
die()
#../bin/VAAST -iht r -lh n -fast_gp -d 1e4 -o test -r 0.00035 -m lrt -k data/easy-hg18-chr16.gff3 data/189genomes-chr16.cdr data/miller_output.cdr
#../bin/VAAST -iht r -lh n -fast_gp -d 1e6 -o test -m lrt -k data/easy-hg18-chr16.gff3 data/189genomes-chr16.cdr data/miller_output.cdr
#../bin/VAAST -iht r -lh n -fast_gp -d 1e4 -o test -r 0.00035 -m lrt -k
die()
#command = "time %s/VAAST -iht r -lh n -fast_gp -d 1e6 -o %s -m lrt -k %s %s %s.cdr" % (vaast_dir, affected_filename, genes, background, affected_filename)
command = "time %s/VAAST -mp 4 -iht r -lh y -pnt c -fast_gp -m lrt -d 1e6 -k --outfile %s %s %s Exome.cdr" % (vaast_dir, affected_filename, genes, background)
#print command
os.system(command)
command = "~/projects/trio_analysis/bin/VAAST_Code_1.0.1/bin/vaast_tools/simple_output.pl vcf_withchr.vaast > vaast.genelist"
os.system(command)
#INFO : processing_finished : Output can be found in vcf_withchr.vaast
#106844.33user 3080.28system 5:22:43elapsed 567%CPU (0avgtext+0avgdata 131395648maxresident)k
#0inputs+9273720outputs (0major+13023585minor)pagefaults 0swaps
#command = "%s/VAAST -mp1 8 --mode lrt --outfile %s %s %s %s.cdr" % (vaast_dir, affected_filename, genes, background, affected_filename)
#os.system(command)
##affected
#command = "time %s/VAAST -mp1 8 -mp2 8 --mode lrt --codon_bias --gp 10000 --outfile %s/vaast_results/affected_analysis %s %s %s/affecteds.cdr 2>run2_log" % (vaast_dir, out_dir, genes, background, out_dir)
##os.system(command)
##Parametrization
#command = "%s/VAAST -t %s/parents_union.cdr -pnt c -iht r -lh n -fast_gp -d 1e10 --significance 2.4e-6 -mp1 4 -mp2 4 -o trio_nalaysis_v1 -r 0.00035 -m lrt -k %s %s %s/daughter.cdr" % (vaast_dir, out_dir, genes, background, out_dir)
##os.system(command)
##generate list
#command = "%s/vaast_tools/simple_output.pl %s/trio_nalaysis_v1 > trio_nalaysis_v1.genelist" % (vaast_dir, out_dir)
##os.system(command)
#command = "time %s/VAAST -t %s/parents_union.cdr -pnt i -iht r -lh n -fast_gp -d 1e10 --significance 2.4e-6 -mp1 4 -o trio_nalaysis_v1_pnt_i -r 0.00035 -m lrt -k %s %s %s/daughter.cdr" % (vaast_dir, out_dir, genes, background, out_dir)
##os.system(command)
##generate list
#command = "%s/vaast_tools/simple_output.pl %s/trio_nalaysis_v1_pnt_i > trio_nalaysis_v1_pnt_i.genelist" % (vaast_dir, out_dir)
##os.system(command)
#command = "time %s/VAAST --mode lrt --codon_bias --inheritance r --penetrance i --gp 10000 --trio %s/parents_union.cdr --outfile %s/trio_analysis_with_179bg_withpenetrance %s %s %s/daughter.cdr" % (vaast_dir, out_dir, out_dir, genes, background, out_dir)
##os.system(command)
##generate list
#command = "%s/vaast_tools/simple_output.pl %s/trio_analysis_with_179bg_withpenetrance > trio_analysis_with_179bg_withpenetrance.genelist" % (vaast_dir, out_dir)
##os.system(command)
#%s/daughter.cdr
#../bin/vaast_tools/simple_output.pl
#VAAST
#time $VAAST_DIR/VAAST -p 8 -q 8 --mode lrt --rate 0.001 --codon_bias --gp 10000 --outfile $OUT_DIR/normal $GENES $BACKGROUND $OUT_DIR/affecteds.cdr
#Trio Analysis
#Generate parents.cdr using intersection or union ? (Intersection)
#time $VAAST_DIR/VAAST --mode lrt --codon_bias --inheritance r --penetrance i --gp 10000 --trio $OUT_DIR/parents.cdr --outfile $OUT_DIR/vaast_output/trio_analysis_with_179bg_withpenetrance $GENES $BACKGROUND $OUT_DIR/daughter.cdr
#time $VAAST_DIR/VAAST --mode lrt --codon_bias --gp 10000 --trio $OUT_DIR/parents.cdr --outfile $OUT_DIR/trio_analysis $GENES $BACKGROUND $OUT_DIR/daughter.cdr
#VAAST -m lrt –iht r –lh n –pnt c -r 0.00035 -p 4 -w 100000 -d 100000 -k -g 4 -x 35bp_se.wig -o <output_file> <feature_file> <189genome.cdr> <target>
#AAS with OMIM command-line:
#VAAST -q <degree of parallelization> -f -c <chromosome_number> -g 4 -k -d 100000000 -j 2.38E-6 -m lrt -o <output_file> <feature_file > <189genome.cdr> <target>
#AAS with blosum62 command-line:
#VAAST -q <degree of parallelization> -f -c <chromosome_number> -g 4 -k -b blosum62.matrix -d 100000000 -j 2.38E-6 -m lrt -o <output_file> <feature_file> <189genome.cdr> <target>
#No AAS command-line:
#VAAST -q <degree of parallelization> -f -c <chromosome_number> -g 4 -d 100000000 -j 2.38E-6 -m lrt -o <output_file> <feature_file> <189genome.cdr> <target>
#WSS command-line:
#VAAST -q <degree of parallelization> -c <chromosome_number> -d 100000000 -j 2.38E-6 -m wss -o <output_file> <feature_file> <189genome.cdr> <target>
#$VAAST_DIR/VAAST --mode lrt --codon_bias --gp 10000 --outfile $OUT_DIR/2affected $GENES $BACKGROUND $OUT_DIR/affecteds.cdr
#try this
#VAAST -m lrt –iht r –lh n –pnt c -r 0.00035 -p 4 -w 100000 -d 100000 -k -g 4 -x 35bp_se.wig -o <output_file> <feature_file> <189genome.cdr> <target>
| [
"[email protected]"
]
| |
751e7e510693cd71b1f4b18d04e6b3a6188682f2 | 98d832289b7437247ce03ea54ad3cb7b95451159 | /test/test_cpu_info.py | 84e719a037ea9352189cdc1d041d264c10e1d0bb | [
"MIT"
]
| permissive | rmehilli-r7/vm-console-client-python | 7f02f13345dce4f4d4d85e18da7146daeefbceb9 | 069041c1c7b53c6b3d8bfdd81b974141bfca3c0c | refs/heads/master | 2020-03-23T11:20:33.364442 | 2018-08-10T20:06:37 | 2018-08-10T20:06:37 | 141,498,444 | 0 | 0 | MIT | 2018-08-08T19:58:45 | 2018-07-18T23:00:41 | Python | UTF-8 | Python | false | false | 48,826 | py | # coding: utf-8
"""
InsightVM API
    # Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"/api/3/json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follows the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type.
 | `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. | `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with a link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed.
 ### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for an idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. | | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults to `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s).
The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... ``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. Allowed operations are returned and may be performed on the resource. 
| | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. | Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. 
This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is like` ` not like` | | `container-status` | `is` ` is not` | | `containers` | `are` | | `criticality-tag` | `is` ` is not` ` is greater than` ` is less than` ` is applied` ` is not applied` | | `custom-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `cve` | `is` ` is not` ` contains` ` does not contain` | | `cvss-access-complexity` | `is` ` is not` | | `cvss-authentication-required` | `is` ` is not` | | `cvss-access-vector` | `is` ` is not` | | `cvss-availability-impact` | `is` ` is not` | | `cvss-confidentiality-impact` | `is` ` is not` | | `cvss-integrity-impact` | `is` ` is not` | | `cvss-v3-confidentiality-impact` | `is` ` is not` | | `cvss-v3-integrity-impact` | `is` ` is not` | | `cvss-v3-availability-impact` | `is` ` is not` | | `cvss-v3-attack-vector` | `is` ` is not` | | `cvss-v3-attack-complexity` | `is` ` is not` | | `cvss-v3-user-interaction` | `is` ` is not` | | `cvss-v3-privileges-required` | `is` ` is not` | | `host-name` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is empty` ` is not empty` ` is like` ` not like` | | `host-type` | `in` ` not in` | | `ip-address` | `is` ` is not` ` in range` ` not in range` ` is like` ` not like` | | `ip-address-type` | `in` ` not in` | | `last-scan-date` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `location-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is earlier than` | | `open-ports` | `is` ` is not` ` in range` | | `operating-system` | `contains` ` does not contain` ` is empty` ` is not empty` | | `owner-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is not` ` in range` ` greater than` ` less than` | | `service-name` | `contains` ` does not contain` | | `site-id` | `in` ` not in` | | `software` | `contains` ` does not contain` | | `vAsset-cluster` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | 
`vAsset-datacenter` | `is` ` is not` | | `vAsset-host-name` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-power-state` | `in` ` not in` | | `vAsset-resource-pool-path` | `contains` ` does not contain` | | `vulnerability-assessed` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `vulnerability-category` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` | | `vulnerability-cvss-v3-score` | `is` ` is not` | | `vulnerability-cvss-score` | `is` ` is not` ` in range` ` is greater than` ` is less than` | | `vulnerability-exposures` | `includes` ` does not include` | | `vulnerability-title` | `contains` ` does not contain` ` is` ` is not` ` starts with` ` ends with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | `cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | 
----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `string` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. 
The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
OpenAPI spec version: 3
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.cpu_info import CPUInfo # noqa: E501
from swagger_client.rest import ApiException
class TestCPUInfo(unittest.TestCase):
"""CPUInfo unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCPUInfo(self):
"""Test CPUInfo"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.cpu_info.CPUInfo() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
f402f3c6a9657695f8d857cecbdc279ef499cf31 | 640121a717de7b587c56d2136ee9c981bd7c9660 | /server/dvaui/apps.py | b0d4d87d6ecf7c11817286075e99549266b03bd5 | [
"BSD-3-Clause",
"MIT",
"BSL-1.0",
"Apache-2.0"
]
| permissive | jbkarle/DeepVideoAnalytics | dd4535e990a7f3af9e53843e6df97340ee0c6b71 | 9a3717ea30a86f97511a150d6538e309e19b7fbc | refs/heads/master | 2020-03-31T15:01:52.854557 | 2018-10-07T17:11:37 | 2018-10-07T17:11:37 | 152,320,716 | 1 | 0 | null | 2018-10-09T21:01:10 | 2018-10-09T21:01:09 | null | UTF-8 | Python | false | false | 150 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class DvauiConfig(AppConfig):
name = 'dvaui'
| [
"[email protected]"
]
| |
6403015b2e85eccd4b5985e9530f4e7ed6ce27bb | ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f | /P.O.R.-master/pirates/battle/FishingRod.py | a362c40a58d8ec0d8eed09445dd7b6dcb932ffd6 | []
| no_license | BrandonAlex/Pirates-Online-Retribution | 7f881a64ec74e595aaf62e78a39375d2d51f4d2e | 980b7448f798e255eecfb6bd2ebb67b299b27dd7 | refs/heads/master | 2020-04-02T14:22:28.626453 | 2018-10-24T15:33:17 | 2018-10-24T15:33:17 | 154,521,816 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,038 | py | import Weapon
import WeaponGlobals
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
from direct.interval.IntervalGlobal import *
from pandac.PandaModules import *
from pirates.uberdog.UberDogGlobals import InventoryType
from pirates.piratesbase import PLocalizer
from pirates.effects import PolyTrail
import random
class FishingRod(Weapon.Weapon):
modelTypes = [
'models/handheld/pir_m_hnd_tol_fishingPole',
'models/handheld/pir_m_hnd_tol_fishingPoleMed',
'models/handheld/pir_m_hnd_tol_fishingPoleLarge']
models = { }
icons = { }
vertex_list = [
Vec4(0.0, 0.40000000000000002, 0.0, 1.0),
Vec4(0.0, 2.0, 0.0, 1.0),
Vec4(-0.55000000000000004, 2.9500000000000002, 0.0, 1.0)]
motion_color = {
InventoryType.CutlassWeaponL1: [
Vec4(0.29999999999999999, 0.40000000000000002, 0.10000000000000001, 0.5),
Vec4(0.29999999999999999, 0.29999999999999999, 0.29999999999999999, 0.5),
Vec4(0.59999999999999998, 0.59999999999999998, 0.59999999999999998, 0.5)],
InventoryType.CutlassWeaponL2: [
Vec4(0.10000000000000001, 0.20000000000000001, 0.40000000000000002, 0.5),
Vec4(0.40000000000000002, 0.5, 0.69999999999999996, 0.5),
Vec4(0.5, 0.5, 0.90000000000000002, 0.75)],
InventoryType.CutlassWeaponL3: [
Vec4(1, 1, 0.40000000000000002, 0.5),
Vec4(0.40000000000000002, 0.5, 0.59999999999999998, 0.5),
Vec4(0.69999999999999996, 0.69999999999999996, 0.80000000000000004, 0.75)],
InventoryType.CutlassWeaponL4: [
Vec4(0.59999999999999998, 0.59999999999999998, 0.75, 1),
Vec4(0.59999999999999998, 0.5, 0.20000000000000001, 1),
Vec4(0.59999999999999998, 0.59999999999999998, 0.40000000000000002, 1)],
InventoryType.CutlassWeaponL5: [
Vec4(1, 0.20000000000000001, 0.20000000000000001, 0.5),
Vec4(0.5, 0.5, 0.5, 0.75),
Vec4(0.69999999999999996, 0.69999999999999996, 0.90000000000000002, 1)],
InventoryType.CutlassWeaponL6: [
Vec4(1, 1, 0, 0.5),
Vec4(0.29999999999999999, 0.29999999999999999, 0.29999999999999999, 1),
Vec4(0.10000000000000001, 0.10000000000000001, 0.10000000000000001, 1)] }
def __init__(self, itemId):
Weapon.Weapon.__init__(self, itemId, 'fishingRod')
def loadModel(self):
self.prop = self.getModel(self.itemId)
self.prop.reparentTo(self)
def delete(self):
self.endAttack(None)
self.removeTrail()
Weapon.Weapon.delete(self)
def getDrawIval(self, av, ammoSkillId = 0, blendInT = 0.10000000000000001, blendOutT = 0):
track = Parallel(Func(base.playSfx, self.drawSfx, node = av, cutoff = 60), av.actorInterval('sword_draw', playRate = 1.5, endFrame = 15, blendInT = blendInT, blendOutT = blendOutT), Sequence(Wait(0.187), Func(self.attachTo, av)))
return track
def getReturnIval(self, av, blendInT = 0, blendOutT = 0.10000000000000001):
track = Parallel(Func(base.playSfx, self.returnSfx, node = av, cutoff = 60), av.actorInterval('sword_putaway', playRate = 2, endFrame = 35, blendInT = blendInT, blendOutT = blendOutT), Sequence(Wait(0.56000000000000005), Func(self.detachFrom, av)))
return track
def attachTo(self, av):
Weapon.Weapon.attachTo(self, av)
if hasattr(av, 'isGhost') and av.isGhost:
return None
self.createTrail(av)
def detachFrom(self, av):
Weapon.Weapon.detachFrom(self, av)
self.removeTrail()
def createTrail(self, target):
if self.isEmpty():
return None
if not self.motion_trail:
self.motion_trail = PolyTrail.PolyTrail(target, self.vertex_list, self.motion_color.get(self.itemId))
self.motion_trail.reparentTo(self)
self.motion_trail.setUseNurbs(1)
card = loader.loadModel('models/effects/swordtrail_effects')
tex = card.find('**/swordtrail_lines').findTexture('*')
self.motion_trail.setTexture(tex)
self.motion_trail.setBlendModeOn()
if self.itemId == InventoryType.CutlassWeaponL6:
self.motion_trail.setBlendModeOff()
card.removeNode()
def removeTrail(self):
if self.motion_trail:
self.motion_trail.destroy()
self.motion_trail = None
def getBlurColor(self):
return self.motion_color.get(self.itemId)[2]
def beginAttack(self, av):
Weapon.Weapon.beginAttack(self, av)
def setupSounds(cls):
FishingRod.hitSfxs = (loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_CLASHCLANG), loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SWIPECLANG_01), loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SWIPECLANG_02), loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SWIPECLANG_03))
FishingRod.missSfxs = (loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SWOOSH_01), loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SWOOSH_02))
FishingRod.skillSfxs = {
InventoryType.FishingRodStall: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_HACK),
InventoryType.FishingRodPull: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_HACK),
InventoryType.FishingRodHeal: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_HACK),
InventoryType.FishingRodTug: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_HACK),
InventoryType.FishingRodSink: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_HACK),
InventoryType.FishingRodOceanEye: loadSfx(SoundGlobals.SFX_WEAPON_CUTLASS_SLASH) }
FishingRod.drawSfx = loadSfx(SoundGlobals.SFX_MINIGAME_FISHING_REEL_END)
FishingRod.returnSfx = loadSfx(SoundGlobals.SFX_MINIGAME_FISHING_ROD_OUT)
setupSounds = classmethod(setupSounds)
    # Exposed as statics via the same post-hoc convention as setupSounds above;
    # without the wrappers, FishingRod.getHitSfx() would fail as an unbound call.
    def getHitSfx():
        return FishingRod.hitSfxs

    getHitSfx = staticmethod(getHitSfx)

    def getMissSfx():
        return FishingRod.missSfxs

    getMissSfx = staticmethod(getMissSfx)
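# Hypothetical usage (engine code normally drives this): load the shared
# sound effects once per session, then attach a rod to an avatar `av`.
# The itemId below is illustrative, not a confirmed InventoryType constant.
#
#   FishingRod.setupSounds()
#   rod = FishingRod(InventoryType.FishingRodWeaponL1)
#   rod.attachTo(av)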
| [
"[email protected]"
]
| |
6990e4b87e0734225f804e3c1b915ae8f9869911 | 6ecff67d6103ddbd787f78c35182722b83b8a37e | /백준/Python/알고파/최단경로/1613.py | 4ca4e3cd15c8a04c41309bc939b9e462ad70c7a2 | []
| no_license | jsungmin6/Algorithm | 9ef2339aa00921e7df756a8dff569954a008c118 | bc1ea9de9f7ba3f1aa6616ebef8719540d72e0bf | refs/heads/master | 2023-05-27T06:24:16.123307 | 2021-06-11T09:22:21 | 2021-06-11T09:22:21 | 259,299,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | # 역사
# 풀이 과정
'''
여러 출발점과 도착점의 경로가 있냐 없냐를 판단하는 거기 때문에 플로이드 와샬을 사용해
dist를 전부 구한 후 판단한다.
'''
import sys
input = sys.stdin.readline

n, k = map(int, input().split())
# dist[i][j] < sys.maxsize  <=>  event i is known to happen before event j
dist = [[0 if i == j else sys.maxsize for i in range(n+1)] for j in range(n+1)]
for _ in range(k):
    u, v = map(int, input().split())
    dist[u][v] = 1
# Floyd-Warshall closes the precedence relation transitively.
for m in range(n+1):
    for i in range(n+1):
        for j in range(n+1):
            dist[i][j] = min(dist[i][j], dist[i][m]+dist[m][j])
q = int(input())
for _ in range(q):
    s, e = map(int, input().split())
    if dist[s][e] == sys.maxsize and dist[e][s] == sys.maxsize:
        print(0)   # no known order between the two events
    elif dist[s][e] == sys.maxsize:
        print(1)   # e precedes s: the second event is earlier
    else:
        print(-1)  # s precedes e: the first event is earlier
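# Worked example (illustrative): n=3 events with k=2 known relations,
# "1 before 2" and "2 before 3". Floyd-Warshall then yields dist[1][3] = 2,
# so the query "1 3" prints -1 (the first event is earlier), while a pair
# with no path in either direction would print 0.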
| [
"[email protected]"
]
| |
d1a8f1afed15326e2da00c5d6dc38c274523879a | 1eab07420ddbc6774b0dd6f515da5110ed7344af | /brax/experimental/braxlines/experiments/mimax_sweep.py | 09f8390c0598d7fac24c00e990d51f79f848ae95 | [
"Apache-2.0"
]
| permissive | TedTinker/brax | a16097d87607a8bdee46f5d0784fff29e66ca22f | 1d5e70c0c96d1a0dde68901bdabef2c9431c32b3 | refs/heads/main | 2023-08-14T08:22:42.316233 | 2021-09-28T20:08:43 | 2021-09-28T20:08:43 | 410,128,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,321 | py | # Copyright 2021 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MI-Max Sweep."""
AGENT_MODULE = 'brax.experimental.braxlines.vgcrl.train'
CONFIG = [
dict(
env_name=['ant', 'halfcheetah'],
obs_indices='vel',
algo_name=['gcrl', 'diayn', 'cdiayn', 'diayn_full'],
obs_scale=5.0,
seed=list(range(10)),
normalize_obs_for_disc=False,
evaluate_mi=True,
evaluate_lgr=True,
env_reward_multiplier=0.0,
spectral_norm=True,
ppo_params=dict(
num_timesteps=int(2.5 * 1e8),
reward_scaling=10,
episode_length=1000,
normalize_observations=True,
action_repeat=1,
unroll_length=5,
num_minibatches=32,
num_update_epochs=4,
discounting=0.95,
learning_rate=3e-4,
entropy_cost=1e-2,
num_envs=2048,
batch_size=1024,
)),
dict(
env_name=[
'humanoid',
],
obs_indices='vel',
algo_name=['gcrl', 'diayn', 'cdiayn', 'diayn_full'],
obs_scale=5.0,
seed=list(range(10)),
normalize_obs_for_disc=False,
evaluate_mi=True,
evaluate_lgr=True,
env_reward_multiplier=0.0,
spectral_norm=True,
ppo_params=dict(
num_timesteps=int(2.5 * 1e8),
log_frequency=20,
reward_scaling=0.1,
episode_length=1000,
normalize_observations=True,
action_repeat=1,
unroll_length=10,
num_minibatches=16,
num_update_epochs=8,
discounting=0.97,
learning_rate=1e-4,
entropy_cost=1e-3,
num_envs=2048,
batch_size=1024,
)),
]
| [
"[email protected]"
]
| |
69cf9b378381c44f262b107a06c246bdbc38bfdd | a148a0a0fb3209c754b9d6836baa837d3c02e30f | /garbage_code_dont_delete_btw/find_optimized_parameters.py | 426e19aca057cbbe4dbbada8cc902f56d9b0225a | []
| no_license | GreenBlitz/Deep-Space-Vision | 645b64f98bf26500ba501651d332a8cd82f0f340 | c0d8ad10cf42fbac79b42141a44c3bbb00beabb7 | refs/heads/master | 2020-03-28T19:32:30.563636 | 2018-12-13T16:59:21 | 2018-12-13T16:59:21 | 148,985,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | import cv2
import random
import numpy as np
def create_params(shape, factor):
    # A random parameter matrix in [0, factor); e.g. shape (3, 2) holds a
    # lower/upper threshold pair per channel.
    return np.random.rand(*shape)*factor


def get_score(item, frame, bbox, func, reg):
    # Fitness of one candidate on one labeled frame: mean mask activation
    # inside the ground-truth bbox, minus the mean activation outside it,
    # minus a penalty on the spread between the two threshold columns.
    frametag = func(frame, item)
    f = frametag[bbox[1]:bbox[1] + bbox[3], bbox[0]:bbox[0] + bbox[2]]
    s = f.sum()
    return s/f.size - (frametag.sum() - s)/(frametag.size - f.size) - reg*(np.abs(item[:, 0] - item[:, 1]).sum())


def create_child(sur, alpha, factor):
    # Mutation: a randomly signed perturbation whose magnitude decays
    # exponentially with alpha.  The "- 0.5" makes the sign genuinely random;
    # np.sign(np.random.rand(...)) alone is always +1.
    child = np.sign(np.random.rand(*sur[0].shape) - 0.5)*10**(-alpha * np.random.rand(*sur[0].shape))*factor
    # Crossover: each gene is inherited from a randomly chosen survivor.
    for i in range(len(sur[0])):
        child[i] += random.choice(sur)[i]
    return child


def find_optimized_parameters(function, images, bboxes, p_shape, gen_size=50, survivors_size=1, p_factor=255, alpha=50, max_iter=100, gen_random=5, c_factor=1, range_regulator=0.5):
    # Simple genetic search over the parameters of `function`; survivors_size
    # must be at least 1, otherwise create_child has nothing to breed from.
    gen = []
    all_scores = []
    best = None
    max_score = -np.inf
    for i in range(gen_size):
        gen.append(create_params(p_shape, p_factor))
    for gen_i in range(max_iter):
        scores = []
        all_scores.append(0)
        for i in gen:
            # Total fitness of this candidate over every labeled image.
            total = 0
            for j, im in enumerate(images):
                total += get_score(i, im, bboxes[j], function, range_regulator)
            scores.append([i, total])
            all_scores[gen_i] = max(all_scores[gen_i], total)
            if total > max_score:
                max_score = total
                best = i
        # Keep the flattened genomes of the top performers as breeding stock.
        survivors = list(map(lambda x: x[0].flatten(), sorted(scores, key=lambda x: x[1], reverse=True)))[:survivors_size]
        gen.clear()
        for i in range(gen_size-gen_random):
            gen.append(create_child(survivors, alpha, c_factor).reshape(p_shape))
        for i in range(gen_random):
            gen.append(create_params(shape=p_shape, factor=p_factor))
    return best, all_scores
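# Hypothetical usage sketch (not part of the original module): evolve an HSV
# threshold pair.  Assumes the scoring function returns a {0, 1} mask and each
# bbox is (x, y, w, h) in pixel coordinates; the file path is a placeholder.
#
#   def hsv_threshold(frame, params):
#       hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#       lower = np.minimum(params[:, 0], params[:, 1])
#       upper = np.maximum(params[:, 0], params[:, 1])
#       return cv2.inRange(hsv, lower, upper).astype(np.float64) / 255.0
#
#   images = [cv2.imread("target.png")]
#   bboxes = [(40, 60, 120, 80)]
#   best, history = find_optimized_parameters(
#       hsv_threshold, images, bboxes, p_shape=(3, 2), survivors_size=5)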
"[email protected]"
]
| |
ba04ac28a0639f1b3b7a11f709742aba37db888d | b0717aeda1942dd35221e668b5d793077c074169 | /env/lib/python3.7/site-packages/twilio/rest/sync/v1/service/sync_stream/__init__.py | 6b2ee533844fc06a31ab795872db7afb709bfa4b | [
"MIT"
]
| permissive | stevehind/sms-steve-server | 3fdeed6de19f29aeaeb587fe7341831036455a25 | 9b0dac19f2e6ccf6452e738017132d93e993870b | refs/heads/master | 2022-12-21T23:34:10.475296 | 2020-01-27T16:24:39 | 2020-01-27T16:24:39 | 231,842,510 | 0 | 0 | MIT | 2022-05-25T05:03:16 | 2020-01-04T23:25:23 | Python | UTF-8 | Python | false | false | 16,649 | py | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.sync.v1.service.sync_stream.stream_message import StreamMessageList
class SyncStreamList(ListResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid):
"""
Initialize the SyncStreamList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service that the resource is associated with
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
"""
super(SyncStreamList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, }
self._uri = '/Services/{service_sid}/Streams'.format(**self._solution)
def create(self, unique_name=values.unset, ttl=values.unset):
"""
Create the SyncStreamInstance
:param unicode unique_name: An application-defined string that uniquely identifies the resource
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: The created SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
data = values.of({'UniqueName': unique_name, 'Ttl': ttl, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return SyncStreamInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def stream(self, limit=None, page_size=None):
"""
Streams SyncStreamInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists SyncStreamInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SyncStreamInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return SyncStreamPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SyncStreamInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SyncStreamPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a SyncStreamContext
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
return SyncStreamContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __call__(self, sid):
"""
Constructs a SyncStreamContext
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
return SyncStreamContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncStreamList>'
class SyncStreamPage(Page):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, response, solution):
"""
Initialize the SyncStreamPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Sync Service that the resource is associated with
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
super(SyncStreamPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SyncStreamInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return SyncStreamInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncStreamPage>'
class SyncStreamContext(InstanceContext):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid, sid):
"""
Initialize the SyncStreamContext
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service with the Sync Stream resource to fetch
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
super(SyncStreamContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'sid': sid, }
self._uri = '/Services/{service_sid}/Streams/{sid}'.format(**self._solution)
# Dependents
self._stream_messages = None
def fetch(self):
"""
Fetch the SyncStreamInstance
:returns: The fetched SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return SyncStreamInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the SyncStreamInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def update(self, ttl=values.unset):
"""
Update the SyncStreamInstance
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: The updated SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
data = values.of({'Ttl': ttl, })
payload = self._version.update(method='POST', uri=self._uri, data=data, )
return SyncStreamInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
@property
def stream_messages(self):
"""
Access the stream_messages
:returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
:rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
"""
if self._stream_messages is None:
self._stream_messages = StreamMessageList(
self._version,
service_sid=self._solution['service_sid'],
stream_sid=self._solution['sid'],
)
return self._stream_messages
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncStreamContext {}>'.format(context)
class SyncStreamInstance(InstanceResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, payload, service_sid, sid=None):
"""
Initialize the SyncStreamInstance
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
super(SyncStreamInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'unique_name': payload.get('unique_name'),
'account_sid': payload.get('account_sid'),
'service_sid': payload.get('service_sid'),
'url': payload.get('url'),
'links': payload.get('links'),
'date_expires': deserialize.iso8601_datetime(payload.get('date_expires')),
'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
'created_by': payload.get('created_by'),
}
# Context
self._context = None
self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SyncStreamContext for this SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
if self._context is None:
self._context = SyncStreamContext(
self._version,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def unique_name(self):
"""
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
"""
return self._properties['unique_name']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def service_sid(self):
"""
:returns: The SID of the Sync Service that the resource is associated with
:rtype: unicode
"""
return self._properties['service_sid']
@property
def url(self):
"""
:returns: The absolute URL of the Message Stream resource
:rtype: unicode
"""
return self._properties['url']
@property
def links(self):
"""
:returns: The URLs of the Stream's nested resources
:rtype: unicode
"""
return self._properties['links']
@property
def date_expires(self):
"""
:returns: The ISO 8601 date and time in GMT when the Message Stream expires
:rtype: datetime
"""
return self._properties['date_expires']
@property
def date_created(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def created_by(self):
"""
:returns: The Identity of the Stream's creator
:rtype: unicode
"""
return self._properties['created_by']
def fetch(self):
"""
Fetch the SyncStreamInstance
:returns: The fetched SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the SyncStreamInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, ttl=values.unset):
"""
Update the SyncStreamInstance
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: The updated SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return self._proxy.update(ttl=ttl, )
@property
def stream_messages(self):
"""
Access the stream_messages
:returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
:rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
"""
return self._proxy.stream_messages
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncStreamInstance {}>'.format(context)
| [
"[email protected]"
]
| |
b41ba04907184cd37b158434c5d33df39713c56e | 48519d4299911ce2a3ca70043079df419155c156 | /typistry/test/validations_test.py | 2ca7ee5e91bfad1903c8486d39490fa063915396 | [
"Apache-2.0"
]
| permissive | kyprifog/typistry | 18c3e010925db5b4a2422bc6eefb69d5da4c2ab9 | aab285d909791106154874eb5331b65fc03849ae | refs/heads/master | 2023-03-12T21:29:09.998425 | 2021-03-02T16:24:48 | 2021-03-02T16:24:48 | 343,979,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,756 | py | from os import path
from shutil import copytree, rmtree
from typing import List, Union, Any, Tuple
from typistry.test.support.types.other_class import OtherClass
from typistry.protos.invalid_object import InvalidObject
from typistry.test.support.types.test_class import TestClass
from typistry.util.path import from_root
from typistry.validators.base import validate_files, filter_type
class TestValidations:
def schemas_path(self) -> str:
return from_root("/test/support/validations/")
def yaml_path(self) -> str:
return from_root("/test/support/yaml/")
def all_protos(self) -> List[Union[Any, InvalidObject]]:
return validate_files(self.yaml_path(), self.schemas_path())
def filter_by_types(self, all: List[Any]) -> Tuple[List[TestClass], List[OtherClass], List[InvalidObject]]:
test_class = filter_type(all, TestClass)
other_class = filter_type(all, OtherClass)
invalid = filter_type(all, InvalidObject)
return (test_class, other_class, invalid)
def test_single_file(self):
yaml_path = from_root("/test/support/yaml/test_class/good_1.yaml")
obj = validate_files(yaml_path, self.schemas_path())
assert(len(obj) == 1)
assert(isinstance(obj[0], TestClass))
def test_directory(self):
all = self.all_protos()
test_class, other_class, invalid = self.filter_by_types(all)
assert(len(test_class) == 2)
assert(len(other_class) == 1)
# Tests protoclass build definition adds 10, otherwise would be 2
assert(other_class[0].test == 12)
assert(len(invalid) == 3)
def test_default_schema_path(self):
default_path = "validations/"
if not path.exists(default_path):
copytree(self.schemas_path(), default_path)
all = validate_files(self.yaml_path())
assert(len(all) == 6)
test_class, other_class, invalid = self.filter_by_types(all)
assert(len(test_class) == 2)
assert(len(other_class) == 1)
assert(len(invalid) == 3)
if path.exists(default_path):
rmtree(default_path)
def test_to_class(self):
test_class_all = validate_files(self.yaml_path(), self.schemas_path(), to_class = TestClass)
test_class, other_class, invalid = self.filter_by_types(test_class_all)
assert(len(test_class) == 2)
assert(len(other_class) == 0)
assert(len(invalid) == 4)
other_class_all = validate_files(self.yaml_path(), self.schemas_path(), to_class = OtherClass)
test_class, other_class, invalid = self.filter_by_types(other_class_all)
assert(len(test_class) == 0)
assert(len(other_class) == 1)
assert(len(invalid) == 5)
| [
"[email protected]"
]
| |
d97ca44c9ac250ee5169fb9662a6ae9b5cd84709 | 3bd961816fe9b9048108f8a5a254b931dd79bde4 | /manga_py/providers/mangamew_com.py | 468cfc62dbc030c91cf86ee739264971ff06e7d1 | [
"MIT"
]
| permissive | eduhoribe/manga-py | 6243115549d78c1599c6b043fe7cd897e2f517d3 | fe7eb2e08532b3c75b4f7ac8cc4132f0e7a65eb4 | refs/heads/stable_1.x | 2023-01-14T01:48:34.873530 | 2020-11-17T04:30:15 | 2020-11-17T04:30:15 | 307,992,359 | 1 | 0 | MIT | 2020-11-15T00:00:45 | 2020-10-28T11:18:18 | Python | UTF-8 | Python | false | false | 1,069 | py | from manga_py.provider import Provider
from .helpers.std import Std
class MangaMewCom(Provider, Std):
_type = 'manga'
def get_chapter_index(self) -> str:
re = r'%s/[^/]+/.+?-(\d+(?:-\d+)?)-\d+' % self._type
return self.re.search(re, self.chapter).group(1)
def get_main_content(self):
url = self.get_url()
if url.find('/' + self._type + '/') == -1: # not found
a = self.html_fromstring(url, 'h1.name a', 0)
url = a.get('href')
return self.http_get(url)
def get_manga_name(self) -> str:
content = self.http_get(self.get_url())
return self.text_content(content, 'h1.name a,h1.title')
def get_chapters(self):
return self._elements('.chapter .item a')[::-1]
def get_files(self):
parser = self.html_fromstring(self.chapter)
return self._images_helper(parser, '#content .item > img')
def get_cover(self) -> str:
return self._cover_from_content('.images img')
def book_meta(self) -> dict:
pass
main = MangaMewCom
| [
"[email protected]"
]
| |
e90007c5d2697952c7b32f49861660ccd7553747 | 3fca3083cda021e41d3199b83b039a93bad0c18d | /inspirehep/modules/workflows/workflows/hep_ingestion.py | 7a193c3566cb53bc0a3a35437b09d45e4c710644 | []
| no_license | nikpap/inspire-next | 53c8859503c684c835e61d4496bc903b39fb4111 | 4de8910fff569fc9028300c70b63200da521ddb9 | refs/heads/master | 2020-12-28T21:17:07.774075 | 2016-05-24T13:19:59 | 2016-05-24T13:19:59 | 34,382,494 | 1 | 0 | null | 2015-04-22T09:51:24 | 2015-04-22T09:51:24 | null | UTF-8 | Python | false | false | 9,915 | py | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014, 2015, 2016 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Workflow for processing single arXiv records harvested."""
from collections import OrderedDict
from flask import render_template
from workflow.patterns.controlflow import IF, IF_ELSE
from inspirehep.dojson.hep import hep2marc
from inspirehep.dojson.utils import legacy_export_as_marc
# FIXME: Revive beard-server to provide predicter
# from inspirehep.modules.predicter.tasks import (
# guess_coreness,
# )
from inspirehep.modules.refextract.tasks import extract_journal_info
from inspirehep.modules.workflows.tasks.actions import (
add_core,
halt_record,
is_record_relevant,
shall_push_remotely,
shall_upload_record,
reject_record,
)
from inspirehep.modules.workflows.tasks.classifier import (
classify_paper,
filter_core_keywords,
)
from inspirehep.modules.workflows.tasks.matching import (
delete_self_and_stop_processing,
stop_processing,
exists_in_holding_pen,
record_exists,
update_old_object,
)
from inspirehep.modules.workflows.tasks.upload import store_record
from inspirehep.modules.workflows.tasks.submission import (
send_robotupload,
)
from inspirehep.utils.record import get_value
class ClassProperty(property):
"""Special class to allow a classmethod to be accessed as property."""
def __get__(self, cls, owner):
"""Call getter and return."""
return self.fget.__get__(None, owner)()
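# Minimal sketch of the pattern ClassProperty enables (names are
# illustrative, not from this codebase):
#
#   class Foo(object):
#       @classmethod
#       def _get_answer(cls):
#           return 42
#       answer = ClassProperty(_get_answer)
#
#   Foo.answer  # -> 42; the classmethod runs on plain attribute access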
class HEPIngestion(object):
"""Generic HEP ingestion workflow for a single record.
This workflow is built upon a set of steps that can be overridden.
Mainly it is composed of the following steps:
1. `initial_processing` (e.g. conversion to correct formats)
2. `match_processing` (e.g. match record to existing records in
Holding Pen or the database itself.)
3. `before_halt_check` (e.g. list of tasks before checking
if record should be halted)
4. `halt_check` (static function to see if the record require
human intervention in the Holding Pen. Return True/False)
a. On `True`: `on_halt` processing (e.g. decide what happens
upon halt)
5. `before_upload_check` (e.g. tasks to run before upload check)
6. `upload_check` (static function to see if the record shall be
uploaded. Return True/False
a. On `True`: `on_upload` processing (e.g. decide what happens
upon upload)
b. On `False`: `on_rejection` processing (e.g. decide what happens
upon rejection)
7. `final_processing` (e.g. conversion to correct formats)
Integrate this workflow in your other workflows:
.. code:: python
class mysource(HEPIngestion):
initial_processing = [before, processing] # inject before halt
halt_check = staticmethod(my_halt_check) # need to be staticmethod
"""
name = "HEP"
data_type = "hep"
initial_processing = []
match_processing = [
IF(exists_in_holding_pen, [
delete_self_and_stop_processing,
]),
IF(record_exists, [
stop_processing,
]),
]
before_halt_check = [
extract_journal_info,
classify_paper(
taxonomy="HEPont.rdf",
only_core_tags=False,
spires=True,
with_author_keywords=True,
),
filter_core_keywords,
# Predict action for a generic HEP paper based only on title
# and abstract.
# guess_coreness("arxiv_skip_astro_title_abstract.pickle"),
]
halt_check = staticmethod(is_record_relevant)
on_halt = [
halt_record(action="hep_approval"),
]
on_no_halt = [
reject_record("Record automatically rejected")
]
before_upload_check = [
add_core,
]
upload_check = staticmethod(shall_upload_record)
on_upload = [
IF_ELSE(shall_push_remotely, [
send_robotupload(marcxml_processor=hep2marc),
], [
store_record,
]),
]
on_no_upload = []
final_processing = []
@classmethod
def get_workflow(cls):
"""Build the main ingestion workflow.
This builder enforces a certain structure to the ingestion workflow.
"""
return (
cls.initial_processing +
cls.match_processing +
cls.before_halt_check + [
IF_ELSE(cls.halt_check, cls.on_halt, cls.on_no_halt)
] + cls.before_upload_check + [
IF_ELSE(cls.upload_check, cls.on_upload, cls.on_no_upload)
] + cls.final_processing
)
workflow = ClassProperty(get_workflow)
@classmethod
def get_title(cls, obj, **kwargs):
"""Return the value to put in the title column of Holding Pen."""
if isinstance(obj.data, dict):
titles = filter(None, get_value(obj.data, "titles.title", []))
if titles:
# Show first title that evaluates to True
return titles[0]
return "No title available"
@classmethod
def get_additional(cls, obj, **kwargs):
"""Return the value to put in the additional column of HoldingPen."""
if "classifier_results" in obj.extra_data:
keywords = obj.extra_data.get('classifier_results').get("complete_output")
else:
keywords = []
prediction_results = obj.extra_data.get("arxiv_guessing", {})
if prediction_results:
prediction_results = prediction_results[0].get("result")
return render_template(
'inspire_workflows/styles/harvesting_record_additional.html',
object=obj,
keywords=keywords,
score=prediction_results.get("max_score"),
decision=prediction_results.get("decision")
)
@classmethod
def formatter(cls, obj, **kwargs):
"""Nicely format the record."""
if not obj.data:
return ""
if kwargs and kwargs.get('of') == 'xm':
return legacy_export_as_marc(hep2marc.do(obj.data))
return render_template(
'inspirehep_theme/format/record/Holding_Pen_HTML_detailed.tpl',
record=obj.data
)
@classmethod
def get_sort_data(cls, obj, **kwargs):
"""Return a dictionary useful for sorting in Holding Pen."""
prediction_results = obj.extra_data.get("arxiv_guessing")
if prediction_results:
prediction_results = prediction_results[0].get("result")
max_score = prediction_results.get("max_score")
decision = prediction_results.get("decision")
relevance_score = max_score
if decision == "CORE":
relevance_score += 10
elif decision == "Rejected":
relevance_score = (max_score * -1) - 10
return {
"max_score": prediction_results.get("max_score"),
"decision": prediction_results.get("decision"),
"relevance_score": relevance_score
}
else:
return {}
@classmethod
def get_record(cls, obj, **kwargs):
"""Return a dictionary-like object representing the current object.
This object will be used for indexing and be the basis for display
in Holding Pen.
"""
if not isinstance(obj.data, dict):
return {}
return obj.data
@staticmethod
def get_description(obj):
"""Get the description column part."""
if not isinstance(obj.data, dict):
return "No description found."
abstract = ""
authors = []
categories = []
final_identifiers = []
# Get identifiers
dois = get_value(obj.data, "dois.value", [])
if dois:
final_identifiers.extend(dois)
system_no = get_value(obj.data, "external_system_numbers.value", [])
if system_no:
final_identifiers.extend(system_no)
# Get subject categories, adding main one first. Order matters here.
record_categories = get_value(obj.data, "arxiv_eprints.categories", []) + \
get_value(obj.data, "subject_terms.term", [])
for category_list in record_categories:
if isinstance(category_list, list):
categories.extend(category_list)
else:
categories.append(category_list)
categories = list(OrderedDict.fromkeys(categories)) # Unique only
abstract = get_value(obj.data, "abstracts.value", [""])[0]
authors = obj.data.get("authors", [])
return render_template('inspire_workflows/styles/harvesting_record.html',
object=obj,
authors=authors,
categories=categories,
abstract=abstract,
identifiers=final_identifiers)
| [
"[email protected]"
]
| |
badf0cf703289b8f5d16976db515b79b22aba30d | 349dadbf45b7c12a3fe41c5e0421c0488b679919 | /transformers/tests/test_feature_extraction_beit.py | 0ca58a802d6be7f74d82492883f61e9356e28153 | [
"BSD-3-Clause",
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"Apache-2.0"
]
| permissive | salesforce/CodeRL | c772e408bac690527759f416ea22add4c97e5bec | 51db4ff983d5376e62b9e7eba150316a651c80d9 | refs/heads/main | 2023-08-18T18:38:02.740995 | 2022-11-18T16:14:28 | 2022-11-18T16:14:28 | 508,912,853 | 412 | 52 | BSD-3-Clause | 2023-08-31T07:51:27 | 2022-06-30T02:54:36 | Python | UTF-8 | Python | false | false | 12,578 | py | # coding=utf-8
# Copyright 2021 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from datasets import load_dataset
from transformers.file_utils import is_torch_available, is_vision_available
from transformers.testing_utils import require_torch, require_vision
from .test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
if is_torch_available():
import torch
if is_vision_available():
from PIL import Image
from transformers import BeitFeatureExtractor
class BeitFeatureExtractionTester(unittest.TestCase):
def __init__(
self,
parent,
batch_size=7,
num_channels=3,
image_size=18,
min_resolution=30,
max_resolution=400,
do_resize=True,
size=20,
do_center_crop=True,
crop_size=18,
do_normalize=True,
image_mean=[0.5, 0.5, 0.5],
image_std=[0.5, 0.5, 0.5],
reduce_labels=False,
):
self.parent = parent
self.batch_size = batch_size
self.num_channels = num_channels
self.image_size = image_size
self.min_resolution = min_resolution
self.max_resolution = max_resolution
self.do_resize = do_resize
self.size = size
self.do_center_crop = do_center_crop
self.crop_size = crop_size
self.do_normalize = do_normalize
self.image_mean = image_mean
self.image_std = image_std
self.reduce_labels = reduce_labels
def prepare_feat_extract_dict(self):
return {
"do_resize": self.do_resize,
"size": self.size,
"do_center_crop": self.do_center_crop,
"crop_size": self.crop_size,
"do_normalize": self.do_normalize,
"image_mean": self.image_mean,
"image_std": self.image_std,
"reduce_labels": self.reduce_labels,
}
def prepare_semantic_single_inputs():
dataset = load_dataset("hf-internal-testing/fixtures_ade20k", split="test")
image = Image.open(dataset[0]["file"])
map = Image.open(dataset[1]["file"])
return image, map
def prepare_semantic_batch_inputs():
ds = load_dataset("hf-internal-testing/fixtures_ade20k", split="test")
image1 = Image.open(ds[0]["file"])
map1 = Image.open(ds[1]["file"])
image2 = Image.open(ds[2]["file"])
map2 = Image.open(ds[3]["file"])
return [image1, image2], [map1, map2]
@require_torch
@require_vision
class BeitFeatureExtractionTest(FeatureExtractionSavingTestMixin, unittest.TestCase):
feature_extraction_class = BeitFeatureExtractor if is_vision_available() else None
def setUp(self):
self.feature_extract_tester = BeitFeatureExtractionTester(self)
@property
def feat_extract_dict(self):
return self.feature_extract_tester.prepare_feat_extract_dict()
def test_feat_extract_properties(self):
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
self.assertTrue(hasattr(feature_extractor, "do_resize"))
self.assertTrue(hasattr(feature_extractor, "size"))
self.assertTrue(hasattr(feature_extractor, "do_center_crop"))
self.assertTrue(hasattr(feature_extractor, "center_crop"))
self.assertTrue(hasattr(feature_extractor, "do_normalize"))
self.assertTrue(hasattr(feature_extractor, "image_mean"))
self.assertTrue(hasattr(feature_extractor, "image_std"))
def test_batch_feature(self):
pass
def test_call_pil(self):
# Initialize feature_extractor
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
# create random PIL images
image_inputs = prepare_image_inputs(self.feature_extract_tester, equal_resolution=False)
for image in image_inputs:
self.assertIsInstance(image, Image.Image)
# Test not batched input
encoded_images = feature_extractor(image_inputs[0], return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
1,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
# Test batched
encoded_images = feature_extractor(image_inputs, return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
self.feature_extract_tester.batch_size,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
def test_call_numpy(self):
# Initialize feature_extractor
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
# create random numpy tensors
image_inputs = prepare_image_inputs(self.feature_extract_tester, equal_resolution=False, numpify=True)
for image in image_inputs:
self.assertIsInstance(image, np.ndarray)
# Test not batched input
encoded_images = feature_extractor(image_inputs[0], return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
1,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
# Test batched
encoded_images = feature_extractor(image_inputs, return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
self.feature_extract_tester.batch_size,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
def test_call_pytorch(self):
# Initialize feature_extractor
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
# create random PyTorch tensors
image_inputs = prepare_image_inputs(self.feature_extract_tester, equal_resolution=False, torchify=True)
for image in image_inputs:
self.assertIsInstance(image, torch.Tensor)
# Test not batched input
encoded_images = feature_extractor(image_inputs[0], return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
1,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
# Test batched
encoded_images = feature_extractor(image_inputs, return_tensors="pt").pixel_values
self.assertEqual(
encoded_images.shape,
(
self.feature_extract_tester.batch_size,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
def test_call_segmentation_maps(self):
# Initialize feature_extractor
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
# create random PyTorch tensors
image_inputs = prepare_image_inputs(self.feature_extract_tester, equal_resolution=False, torchify=True)
maps = []
for image in image_inputs:
self.assertIsInstance(image, torch.Tensor)
maps.append(torch.zeros(image.shape[-2:]).long())
# Test not batched input
encoding = feature_extractor(image_inputs[0], maps[0], return_tensors="pt")
self.assertEqual(
encoding["pixel_values"].shape,
(
1,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(
encoding["labels"].shape,
(
1,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(encoding["labels"].dtype, torch.long)
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 255)
# Test batched
encoding = feature_extractor(image_inputs, maps, return_tensors="pt")
self.assertEqual(
encoding["pixel_values"].shape,
(
self.feature_extract_tester.batch_size,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(
encoding["labels"].shape,
(
self.feature_extract_tester.batch_size,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(encoding["labels"].dtype, torch.long)
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 255)
# Test not batched input (PIL images)
image, segmentation_map = prepare_semantic_single_inputs()
encoding = feature_extractor(image, segmentation_map, return_tensors="pt")
self.assertEqual(
encoding["pixel_values"].shape,
(
1,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(
encoding["labels"].shape,
(
1,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(encoding["labels"].dtype, torch.long)
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 255)
# Test batched input (PIL images)
images, segmentation_maps = prepare_semantic_batch_inputs()
encoding = feature_extractor(images, segmentation_maps, return_tensors="pt")
self.assertEqual(
encoding["pixel_values"].shape,
(
2,
self.feature_extract_tester.num_channels,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(
encoding["labels"].shape,
(
2,
self.feature_extract_tester.crop_size,
self.feature_extract_tester.crop_size,
),
)
self.assertEqual(encoding["labels"].dtype, torch.long)
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 255)
def test_reduce_labels(self):
# Initialize feature_extractor
feature_extractor = self.feature_extraction_class(**self.feat_extract_dict)
# ADE20k has 150 classes, and the background is included, so labels should be between 0 and 150
image, map = prepare_semantic_single_inputs()
encoding = feature_extractor(image, map, return_tensors="pt")
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 150)
feature_extractor.reduce_labels = True
encoding = feature_extractor(image, map, return_tensors="pt")
self.assertTrue(encoding["labels"].min().item() >= 0)
self.assertTrue(encoding["labels"].max().item() <= 255)
| [
"[email protected]"
]
| |
5dabeb4a2c1b694a6e37ad5f0562da8805237de6 | adaf5d5cd4c46db0387f6dfd7de34d38cf3b06d0 | /Commands/Mtg.py | 2b91b0e30d456e99519e5a1ddd864ef11a563fa3 | [
"MIT"
]
| permissive | CrushAndRun/PyMoronBot-LugNut | b0f23437a18fb27ee22313469ad2a396ddaa8f13 | d695d8f36b23fc584b3c7d795c12a9e4577c806b | refs/heads/master | 2020-02-26T13:42:41.197328 | 2017-12-07T15:36:58 | 2017-12-07T15:36:58 | 67,321,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,358 | py | # -*- coding: utf-8 -*-
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
from Utils import WebUtils
import re
from bs4 import BeautifulSoup
class Mtg(CommandInterface):
triggers = ['mtg', 'mtgf']
help = 'mtg(f) <card name> - fetches details of the Magic: The Gathering card you specify ' \
'from gatherer.wizards.com. mtgf includes the flavour text, if it has any'
def execute(self, message):
"""
@type message: IRCMessage
"""
searchTerm = 'http://gatherer.wizards.com/pages/search/default.aspx?name='
for param in message.ParameterList:
searchTerm += '+[%s]' % param
webPage = WebUtils.fetchURL(searchTerm)
soup = BeautifulSoup(webPage.body)
name = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_nameRow'})
if name is None:
searchResults = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_searchResultsContainer'})
if searchResults is None:
return IRCResponse(ResponseType.Say, 'No cards found: ' + searchTerm, message.ReplyTo)
else:
cardItems = searchResults.find_all(class_='cardItem')
# potentially return first item here
return IRCResponse(ResponseType.Say, '{0} cards found: {1}'.format(len(cardItems), searchTerm), message.ReplyTo)
name = name.find('div', 'value').text.strip()
types = u' | T: ' + soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_typeRow'}).find('div', 'value').text.strip()
rarity = u' | R: ' + soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_rarityRow'}).find('div', 'value').text.strip()
manaCost = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_manaRow'})
if manaCost is not None:
manaCost = unicode(manaCost.find('div', 'value'))
manaCost = u' | MC: ' + self.translateSymbols(manaCost)
manaCost = re.sub('<[^>]+?>', '', manaCost)
manaCost = manaCost.replace('\n', '')
else:
manaCost = u''
convCost = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_cmcRow'})
if convCost is not None:
convCost = u' | CMC: ' + convCost.find('div', 'value').text.strip()
else:
convCost = u''
cardText = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_textRow'})
if cardText is not None:
cardTexts = cardText.find_all('div', 'cardtextbox')
texts = []
for text in cardTexts:
text = self.translateSymbols(text)
text = re.sub('<[^>]+?>', '', text)
texts.append(text)
cardText = u' | CT: ' + u' > '.join(texts)
else:
cardText = u''
flavText = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_FlavorText'})
if message.Command.endswith('f') and flavText is not None:
flavTexts = flavText.find_all('div', 'cardtextbox')
texts = []
for text in flavTexts:
texts.append(unicode(text.text))
flavText = u' | FT: ' + ' > '.join(texts)
else:
flavText = u''
powTough = soup.find('div', {'id': 'ctl00_ctl00_ctl00_MainContent_SubContent_SubContent_ptRow'})
if powTough is not None:
powTough = u' | P/T: ' + powTough.find('div', 'value').text.strip().replace(' ', '')
else:
powTough = u''
reply = name + manaCost + convCost + types + cardText + flavText + powTough + rarity
return IRCResponse(ResponseType.Say, reply, message.ReplyTo)
@classmethod
def translateSymbols(cls, text):
text = unicode(text)
text = re.sub(r'<img.+?name=(tap).+?>', r'Tap', text) # tap
text = re.sub(r'<img.+?name=([0-9]{2,}).+?>', r'\1', text) # long numbers
text = re.sub(r'<img.+?name=([^&"])([^&"]).+?>', r'{\1/\2}', text) # hybrids
text = re.sub(r'<img.+?name=([^&"]+).+?>', r'\1', text) # singles and any 'others' left over
return text
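    # Illustrative transformations (the img tags are paraphrased, not
    # verbatim gatherer markup):
    #   '<img ... name=tap ...>'  ->  'Tap'
    #   '<img ... name=10 ...>'   ->  '10'
    #   '<img ... name=GW ...>'   ->  '{G/W}'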
| [
"[email protected]"
]
| |
afb5f5bed68a1de853f9b06fe514e90acd63388b | 01fa2aca31eb73a559d192fd29e44350f26a13a9 | /HAX/18.CocoJoe/script.module.lambdascrapers/lib/lambdascrapers/modules/cfscrape.py | 3102264f6a7437b75f3d4beb80b8c92f8db299b0 | [
"Beerware"
]
| permissive | RandomIntermition/k4y108837s | b4beedeff375645bd4fa9ad348631a9a9f3640b6 | e9115aad49795dfe30a96c278cedaf089abcc11d | refs/heads/master | 2022-05-01T18:45:57.298903 | 2022-03-30T03:41:08 | 2022-03-30T03:41:08 | 109,356,425 | 1 | 0 | null | 2019-11-08T02:20:47 | 2017-11-03T05:36:48 | Python | UTF-8 | Python | false | false | 15,801 | py |
import ast
import logging
import operator as op
import os
import random
import re
import ssl
from collections import OrderedDict
from copy import deepcopy
from time import sleep
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3.util.ssl_ import create_urllib3_context
from requests.sessions import Session
try:
from urlparse import urlparse
from urlparse import urlunparse
except ImportError:
from urllib.parse import urlparse
from urllib.parse import urlunparse
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# supported operators
operators = {ast.Add: op.add, ast.Sub: op.sub, ast.Mult: op.mul, ast.Div: op.truediv, ast.Pow: op.pow, ast.BitXor: op.xor, ast.USub: op.neg}
def eval_expr(expr):
return eval_(ast.parse(expr, mode='eval').body)
def eval_(node):
if isinstance(node, ast.Num): # <number>
return node.n
elif isinstance(node, ast.BinOp): # <left> <operator> <right>
return operators[type(node.op)](eval_(node.left), eval_(node.right))
elif isinstance(node, ast.UnaryOp): # <operator> <operand> e.g., -1
return operators[type(node.op)](eval_(node.operand))
else:
raise TypeError(node)
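# Illustrative behavior of the whitelisted-AST evaluator above (it never
# runs eval/exec on arbitrary code; only the operators dict is allowed):
#   eval_expr('2*6')                        -> 12
#   eval_expr('2**6')                       -> 64
#   eval_expr('1 + 2*3**(4^5) / (6 + -7)')  -> -5.0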
DEFAULT_USER_AGENTS = [
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 UBrowser/6.1.2909.1022 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/65.0.3325.181 Chrome/65.0.3325.181 Safari/537.36",
"Mozilla/5.0 (Linux; Android 7.0; Moto G (5) Build/NPPS25.137-93-8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.137 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:59.0) Gecko/20100101 Firefox/59.0",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0"
]
DEFAULT_USER_AGENT = random.choice(DEFAULT_USER_AGENTS)
BUG_REPORT = (
"Cloudflare may have changed their technique, or there may be a bug in the script.\n\nPlease read https://github.com/Anorov/cloudflare-scrape#updates, then file a bug report at https://github.com/Anorov/cloudflare-scrape/issues.")
class CipherSuiteAdapter(HTTPAdapter):
def __init__(self, cipher_suite=None, **kwargs):
self.cipher_suite = cipher_suite
super(CipherSuiteAdapter, self).__init__(**kwargs)
if hasattr(ssl, 'PROTOCOL_TLS'):
self.ssl_context = create_urllib3_context(ssl_version=getattr(ssl, 'PROTOCOL_TLSv1_3', ssl.PROTOCOL_TLSv1_2), ciphers=self.cipher_suite)
else:
self.ssl_context = create_urllib3_context(ssl_version=ssl.PROTOCOL_TLSv1)
def init_poolmanager(self, *args, **kwargs):
kwargs['ssl_context'] = create_urllib3_context(ciphers=self.cipher_suite)
return super(CipherSuiteAdapter, self).init_poolmanager(*args, **kwargs)
def proxy_manager_for(self, *args, **kwargs):
kwargs['ssl_context'] = create_urllib3_context(ciphers=self.cipher_suite)
return super(CipherSuiteAdapter, self).proxy_manager_for(*args, **kwargs)
class CloudflareScraper(Session):
def __init__(self, *args, **kwargs):
super(CloudflareScraper, self).__init__(*args, **kwargs)
self.headers = (
OrderedDict(
[
('User-Agent', self.headers['User-Agent']),
('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
('Accept-Language', 'en-US,en;q=0.5'),
('Accept-Encoding', 'gzip, deflate'),
('Connection', 'close'),
('Upgrade-Insecure-Requests', '1')
]
)
)
self.tries = 0
self.prev_resp = None
self.cipher_suite = None
if "requests" in self.headers["User-Agent"]:
# Spoof Firefox on Linux if no custom User-Agent has been set
self.headers["User-Agent"] = DEFAULT_USER_AGENT
def is_cloudflare_on(self, response, allow_empty_body=False):
is_cloudflare_response = (response.status_code in [403, 429, 503] and response.headers.get("Server", "").startswith("cloudflare"))
return (is_cloudflare_response and (allow_empty_body or (b"jschl_vc" in response.content and b"jschl_answer" in response.content)))
def load_cipher_suite(self):
if self.cipher_suite:
return self.cipher_suite
self.cipher_suite = ''
if hasattr(ssl, 'PROTOCOL_TLS'):
ciphers = [
'TLS13-AES-128-GCM-SHA256',
'TLS13-AES-256-GCM-SHA384',
'TLS13-CHACHA20-POLY1305-SHA256',
'ECDHE-ECDSA-AES128-GCM-SHA256',
'ECDHE-ECDSA-AES256-GCM-SHA384',
'ECDHE-RSA-AES256-GCM-SHA384',
'ECDHE-ECDSA-CHACHA20-POLY1305',
'ECDHE-RSA-CHACHA20-POLY1305-OLD',
'ECDHE-ECDSA-AES256-SHA',
'ECDHE-ECDSA-AES128-SHA',
'ECDHE-RSA-AES128-SHA',
'ECDHE-RSA-AES256-SHA',
'DHE-RSA-AES128-SHA',
'DHE-RSA-AES256-SHA',
'AES128-GCM-SHA256',
'AES256-GCM-SHA384',
'AES128-SHA',
'DES-CBC3-SHA'
]
            if hasattr(ssl, 'PROTOCOL_TLSv1_3'):
                # Prepend the TLS 1.3 cipher names one by one; inserting the
                # whole list as a single element would later hand a list to
                # SSLContext.set_ciphers() and raise a TypeError.
                ciphers[0:0] = [
                    'GREASE_3A',
                    'GREASE_6A',
                    'AES128-GCM-SHA256',
                    'AES256-GCM-SHA256',
                    'AES256-GCM-SHA384',
                    'CHACHA20-POLY1305-SHA256'
                ]
ctx = ssl.SSLContext(getattr(ssl, 'PROTOCOL_TLSv1_3', ssl.PROTOCOL_TLSv1_2))
#ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
for cipher in ciphers:
try:
ctx.set_ciphers(cipher)
self.cipher_suite = '{}:{}'.format(self.cipher_suite, cipher).rstrip(':')
except ssl.SSLError:
pass
return self.cipher_suite
def request(self, method, url, *args, **kwargs):
instance = super(CloudflareScraper, self)
instance.mount('https://', CipherSuiteAdapter(self.load_cipher_suite()))
resp = instance.request(method, url, *args, **kwargs)
if b'why_captcha' in resp.content or b'/cdn-cgi/l/chk_captcha' in resp.content:
exception_message = 'Cloudflare returned captcha!'
if self.prev_resp is not None and os.getenv('CI') == 'true':
exception_message += '\n' + self.prev_resp.text
raise Exception(exception_message)
self.prev_resp = resp
# Check if Cloudflare anti-bot is on
if self.is_cloudflare_on(resp):
if self.tries >= 3:
exception_message = 'Failed to solve Cloudflare challenge!'
if os.getenv('CI') == 'true':
exception_message += '\n' + resp.text
raise Exception(exception_message)
return self.solve_cf_challenge(resp, **kwargs)
# Otherwise, no Cloudflare anti-bot detected
return resp
def solve_cf_challenge(self, resp, **original_kwargs):
self.tries += 1
timeout = int(re.compile("\}, ([\d]+)\);", re.MULTILINE).findall(resp.text)[0]) / 1000
sleep(timeout)
body = resp.text
parsed_url = urlparse(resp.url)
domain = parsed_url.netloc
submit_url = '{}://{}/cdn-cgi/l/chk_jschl'.format(parsed_url.scheme, domain)
cloudflare_kwargs = deepcopy(original_kwargs)
headers = cloudflare_kwargs.setdefault('headers', {'Referer': resp.url})
try:
params = cloudflare_kwargs.setdefault(
'params', OrderedDict(
[
('s', re.search(r'name="s"\svalue="(?P<s_value>[^"]+)', body).group('s_value')),
('jschl_vc', re.search(r'name="jschl_vc" value="(\w+)"', body).group(1)),
('pass', re.search(r'name="pass" value="(.+?)"', body).group(1)),
]
)
)
answer = self.get_answer(body, domain)
except Exception as e:
logging.error("Unable to parse Cloudflare anti-bots page. %s" % e)
raise
try:
params["jschl_answer"] = str(answer)
except:
pass
# Requests transforms any request into a GET after a redirect,
# so the redirect has to be handled manually here to allow for
# performing other types of requests even as the first request.
method = resp.request.method
cloudflare_kwargs['allow_redirects'] = False
redirect = self.request(method, submit_url, **cloudflare_kwargs)
redirect_location = urlparse(redirect.headers['Location'])
if not redirect_location.netloc:
redirect_url = urlunparse(
(
parsed_url.scheme,
domain,
redirect_location.path,
redirect_location.params,
redirect_location.query,
redirect_location.fragment
)
)
return self.request(method, redirect_url, **original_kwargs)
return self.request(method, redirect.headers['Location'], **original_kwargs)
def get_answer(self, body, domain):
init = re.findall('setTimeout\(function\(\){\s*var.*?.*:(.*?)}', body)[-1]
builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", body)[0]
try:
challenge_element = re.findall(r'id="cf.*?>(.*?)</', body)[0]
except:
challenge_element = None
if '/' in init:
init = init.split('/')
decryptVal = self.parseJSString(init[0]) / float(self.parseJSString(init[1]))
else:
decryptVal = self.parseJSString(init)
lines = builder.split(';')
char_code_at_sep = '"("+p+")")}'
for line in lines:
if len(line) > 0 and '=' in line:
sections = line.split('=')
if len(sections) < 3:
if '/' in sections[1]:
subsecs = sections[1].split('/')
val_1 = self.parseJSString(subsecs[0])
if char_code_at_sep in subsecs[1]:
subsubsecs = re.findall(r"^(.*?)(.)\(function", subsecs[1])[0]
operand_1 = self.parseJSString(subsubsecs[0] + ')')
operand_2 = ord(domain[self.parseJSString(subsecs[1][subsecs[1].find(char_code_at_sep) + len(char_code_at_sep):-2])])
val_2 = '%.16f%s%.16f' % (float(operand_1), subsubsecs[1], float(operand_2))
val_2 = eval_expr(val_2)
else:
val_2 = self.parseJSString(subsecs[1])
line_val = val_1 / float(val_2)
elif len(sections) > 2 and 'atob' in sections[2]:
expr = re.findall((r"id=\"%s.*?>(.*?)</" % re.findall(r"k = '(.*?)'", body)[0]), body)[0]
if '/' in expr:
expr_parts = expr.split('/')
val_1 = self.parseJSString(expr_parts[0])
val_2 = self.parseJSString(expr_parts[1])
line_val = val_1 / float(val_2)
else:
line_val = self.parseJSString(expr)
else:
if 'function' in sections[1]:
continue
line_val = self.parseJSString(sections[1])
elif 'Element' in sections[2]:
subsecs = challenge_element.split('/')
val_1 = self.parseJSString(subsecs[0])
if char_code_at_sep in subsecs[1]:
subsubsecs = re.findall(r"^(.*?)(.)\(function", subsecs[1])[0]
operand_1 = self.parseJSString(subsubsecs[0] + ')')
operand_2 = ord(domain[self.parseJSString(subsecs[1][subsecs[1].find(char_code_at_sep) + len(char_code_at_sep):-2])])
val_2 = '%.16f%s%.16f' % (float(operand_1), subsubsecs[1], float(operand_2))
val_2 = eval_expr(val_2)
else:
val_2 = self.parseJSString(subsecs[1])
line_val = val_1 / float(val_2)
decryptVal = '%.16f%s%.16f' % (float(decryptVal), sections[0][-1], float(line_val))
decryptVal = eval_expr(decryptVal)
if '+ t.length' in body:
decryptVal += len(domain)
return float('%.10f' % decryptVal)
def parseJSString(self, s):
offset = 1 if s[0] == '+' else 0
val = s.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0')[offset:]
val = val.replace('(+0', '(0').replace('(+1', '(1')
val = re.findall(r'\((?:\d|\+|\-)*\)', val)
val = ''.join([str(eval_expr(i)) for i in val])
return int(val)
@classmethod
def create_scraper(cls, sess=None, **kwargs):
"""
Convenience function for creating a ready-to-go requests.Session (subclass) object.
"""
scraper = cls()
if sess:
attrs = ["auth", "cert", "cookies", "headers", "hooks", "params", "proxies", "data"]
for attr in attrs:
val = getattr(sess, attr, None)
if val:
setattr(scraper, attr, val)
return scraper
## Functions for integrating cloudflare-scrape with other applications and scripts
@classmethod
def get_tokens(cls, url, user_agent=None, **kwargs):
scraper = cls.create_scraper()
if user_agent:
scraper.headers["User-Agent"] = user_agent
try:
resp = scraper.get(url, **kwargs)
resp.raise_for_status()
except Exception as e:
logging.error("'%s' returned an error. Could not collect tokens." % url)
raise
domain = urlparse(resp.url).netloc
cookie_domain = None
for d in scraper.cookies.list_domains():
if d.startswith(".") and d in ("." + domain):
cookie_domain = d
break
else:
raise ValueError("Unable to find Cloudflare cookies. Does the site actually have Cloudflare IUAM (\"I'm Under Attack Mode\") enabled?")
return ({"__cfduid": scraper.cookies.get("__cfduid", "", domain=cookie_domain), "cf_clearance": scraper.cookies.get("cf_clearance", "", domain=cookie_domain)}, scraper.headers["User-Agent"])
@classmethod
def get_cookie_string(cls, url, user_agent=None, **kwargs):
"""
Convenience function for building a Cookie HTTP header value.
"""
tokens, user_agent = cls.get_tokens(url, user_agent=user_agent, **kwargs)
return "; ".join("=".join(pair) for pair in tokens.items()), user_agent
create_scraper = CloudflareScraper.create_scraper
get_tokens = CloudflareScraper.get_tokens
get_cookie_string = CloudflareScraper.get_cookie_string
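# Usage sketch (the URL is a placeholder; the scraper behaves like a
# requests.Session, which this module subclasses):
#   scraper = create_scraper()
#   resp = scraper.get("https://cloudflare-protected.example/")
#   print(resp.status_code, len(resp.content))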
| [
"[email protected]"
]
| |
e8dd5fdaccd0096013f6662954213832a0879e9a | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Link/LinkChecker-9.3/linkcheck/checker/__init__.py | 62202815b17c0292ffab6e86859598e16e02a15a | []
| no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:ed0ddaf86ed59f018b4cf89cd2abc32f6e966da1ceeb748d5678f763bef305b1
size 6120
| [
"[email protected]"
]
| |
199e2fbaeb8a63f651532d83d07851ad76bdda71 | 3c92c92f588ba1156c3579683d3d8b7d12aef652 | /test.py | caf3bab175467be3b3c5100db5eaf6157fac8118 | []
| no_license | JoseAVallejo12/sempliMl | 2dace46732c67f9a2b1a035db449e1ee7170a77c | 3117fcd5a473f0fe7756d58f8707cb447193d7fc | refs/heads/master | 2022-12-26T19:23:30.429927 | 2020-10-13T19:45:17 | 2020-10-13T19:45:17 | 303,531,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | #!env/bin/python3
import requests
# Request a random new user with requests.get and parse the response into a dict with the json() method
params = requests.get('https://lv2394qpu0.execute-api.us-east-1.amazonaws.com/dev/user/random').json()
# Define the headers for sending the request to the customer-clustering API
headers = {'Content-Type': 'application/json'}
# Request the user's cluster with requests.post and parse the response into a dict with the json() method
res = requests.post("https://lv2394qpu0.execute-api.us-east-1.amazonaws.com/dev/user/cluster", params=params, headers=headers).json()
# Print the user data that was sent
print(f'user data sent: {params}')
# Print cluster user
print(f"clustering user: {res}") | [
"[email protected]"
]
| |
0a3898ac1c39e49d42d319ba852271926310d148 | 75dcb56e318688499bdab789262839e7f58bd4f6 | /_algorithms_challenges/pybites/PyBites-master/NAMES.PY | 31a0aabe5eab8555bb9e8f89529c771d14e56474 | [
"MIT"
]
| permissive | syurskyi/Algorithms_and_Data_Structure | 9a1f358577e51e89c862d0f93f373b7f20ddd261 | 929dde1723fb2f54870c8a9badc80fc23e8400d3 | refs/heads/master | 2023-02-22T17:55:55.453535 | 2022-12-23T03:15:00 | 2022-12-23T03:15:00 | 226,243,987 | 4 | 1 | null | 2023-02-07T21:01:45 | 2019-12-06T04:14:10 | Jupyter Notebook | UTF-8 | Python | false | false | 858 | py | NAMES = ['arnold schwarzenegger', 'alec baldwin', 'bob belderbos',
'julian sequeira', 'sandra bullock', 'keanu reeves',
'julbob pybites', 'bob belderbos', 'julian sequeira',
'al pacino', 'brad pitt', 'matt damon', 'brad pitt']
def dedup_and_title_case_names(names):
"""Should return a list of names, each name appears only once"""
return list({name.title() for name in names})
def sort_by_surname_desc(names):
"""Returns names list sorted desc by surname"""
names = dedup_and_title_case_names(names)
return sorted(names,
key=lambda x: x.split()[-1],
reverse=True)
def shortest_first_name(names):
"""Returns the shortest first name (str)"""
names = dedup_and_title_case_names(names)
names = [name.split()[0] for name in names]
return min(names, key=len)
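# Quick illustrative check of the helpers above (the expected values follow
# from the NAMES list defined at the top of this module):
if __name__ == "__main__":
    assert shortest_first_name(NAMES) == "Al"
    assert sort_by_surname_desc(NAMES)[0] == "Julian Sequeira"
    print("all checks passed")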
| [
"[email protected]"
]
| |
52475cb0f587a30f869a90240fe288a74769a8a0 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_1_1_neat/16_1_1_shaun_lee_lastword.py | 162809f1c2e6062991a66a8085d652bce0400c30 | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 648 | py | from __future__ import print_function
import sys
def read_input(in_file):
T = int(in_file.readline().strip())
result = [line.strip() for line in in_file]
return result
def check_case(S):
result = ""
for c in S:
if c + result > result + c:
result = c + result
else:
result += c
return result
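# Illustrative traces of check_case (consistent with the problem's samples):
#   check_case("CAB") -> "CAB"
#   check_case("JAM") -> "MJA"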
def main():
input_filename = sys.argv[1]
with open(input_filename) as input_file:
case_no = 0
for case in read_input(input_file):
case_no += 1
print("Case #" + str(case_no) + ": " + check_case(case))
if __name__ == '__main__':
main()
| [
"[[email protected]]"
]
| |
e7dfab6a95b879c74086ca60e363d1ccef110c97 | 96a34a048c783a75736bf0ec775df22142f9ee53 | /services/web/server/src/simcore_service_webserver/scicrunch/__init__.py | 338202607956135f6b5d4b7eb2066805397f8190 | [
"MIT"
]
| permissive | ITISFoundation/osparc-simcore | 77e5b9f7eb549c907f6ba2abb14862154cc7bb66 | f4c57ffc7b494ac06a2692cb5539d3acfd3d1d63 | refs/heads/master | 2023-08-31T17:39:48.466163 | 2023-08-31T15:03:56 | 2023-08-31T15:03:56 | 118,596,920 | 39 | 29 | MIT | 2023-09-14T20:23:09 | 2018-01-23T10:48:05 | Python | UTF-8 | Python | false | false | 478 | py | """
Submodule to interact with K-Core's https://scicrunch.org service
- client to validate and get info about RRIDs via scicrunch's API (service_client)
- keeps validated RRIDs in pg-database (scicrunch.db)
- define models for all interfaces: scicrunch API, postgres DB and webserver API (scicrunch_models)
NOTE: should have no dependencies with other modules in this service
Initial design: https://github.com/ITISFoundation/osparc-simcore/pull/2045
"""
| [
"[email protected]"
]
| |
53dc5dbad44d62299a2771b4f46026d73806497f | 3c750d4d60660fdf6ef84d7b7ab9663fb76d0fa1 | /sopht/numeric/eulerian_grid_ops/poisson_solver_3d/scipy_fft_3d.py | 4e90e7708b6623f2626783e66e3daaf05b5eb5bd | [
"MIT"
]
| permissive | SophT-Team/SophT | 25d157a17734600e9aa4f522b4574bfefe202bc7 | 99a094e0d6e635e5b2385a69bdee239a4d1fb530 | refs/heads/main | 2023-08-31T21:14:10.304592 | 2023-08-31T17:00:38 | 2023-08-31T17:00:38 | 498,451,510 | 2 | 2 | MIT | 2023-09-12T15:37:31 | 2022-05-31T18:25:12 | Python | UTF-8 | Python | false | false | 457 | py | """Create reference FFT operations via scipy in 3D."""
import numpy as np
from scipy.fft import irfftn, rfftn
def fft_ifft_via_scipy_kernel_3d(
fourier_field: np.ndarray,
inv_fourier_field: np.ndarray,
field: np.ndarray,
num_threads: int = 1,
) -> None:
"""Perform reference FFT operations via scipy."""
fourier_field[...] = rfftn(field, workers=num_threads)
inv_fourier_field[...] = irfftn(fourier_field, workers=num_threads)
| [
"[email protected]"
]
| |
d5c82675fd32505beabe8291bcae1e2d6bd02ffa | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r9/Gen/DecFiles/options/46000030.py | 67f8dc02c09f3261e98f8ceb9313e2ba312969dc | []
| no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,108 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r9/Gen/DecFiles/options/46000030.py generated: Fri, 27 Mar 2015 16:10:12
#
# Event Type: 46000030
#
# ASCII decay Descriptor: pp -> (X -> ~chi_10 -> (l q q, l l l) + jet ... )
#
from Configurables import Generation
Generation().EventType = 46000030
Generation().SampleGenerationTool = "Special"
from Configurables import Special
Generation().addTool( Special )
Generation().Special.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/BRpVNeutralino_m0600_m12400.dec"
Generation().Special.CutTool = "PythiaLSP"
from Configurables import LHCb__ParticlePropertySvc
LHCb__ParticlePropertySvc().OtherFiles = ["$DECFILESROOT/ppfiles/mSUGRA_m0600_m12400.tbl"]
from Gaudi.Configuration import *
importOptions( "$DECFILESROOT/options/SusyBRpV.py" )
from Configurables import PythiaProduction
Generation().Special.addTool( PythiaProduction )
Generation().Special.PythiaProduction.SLHASpectrumFile = "mSUGRA_m0600_m12400.LHspc"
| [
"[email protected]"
]
| |
a3b2f4ad14a1175927087259c39561072d324996 | eef243e450cea7e91bac2f71f0bfd45a00c6f12c | /.history/app/api_service/nlp_processing_20210124215235.py | 849149836d612782e4ed41b1146fa410bb3532f6 | []
| no_license | hoaf13/nlp-chatbot-lol | 910ab2ea3b62d5219901050271fc1a1340e46a2f | 18cb64efa9d6b4cafe1015f1cd94f4409271ef56 | refs/heads/master | 2023-05-08T04:17:19.450718 | 2021-02-02T02:37:38 | 2021-02-02T02:37:38 | 332,535,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,301 | py | import csv
import json
import numpy as np
import sklearn
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
import numpy
from keras.datasets import imdb
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.layers import LSTM
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.layers import LSTM, GRU,Bidirectional, Flatten, Dense
from keras_self_attention import SeqSelfAttention
import csv, re
import json
import numpy as np
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from keras.utils import np_utils
from sklearn.model_selection import train_test_split
from keras import optimizers
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras_self_attention import SeqSelfAttention, SeqWeightedAttention
dict_intent={
'build_item':0,
'support_socket':1,
'counter':2,
'be_countered':3,
'skill_up':4,
'how_to_play':5,
'combo':6,
'combine_with':7,
'how_to_use_skill':8,
'introduce':9
}
dict_digit2intent = {key: intent for key, intent in enumerate(dict_intent)}
f = open('./my_upload/champions.txt', "r")
reg = ""
for cham in f:
# print (cham.split ('\n')[0])
reg += cham.split ('\n')[0] + '|'
print (reg)
reg = reg[:-1]
print("REG: {}".format(reg))
f.close()
skills = ['q', 'w', 'e', 'r']
def get_entity(content):
# content = content.lower()
hero = re.search(reg, content)
if hero != None:
hero = hero.group()
else: hero = ""
spl = content.split(" ")
skill = ""
for i in spl:
if i in skills:
skill = i
break
return hero, skill
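# Illustrative call (the champion name is hypothetical -- real names are
# whatever ./my_upload/champions.txt contains, loaded into `reg` above):
#   get_entity("how to use yasuo q")  ->  ("yasuo", "q")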
def load_model():
model = Sequential()
model.add(Embedding(208, 5248, input_length=17))
model.add(Bidirectional(LSTM(128, return_sequences=True)))
# model.add(LSTM(128, return_sequences = True))
model.add(Flatten())
model.add(Dense(10, activation='softmax'))
model.compile(loss= 'categorical_crossentropy',optimizer='adam', metrics=['accuracy'])
model.load_weights('./my_upload/hoaf13-nlp.h5')
model.summary()
return model
def process_content(reg, content):
# content = content.lower()
x = re.search(reg, content)
if x != None:
content = content.replace(x.group(), "{hero}")
return content
def process_data(model, content):
f = open('./my_upload/bow.txt', 'r')
dictionary = ''
for word in f:
dictionary += word + " "
f.close()
data = [dictionary]
token_obj = Tokenizer()
token_obj.fit_on_texts(data)
max_len = 17
X_train_token = token_obj.texts_to_sequences([content])
X_pad = pad_sequences(X_train_token, maxlen=max_len, padding='post')
result = model.predict(X_pad)
intent = np.argmax(result)
hero, skill = get_entity(content)
return dict_digit2intent[intent], result[0][intent], hero, skill
model_nlp = load_model()
m | [
"[email protected]"
]
| |
8fe94a63a6e963f1ad1e1f239fe6261d16869520 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /vBwRuR4mF5yQ4cNuc_17.py | cf1ee398857363c1811e1931002d97f5cddeae37 | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py |
def count_missing_nums(lst):
out = [int(i) for i in lst if i.isdigit()]
return sum(i not in out for i in range(min(out), max(out)+1))
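# Illustrative checks (non-numeric strings are ignored; the count covers
# the gaps between the numeric min and max):
#   count_missing_nums(["1", "3", "a", "5"])  -> 2   (2 and 4 are missing)
#   count_missing_nums(["7", "7"])            -> 0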
| [
"[email protected]"
]
| |
092e0a9e39100e338aa1e0c4db77d873972d49ee | 306045a1cd0fb362f46d4db88311f442311bbc16 | /examples/idioms/programs/158.2163-random-sublist.py | 3f736d4ad21fd5030145e3bb3f6f9a976ee7b66f | [
"MIT"
]
| permissive | laowantong/paroxython | 608c9010a2b57c8f7ed5ea309e24035c2b2e44a3 | a6d45829dd34f046d20e5bae780fbf7af59429cb | refs/heads/master | 2023-09-01T05:18:29.687916 | 2022-11-07T17:40:31 | 2022-11-07T17:40:31 | 220,820,424 | 36 | 5 | MIT | 2023-09-08T04:44:58 | 2019-11-10T16:54:56 | Python | UTF-8 | Python | false | false | 566 | py | """Random sublist.
Create a new list _y from randomly picking exactly _k elements from list _x.
It is assumed that _x has at least _k elements.
Each element must have same probability to be picked.
Each element from _x must be picked _at _most _once.
Explain if the original ordering is preserved or not.
Source: programming-idioms.org
"""
# Implementation author: Oldboy
# Created on 2017-10-28T13:10:20.094421Z
# Last modified on 2018-06-24T13:08:32.325774Z
# Version 2
# The original ordering is not preserved.
import random
y = random.sample(x, k)
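# Illustrative run (output is random, and the ordering of x is not kept):
#   x, k = [1, 2, 3, 4, 5], 3
#   random.sample(x, k)  # might yield e.g. [4, 1, 5]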
| [
"[email protected]"
]
| |
8898fd8584b627d6221f4ec6682599576dd3016c | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/429/usersdata/308/99324/submittedfiles/jogoDaVelha_BIB.py | 3cd55426284682db76a7c18bcbf132edaafce8bb | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,553 | py | # -*- coding: utf-8 -*-
from random import randint
import copy
def verificaVencedor(tabela): #Checks whether there is a winner || Parameter: the board to check || Returns: the winner's symbol (' ' if none yet)
for a in range(0, 3):
if tabela[0][a]==tabela[1][a]==tabela[2][a]:
print('if 1')
return tabela[1][a]
if tabela[a][0]==tabela[a][1]==tabela[a][2]:
print('if 2')
return tabela[a][0]
if (tabela[0][0]==tabela[1][1]==tabela[2][2]) or (tabela[2][0]==tabela[1][1]==tabela[0][2]):
print('if 3')
return tabela[1][1]
return ' '
def verificaIgualdade(valora, valorb): #Checks whether the two values are equal; returns True or False
if valora==valorb:
return True
else:
return False
def solicitaNomeDoJogador(): #Asks for the player's name || Returns: the user's name
nome=str(input('Qual o seu nome (ou apelido)? '))
return nome
def solicitaSimboloDoHumano(): #Asks for the human's symbol || Returns: list = [human's symbol, computer's symbol]
simbolo=str.upper(input('Qual simbolo você deseja utilizar no jogo? '))
while simbolo not in ('X','O'):
simbolo=str.upper(input('Qual simbolo você deseja utilizar no jogo? '))
if simbolo== 'X':
scomputador='O'
else:
scomputador='X'
return [simbolo, scomputador]
def sorteioPrimeiraJogada(nomes): #Draw for the first move || Parameter: the players' names || Returns: the draw result (0 or 1)
resultado = randint(0, 1)
print('Vencedor do sorteio para inicio do jogo: %s' % nomes[resultado][0])
return resultado
def validaJogada(jogada, visual): #Validates a move || Parameters: the move string and the board || Return: True if the move is invalid or the square is taken, False if it is free
try:
return not verificaIgualdade(visual[int(jogada[0])][int(jogada[2])],' ')
except:
return True
def jogadaHumana(nomes, tabela): #Human move || Parameters: player names and the board || Return: the modified board
    jogada = input('What is your move, %s: ' % nomes[0][0])
    while validaJogada(jogada, tabela):
        print('OOPS!!! That move is not available. Try again!')
        jogada = input('What is your move, %s: ' % nomes[0][0])
    tabela[int(jogada[0])][int(jogada[2])] = nomes[0][1]
    return tabela
def mostraTabuleiro(visual): #Displays the board || Parameter: the board to be shown
for i in range (0, 3):
print(str(visual[i][0]) + ' | '+ str(visual[i][1]) + ' | '+ str(visual[i][2]))
def jogarNovamente(): #Asks whether to play again || Return: True if the answer is 'N' (stop playing), False otherwise
    x = input('Do you want to play again? (Y or N) ')
return verificaIgualdade(str.upper(x), 'N')
def jogadaComputador(nomes, tabela): #Computer move || Parameters: player names and the board || Return: the modified board
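    # Opening move: if the board is empty (or the human only took the center), pick a random corner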
if tabela == [[' ',' ', ' '], [' ', ' ',' '], [' ', ' ', ' ']] or tabela == [[' ',' ', ' '], [' ',nomes[0][1],' '], [' ', ' ', ' ']]:
lista = ['0 0', '0 2', '2 0', '2 2']
jogada = lista[randint(0, 3)]
tabela[int(jogada[0])][int(jogada[2])] = nomes[1][1]
return tabela
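    # Next, try to complete the computer's own line; failing that, block the human's winning move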
for jogador in [nomes[1][1], nomes[0][1]]:
for i in range(0, 3):
for j in range(0, 3):
if not validaJogada(('%d %d' % (i, j)), tabela):
copia = copy.deepcopy(tabela)
copia[i][j] = jogador
if verificaVencedor(copia) in ['X', 'O']:
tabela[i][j] = nomes[1][1]
return tabela
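    # Otherwise, look two computer moves ahead for a placement that forces a win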
for i in range(0, 3):
for j in range(0, 3):
if not validaJogada(('%d %d' % (i, j)), tabela):
copia = copy.deepcopy(tabela)
copia[i][j] = nomes[1][1]
for k in range(0, 3):
for l in range(0, 3):
copia2 = copy.deepcopy(copia)
if not validaJogada(('%d %d' % (k, l)), copia2):
copia2[k][l] = nomes[1][1]
if verificaVencedor(copia2) == nomes[1][1]:
tabela[i][j] = nomes[1][1]
return tabela
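    # Take the center square if it is still free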
if not validaJogada('1 1', tabela):
tabela[1][1] = nomes[1][1]
return tabela
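    # Fall back to a random legal move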
jogada = ('%d %d' % (randint(0, 2), randint(0, 2)))
while validaJogada(jogada, tabela):
jogada = ('%d %d' % (randint(0, 2), randint(0, 2)))
tabela[int(jogada[0])][int(jogada[2])] = nomes[1][1]
return tabela | [
"[email protected]"
]
| |
978517d22ef87134a9543194451cef9bb9403512 | ce66de2e9b099b8b53f52721e649bce3e9d42734 | /python/ray/ml/predictors/integrations/sklearn/sklearn_predictor.py | 91a117cb878acc3efff31d27cb8e260ec6335d6d | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
]
| permissive | pengzhenghao/ray | 0428d0b7d479f112c7649577534dfdfdfe7ea2e7 | e57ce7efd6ea1d0e4f6942fcf6f526287340e63d | refs/heads/master | 2023-03-10T00:41:17.916609 | 2022-04-18T10:20:12 | 2022-04-18T10:20:12 | 184,541,751 | 0 | 1 | Apache-2.0 | 2023-03-04T08:58:50 | 2019-05-02T07:52:15 | Python | UTF-8 | Python | false | false | 3,452 | py | from typing import Optional, List, Union
import pandas as pd
from ray.ml.checkpoint import Checkpoint
from ray.ml.predictor import Predictor, DataBatchType
from ray.ml.preprocessor import Preprocessor
from sklearn.base import BaseEstimator
class SklearnPredictor(Predictor):
"""A predictor for scikit-learn compatible estimators.
Args:
estimator: The fitted scikit-learn compatible estimator to use for
predictions.
preprocessor: A preprocessor used to transform data batches prior
to prediction.
"""
def __init__(
self, estimator: BaseEstimator, preprocessor: Optional[Preprocessor] = None
):
self.estimator = estimator
self.preprocessor = preprocessor
@classmethod
def from_checkpoint(cls, checkpoint: Checkpoint) -> "SklearnPredictor":
"""Instantiate the predictor from a Checkpoint.
The checkpoint is expected to be a result of ``SklearnTrainer``.
Args:
checkpoint (Checkpoint): The checkpoint to load the model and
preprocessor from. It is expected to be from the result of a
``SklearnTrainer`` run.
"""
raise NotImplementedError
def predict(
self,
data: DataBatchType,
feature_columns: Optional[Union[List[str], List[int]]] = None,
**predict_kwargs,
) -> pd.DataFrame:
"""Run inference on data batch.
Args:
data: A batch of input data. Either a pandas DataFrame or numpy
array.
feature_columns: The names or indices of the columns in the
data to use as features to predict on. If None, then use
all columns in ``data``.
**predict_kwargs: Keyword arguments passed to ``estimator.predict``.
Examples:
.. code-block:: python
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from ray.ml.predictors.sklearn import SklearnPredictor
train_X = np.array([[1, 2], [3, 4]])
train_y = np.array([0, 1])
model = RandomForestClassifier().fit(train_X, train_y)
                predictor = SklearnPredictor(estimator=model)
data = np.array([[1, 2], [3, 4]])
predictions = predictor.predict(data)
# Only use first and second column as the feature
data = np.array([[1, 2, 8], [3, 4, 9]])
predictions = predictor.predict(data, feature_columns=[0, 1])
.. code-block:: python
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from ray.ml.predictors.sklearn import SklearnPredictor
train_X = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
train_y = pd.Series([0, 1])
model = RandomForestClassifier().fit(train_X, train_y)
                predictor = SklearnPredictor(estimator=model)
# Pandas dataframe.
data = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
predictions = predictor.predict(data)
# Only use first and second column as the feature
data = pd.DataFrame([[1, 2, 8], [3, 4, 9]], columns=["A", "B", "C"])
predictions = predictor.predict(data, feature_columns=["A", "B"])
Returns:
pd.DataFrame: Prediction result.
"""
raise NotImplementedError
| [
"[email protected]"
]
| |
f143b9e6ab91c4c9fa6f94e1f36a988af36b2133 | a3d6556180e74af7b555f8d47d3fea55b94bcbda | /third_party/blink/web_tests/external/wpt/eventsource/resources/cors.py | 6ed31f2cd7d1782f8b7267d646d3ba26ab1a2a6d | [
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause",
"MIT",
"Apache-2.0"
]
| permissive | chromium/chromium | aaa9eda10115b50b0616d2f1aed5ef35d1d779d6 | a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c | refs/heads/main | 2023-08-24T00:35:12.585945 | 2023-08-23T22:01:11 | 2023-08-23T22:01:11 | 120,360,765 | 17,408 | 7,102 | BSD-3-Clause | 2023-09-10T23:44:27 | 2018-02-05T20:55:32 | null | UTF-8 | Python | false | false | 1,481 | py | import os
from wptserve import pipes
from wptserve.utils import isomorphic_decode
def run_other(request, response, path):
#This is a terrible hack
environ = {u"__file__": path}
exec(compile(open(path, u"r").read(), path, u'exec'), environ, environ)
rv = environ[u"main"](request, response)
return rv
def main(request, response):
origin = request.GET.first(b"origin", request.headers[b"origin"])
credentials = request.GET.first(b"credentials", b"true")
response.headers.update([(b"Access-Control-Allow-Origin", origin),
(b"Access-Control-Allow-Credentials", credentials)])
handler = request.GET.first(b'run')
if handler in [b"status-reconnect",
b"message",
b"redirect",
b"cache-control"]:
if handler == b"cache-control":
response.headers.set(b"Content-Type", b"text/event-stream")
rv = open(os.path.join(request.doc_root, u"eventsource", u"resources", u"cache-control.event_stream"), u"r").read()
response.content = rv
pipes.sub(request, response)
return
elif handler == b"redirect":
return run_other(request, response, os.path.join(request.doc_root, u"common", u"redirect.py"))
else:
return run_other(request, response, os.path.join(os.path.dirname(isomorphic_decode(__file__)), isomorphic_decode(handler) + u".py"))
else:
return
| [
"[email protected]"
]
| |
ee5635c50525121a7abafdf4f2497ca80d592b88 | ef243d91a1826b490e935fa3f3e6c29c3cc547d0 | /PyQt5/QtSensors/QDistanceReading.py | 93010e1e2e32d6db692726e8427d0fdb40d8b5f6 | []
| no_license | VentiFang/Python_local_module | 6b3d0b22399e817057dfd15d647a14bb1e41980e | c44f55379eca2818b29732c2815480ee755ae3fb | refs/heads/master | 2020-11-29T11:24:54.932967 | 2019-12-25T12:57:14 | 2019-12-25T12:57:14 | 230,101,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | # encoding: utf-8
# module PyQt5.QtSensors
# from F:\Python\Python36\lib\site-packages\PyQt5\QtSensors.pyd
# by generator 1.147
# no doc
# imports
import PyQt5.QtCore as __PyQt5_QtCore
import sip as __sip
from .QSensorReading import QSensorReading
class QDistanceReading(QSensorReading):
# no doc
def distance(self): # real signature unknown; restored from __doc__
""" distance(self) -> float """
return 0.0
def setDistance(self, p_float): # real signature unknown; restored from __doc__
""" setDistance(self, float) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
| [
"[email protected]"
]
| |
1ed5e23a6eec5d59476d6e9f794889ccaf9c1d50 | ba1e90ae6ea9f8f74d9b542e159825341c717712 | /2015/iitbhu3.py | 0b48444d1b36719c8ee5de5dc0558536774e0724 | []
| no_license | sailesh2/CompetitiveCode | b384687a7caa8980ab9b9c9deef2488b0bfe9cd9 | 5671dac08216f4ce75d5992e6af8208fa2324d12 | refs/heads/master | 2021-06-24T22:39:11.396049 | 2020-11-27T05:22:17 | 2020-11-27T05:22:17 | 161,877,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | x=raw_input().split(' ')
a=int(x[0])
b=int(x[1])
y=raw_input().split(' ')
n=int(y[0])
m=int(y[1])
savea=a
saveb=b
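# Euclidean algorithm: repeatedly replace the larger number by the remainder until it divides evenly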
if a>b:
while 1:
if a%b==0:
gcd=b
break
temp=a
a=b
b=temp%b
else:
while 1:
if b%a==0:
gcd=a
break
temp=b
b=a
a=temp%a
#print gcd
lcm=(savea*saveb)/gcd
#print lcm
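# "Yes" only if the LCM lies within the first n multiples of a and the first m multiples of b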
if lcm/savea<=n and lcm/saveb<=m:
print "Yes"
else:
print "No"
| [
"[email protected]"
]
| |
8eb6dd51ef164ee1452d1e90314b69b391ac91a8 | c0f4104194a7989e44d7f0161b2425c5a5bc3a98 | /senlin/tests/unit/apiv1/test_cluster_policies.py | 547c7f11cea39189106c81f3b497db2e0be8cd0a | []
| no_license | bopopescu/Openstack-2 | f65470bdd0ee4736c45b6f869f0453cb8eb446c8 | 6f06133562e3dfd490695a92c9ddf1a322675104 | refs/heads/master | 2022-11-28T09:19:21.633850 | 2016-06-23T07:55:32 | 2016-06-23T07:55:32 | 282,095,817 | 0 | 0 | null | 2020-07-24T01:44:49 | 2020-07-24T01:44:48 | null | UTF-8 | Python | false | false | 7,295 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from senlin.api.middleware import fault
from senlin.api.openstack.v1 import cluster_policies as cp_mod
from senlin.common import exception as senlin_exc
from senlin.common import policy
from senlin.rpc import client as rpc_client
from senlin.tests.unit.apiv1 import shared
from senlin.tests.unit.common import base
@mock.patch.object(policy, 'enforce')
class ClusterPolicyControllerTest(shared.ControllerTest, base.SenlinTestCase):
'''Tests the API class which acts as the WSGI controller.'''
def setUp(self):
super(ClusterPolicyControllerTest, self).setUp()
# Create WSGI controller instance
class DummyConfig(object):
bind_port = 8778
cfgopts = DummyConfig()
self.controller = cp_mod.ClusterPolicyController(options=cfgopts)
def test_cluster_policy_index(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
cid = 'test_cluster'
req = self._get('/cluster_policies/%s' % cid)
engine_resp = [
{
'id': 'fake_id',
'cluster_id': 'fake cluster id',
'policy_id': 'fake policy id',
'enabled': True,
'data': {},
'cluster_name': 'test_cluster',
'policy_name': 'test_policy',
'policy_type': 'ScalingPolicy',
}
]
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=engine_resp)
result = self.controller.index(req, cluster_id=cid)
default_args = {'sort': None, 'filters': None, 'identity': cid}
mock_call.assert_called_with(req.context,
('cluster_policy_list', default_args))
expected = {'cluster_policies': engine_resp}
self.assertEqual(expected, result)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_cluster_policy_index_whitelists_params(self, mock_call,
mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
cid = 'FAKE_CLUSTER'
params = {
'sort': 'fake sorting string',
'filters': None,
'balrog': 'you shall not pass!'
}
req = self._get('/cluster_policies/%s' % cid, params=params)
mock_call.return_value = []
self.controller.index(req, cluster_id=cid)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertEqual(3, len(engine_args))
self.assertIn('sort', engine_args)
self.assertIn('filters', engine_args)
self.assertNotIn('balrog', engine_args)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_cluster_policy_index_whitelist_filter_params(self, mock_call,
mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
cid = 'FAKE_CLUSTER'
params = {
'enabled': 'True',
'balrog': 'you shall not pass!'
}
req = self._get('/cluster_policies/%s' % cid, params=params)
mock_call.return_value = []
self.controller.index(req, cluster_id=cid)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertIn('filters', engine_args)
filters = engine_args['filters']
self.assertEqual(1, len(filters))
self.assertTrue(filters['enabled'])
self.assertNotIn('balrog', filters)
def test_cluster_policy_index_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', False)
cid = 'FAKE_CLUSTER'
req = self._get('/cluster_policy/%s' % cid)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, cluster_id=cid)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
def test_cluster_policy_get_success(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'get', True)
cid = 'FAKE_CLUSTER'
pid = 'FAKE_POLICY'
req = self._get('/cluster_policies/%(cid)s/%(pid)s'
'' % {'cid': cid, 'pid': pid})
engine_resp = {
'id': 'fake_id',
'cluster_id': cid,
'policy_id': pid,
'enabled': True,
'data': {},
'cluster_name': 'test_cluster',
'policy_name': 'test_policy',
'policy_type': 'ScalingPolicy',
}
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=engine_resp)
response = self.controller.get(req, cluster_id=cid, policy_id=pid)
mock_call.assert_called_once_with(
req.context, ('cluster_policy_get',
{'identity': cid, 'policy_id': pid}))
self.assertEqual({'cluster_policy': engine_resp}, response)
def test_cluster_policy_get_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'get', True)
cid = 'FAKE_CLUSTER'
pid = 'FAKE_POLICY'
req = self._get('/cluster_policies/%(cid)s/%(pid)s'
'' % {'cid': cid, 'pid': pid})
error = senlin_exc.PolicyBindingNotFound(policy=pid, identity=cid)
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.get,
req, cluster_id=cid,
policy_id=pid)
self.assertEqual(404, resp.json['code'])
self.assertEqual('PolicyBindingNotFound', resp.json['error']['type'])
def test_action_get_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'get', False)
cid = 'FAKE_CLUSTER'
pid = 'FAKE_POLICY'
req = self._get('/cluster_policies/%(cid)s/%(pid)s'
'' % {'cid': cid, 'pid': pid})
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.get,
req, cluster_id=cid,
policy_id=pid)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
| [
"egonmin@CN00119199"
]
| egonmin@CN00119199 |
3b9d3f0c092a22bced0ce36744a8a86dec30f188 | f034ce134705b2de79a5aef85496e0ed9eabd700 | /market/migrations/0001_initial.py | 930c638800dc0ee0538596413d50661da2ce1491 | []
| no_license | poudel/animal-farm | 4d7961ae3b8b64f382232f2f3a9c0fc41be392ab | 646a6156fd60a73e6e50de1c2891ae25a13dae30 | refs/heads/develop | 2021-04-06T06:26:35.834800 | 2018-03-14T15:47:12 | 2018-03-14T15:47:12 | 125,237,057 | 8 | 0 | null | 2020-02-11T21:25:44 | 2018-03-14T15:50:45 | Python | UTF-8 | Python | false | false | 3,301 | py | # Generated by Django 2.0.3 on 2018-03-10 06:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('livestock', '0007_auto_20180310_1209'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('modified_at', models.DateTimeField(auto_now=True)),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
('is_deleted', models.BooleanField(default=False)),
('title', models.CharField(max_length=100, verbose_name='title')),
('description', models.TextField(verbose_name='description')),
('animal', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='livestock.Animal', verbose_name='animal')),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='created_products', to=settings.AUTH_USER_MODEL, verbose_name='created by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Seller',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('modified_at', models.DateTimeField(auto_now=True)),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
('is_deleted', models.BooleanField(default=False)),
('name', models.CharField(max_length=50, verbose_name='name')),
('about', models.TextField(verbose_name='about')),
('mobile', models.CharField(max_length=10, verbose_name='mobile number')),
('status', models.CharField(choices=[('Pending', 'Pending verification'), ('Verified', 'Verified'), ('Unverified', 'Unverified'), ('Banned', 'Banned'), ('Archived', 'Archived')], db_index=True, default='Pending', max_length=20, verbose_name='status')),
('checker', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='checked_sellers', to=settings.AUTH_USER_MODEL, verbose_name='profile checker')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sellers', to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='product',
name='seller',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='market.Seller', verbose_name='seller'),
),
]
| [
"[email protected]"
]
| |
3ddce0badad2f28033a53e7964223509ad1d9078 | 0f2c3e9fef8146274362c25782991b8df68471b3 | /virtual/bin/sqlformat | 19582f982df35fdae7296406eb987b6eb139323b | [
"MIT"
]
| permissive | seron-ux/Awards | 068199a8cc31368ee3adc5830a0bff68567149bd | e6cbe17a890e44b0d1d88c48d5c6c27656b50bfb | refs/heads/master | 2023-04-03T22:14:17.852709 | 2021-04-06T12:35:16 | 2021-04-06T12:35:16 | 354,060,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | #!/home/moringa/Desktop/Awwards/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
ee633cead836f951cba9e7535fd70d9222b2ba1a | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/pb_6025/sdB_pb_6025_lc.py | d912f3768ca33282d331c84172b5370d3149d7de | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[8.2935,1.325956], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_pb_6025/sdB_pb_6025_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
a6c39c47a9e3abb4901208d7a8d2d9fc8bf7c7d4 | ea79ba14054cd4879968e75dc0cfc4890ba090b8 | /common/xrd-ui-tests-python/tests/xroad_global_groups_tests/XroadMemberAddToGlobalGroup.py | bbb863da99caf10e227ca5120b7869720df39da0 | [
"MIT"
]
| permissive | VitaliStupin/X-Road-tests | bf70a5591721243622e6c3a2653aae30af6c4ae4 | 6103f3f5bbba387b8b59b050c0c4f1fb2180fc37 | refs/heads/develop | 2023-04-16T12:20:23.289803 | 2018-06-01T07:59:12 | 2018-06-01T07:59:12 | 84,918,233 | 0 | 0 | null | 2017-03-14T07:21:58 | 2017-03-14T07:21:58 | null | UTF-8 | Python | false | false | 1,725 | py | import unittest
from helpers import auditchecker, xroad
from main.maincontroller import MainController
from tests.xroad_global_groups_tests import global_groups_tests
class XroadMemberAddToGlobalGroup(unittest.TestCase):
"""
SERVICE_37 Add an X-Road Member to a Global Group
RIA URL: https://jira.ria.ee/browse/XTKB-182
Depends on finishing other test(s): remove from global group
Requires helper scenarios:
X-Road version: 6.16.0
"""
def test_member_add_to_global_group(self):
main = MainController(self)
cs_host = main.config.get('cs.host')
cs_user = main.config.get('cs.user')
cs_pass = main.config.get('cs.pass')
cs_ssh_host = main.config.get('cs.ssh_host')
cs_ssh_user = main.config.get('cs.ssh_user')
cs_ssh_pass = main.config.get('cs.ssh_pass')
log_checker = auditchecker.AuditChecker(cs_ssh_host, cs_ssh_user, cs_ssh_pass)
global_group = main.config.get('cs.global_group')
member_name = main.config.get('ss1.client_name')
member_code = xroad.split_xroad_id(main.config.get('ss1.client_id'))['code']
test_member_add_to_global_group = global_groups_tests.test_member_add_to_global_group(main, member_name,
member_code, global_group,
log_checker=log_checker)
try:
main.reload_webdriver(cs_host, cs_user, cs_pass)
test_member_add_to_global_group()
except:
main.save_exception_data()
raise
finally:
main.tearDown()
| [
"[email protected]"
]
| |
8713fa99e22ae736ff68230a7b32a4cdab41f7df | 9d67cd5f8d3e0ffdd4334a6b9b67c93f8deca100 | /configs/12_20share_old.py | cf6e73c0a3255ed59e3802cffdecf6980499f4aa | []
| no_license | SiyuanLee/caps | 0c300a8e5a9a661eca4b2f59cd38125ddc35b6d3 | 476802e18ca1c7c88f1e29ed66a90c350aa50c1f | refs/heads/master | 2021-06-20T22:48:16.230354 | 2021-02-22T13:21:57 | 2021-02-22T13:21:57 | 188,695,489 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,041 | py | """
This is the example config file for the same_room map,
with no parameter sharing.
Take a look at transfer_config (the differences are there).
"""
import numpy as np
# More one-char representations will be added in order to support
# other objects.
# The following a=10 is an example, although it does not work yet
# because no '10' object has been included so far.
a = 10
# This is the map array that represents the map.
# You have to fill the array as an (m x n) matrix with no element
# left as None; an irregularly shaped array may cause malfunction.
# Currently available object indices are listed below; each index may fill more than one element of the array.
# 0: nothing
# 1: wall
# 2: ladder
# 3: coin
# 4: spike
# 5: triangle -------source
# 6: square ------ source
# 7: coin -------- target
# 8: princess -------source
# 9: player # one of the filled elements (possibly more than 1) is selected randomly to place the player
# unsupported indices will work as 0: nothing
map_array = [
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 5, 6, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 9, 9, 9, 9, 1, 9, 9, 9, 8, 1],
[1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1],
[1, 0, 0, 2, 0, 0, 0, 2, 0, 7, 1],
[1, 9, 9, 2, 9, 9, 9, 2, 9, 9, 1],
[1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1],
[1, 0, 2, 0, 1, 0, 2, 0, 0, 0, 1],
[1, 0, 2, 0, 1, 0, 2, 0, 0, 0, 1],
[1, 9, 9, 9, 1, 9, 9, 9, 9, 9, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
]
# set to true -> win when touching the object
# 0, 1, 2, 3, 4, 9 are not possible
end_game = {
6: True,
}
rewards = {
"positive": 0, # when collecting a coin
"win": 1, # endgame (win)
"negative": -25, # endgame (die)
"tick": 0 # living
}
######### dqn only ##########
# ensure correct import
import os
import sys
__file_path = os.path.abspath(__file__)
__dqn_dir = '/'.join(str.split(__file_path, '/')[:-2]) + '/'
sys.path.append(__dqn_dir)
__cur_dir = '/'.join(str.split(__file_path, '/')[:-1]) + '/'
from dqn_utils import PiecewiseSchedule, NoOpWrapperMK
# load the random sampled obs
import pickle
pkl_file = __cur_dir + 'same.pkl'
with open(pkl_file, 'rb') as f:
eval_obs_array = pickle.loads(f.read())
def seed_func():
return np.random.randint(0, 1000)
num_timesteps = 2.5e7
learning_freq = 4
# training iterations to go
num_iter = num_timesteps / learning_freq
# piecewise learning rate
lr_multiplier = 1.0
learning_rate = PiecewiseSchedule([
(0, 2e-4 * lr_multiplier),
(num_iter / 2, 1e-4 * lr_multiplier),
(num_iter * 3 / 4, 5e-5 * lr_multiplier),
], outside_value=5e-5 * lr_multiplier)
# piecewise learning rate
exploration = PiecewiseSchedule([
(0, 1.0),
(num_iter / 2, 0.7),
(num_iter * 3 / 4, 0.1),
(num_iter * 7 / 8, 0.05),
], outside_value=0.05)
######### transfer only #########
source_dirs = [
# an old map policy
# '/home/lsy/logs/target6c_12_05_17_21:26:25/dqn',
# '/home/lsy/PycharmProjects/ple-monstrerkong/examples/dqn_new/logs/target5_12_05_17_19:49:45',
# '/home/lsy/target8c_12_10_17_15:25:06/dqn',
# '/home/beeperman/Project/ple-monsterkong/examples/dqn_new/logs/same_room_12_12_17_20:54:53/dqn',
#'/home/lsy/same_room_12_12_17_20:54:53/dqn',
'/home/lsy/ple-monstrerkong/examples/dqn_new/logs/same_room5_12_12_17_21:00:29/dqn',
]
transfer_config = {
'source_dirs': source_dirs,
'online_q_omega': False, # default false off policy with experience replay
'q_omega_uniform_sample': False, # default false
'four_to_two': False, # default false frame_history_len must be 4!
'source_noop': False, # default false (false means source policies HAS noop action)
'no_share_para': False, # default false set to true to stop sharing parameter between q network and q_omega/term
'debug_no_term_train': True
}
dqn_config = {
'seed': seed_func, # will override game settings
'num_timesteps': num_timesteps,
'replay_buffer_size': 1000000,
'batch_size': 32,
'gamma': 0.99,
'learning_starts': 1e3,
'learning_freq': learning_freq,
'frame_history_len': 2,
'target_update_freq': 10000,
'grad_norm_clipping': 10,
'learning_rate': learning_rate,
'exploration': exploration,
'additional_wrapper': NoOpWrapperMK,
'eval_obs_array': eval_obs_array, # TODO: construct some eval_obs_array
'room_q_interval': 5e4, # q_vals will be evaluated every room_q_interval steps
'epoch_size': 5e4, # you decide any way
'config_name': str.split(__file_path, '/')[-1].replace('.py', ''), # the config file name
'transfer_config': transfer_config,
}
map_config = {
'map_array': map_array,
'rewards': rewards,
'end_game': end_game,
'init_score': 0,
'init_lives': 1, # please don't change, not going to work
# configs for dqn
'dqn_config': dqn_config,
# work automatically only for aigym wrapped version
'fps': 1000,
'frame_skip': 1,
'force_fps': True, # set to true to make the game run as fast as possible
'display_screen': False,
'episode_length': 1200,
'episode_end_sleep': 0., # sec
} | [
"[email protected]"
]
| |
706bb6fb18f57fe7fbff7f5b7082205fde6883cf | 5b6ec656a247d10011fd67a920aa002ebdf873c3 | /Ecommerce Website/Ecommerce Website 1.3/EcommerceWebsite/urls.py | 0c1b7936cf2a6e86257310459061cbcc2ef5174e | []
| no_license | KhaledAbuNada-AI/Django-Projects | cfb46d46da5f5358171294ca8c02c62c5babf2cf | ff264426d7a650f3c513678bbd71b5519372f6d3 | refs/heads/master | 2022-04-24T10:52:26.791436 | 2020-04-22T15:27:37 | 2020-04-22T15:27:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,101 | py | """EcommerceWebsite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from . import views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.index, name='index'),
path('shop/', include('shop.urls'))
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"[email protected]"
]
| |
8239c124f63cbbc3e2ce479cc233adb943472bcf | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_267/ch77_2020_04_08_20_40_26_130258.py | 01494b6b51477f2062e1834d94e0c986db072780 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | def calcula_tempo(dicio):
nome_tempo = {}
i = 0
for e,a in dicio.items():
t = (200/a)**(1/2)
nome_tempo[e] = t
i += 1
return nome_tempo
| [
"[email protected]"
]
| |
e1a7c547a214c1f8836d35c4c7eacd9583f9394b | 96e74d3d36a8394f2f9a094c1eebe9e292b8123f | /setup.py | 0041bc7c6a46b037e90af71d5841c6cc73fbef18 | []
| no_license | kundajelab/locusselect | cf8073c541c2b363f70ff6e54f3d607701f6a832 | aafebf5e43e514e824a36ae07f5336a683e17b88 | refs/heads/master | 2021-07-02T12:14:14.281841 | 2020-11-12T10:10:16 | 2020-11-12T10:10:16 | 195,343,578 | 2 | 2 | null | 2019-07-19T19:31:13 | 2019-07-05T05:23:27 | Jupyter Notebook | UTF-8 | Python | false | false | 1,040 | py | from setuptools import setup,find_packages
config = {
'include_package_data': True,
'description': 'Compute deep learning embeddings for narrowPeak files; compute pairwise distance between embeddings and cluster with tSNE',
'download_url': 'https://github.com/kundajelab/locusselect',
'version': '0.3',
'packages': ['locusselect'],
'setup_requires': [],
'install_requires': ['numpy>=1.9', 'keras>=2.2', 'h5py', 'pandas','deeplift'],
'scripts': [],
'entry_points': {'console_scripts': ['compute_nn_embeddings = locusselect.embeddings:main',
'compute_interpretation_scores = locusselect.interpret:main',
'compute_embedding_distances = locusselect.dist:main',
'visualize_embeddings =locusselect.vis:main',
'compute_kmer_embeddings = locusselect.gapped_kmers:main']},
'name': 'locusselect'
}
if __name__== '__main__':
setup(**config)
| [
"[email protected]"
]
| |
9fddbd257c40305611a75397a400ebbb4e82b974 | c89e59b4d018e8a2d7dc0dbc3bb7a3768024f849 | /before2021/python/문제풀이/day6/7_건물세우기.py | 2d5eeac2e65309d19d45f900abf451c444c92311 | []
| no_license | leeiopd/algorithm | ff32103a43e467a5a091257cc07cf35365ecbf91 | e41647d3918c3099110d97f455c5ebf9a38d571e | refs/heads/master | 2023-03-08T23:46:34.919991 | 2023-02-22T09:39:46 | 2023-02-22T09:39:46 | 166,131,885 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,108 | py | '''
(주)정올 is planning to build several new buildings, and the sites for them have already been selected.
The cost of erecting each building at each site has been estimated. For example, look at the table below:
   1  2  3
A  4  7  3
B  2  6  1
C  3  9  4
A, B and C denote buildings, and 1, 2 and 3 denote sites.
For example, erecting building B at site 1 costs 2, at site 2 costs 6, and at site 3 costs 1.
Of course, only one building can be erected at each site. If A is placed at site 2, B at site 3 and C at site 1, the total cost is 7+1+3 = 11.
However, placing A at 3, B at 1 and C at 2 costs 3+2+9 = 14.
Write a program that determines at which site each building should be erected so that the total cost is minimized.
The first line of the input file contains the number of buildings n (1≤n≤10).
The next n lines contain the cost of erecting each building at each site; each line holds n numbers.
Each cost is at least 1 and less than 100.
Print the minimum cost on the first line.
'''
import sys
sys.stdin = open("7_input.txt")
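# Worked check (assumption: 7_input.txt holds the 3x3 example from the statement):
#   3
#   4 7 3
#   2 6 1
#   3 9 4
# The optimal assignment is A->site 2 (7), B->site 3 (1), C->site 1 (3),
# so the program would print 11.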
N = int(input())
top = -1
visited = [-99] * N
maps = []
for y in range(N):
maps.append(list(map(int, input().split())))
result = 9999999999999
def check():
global top, result, visited
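    # depth-first search over building-to-site assignments, pruning any branch
    # whose partial cost already exceeds the best total found so far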
if top == N-1:
add = 0
for i in range(N):
add += maps[i][visited[i]]
if add > result:
return
if add < result:
result = add
return
add = 0
for i in range(top):
add += maps[i][visited[i]]
if add > result:
return
for i in range(N):
if i not in visited:
top += 1
visited[top] = i
check()
visited[top] = -99
top -= 1
check()
print(result) | [
"[email protected]"
]
| |
9335ee01004cfda281685a47d13ad9f9b197d21d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02392/s373045640.py | 22f5aea0a5d5d4db08ccc0376b1cb0908b99e83c | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | a, b, c = eval(input().replace(' ', ','))
print("Yes" if a < b < c else "No") | [
"[email protected]"
]
| |
6f8e8834f52ca848594d52208b0a41a8329f2961 | d23735419170bc51979bd37e6e82909ded61b818 | /image_lucida_project/image_lucida_app/context-processors.py | 97126abdacf5c034ac3bf03e29879d31545689ac | []
| no_license | ZoeLeBlanc/ImageLucida | 7f691d956faa2c5490c7a6b7d1ab69439636ea58 | edf41df372f31cac0682b9bfec85b2846b78a936 | refs/heads/main | 2023-02-20T12:41:20.372916 | 2022-05-30T02:12:26 | 2022-05-30T02:12:26 | 82,299,769 | 1 | 0 | null | 2023-02-15T18:32:12 | 2017-02-17T13:26:13 | Python | UTF-8 | Python | false | false | 1,665 | py | from image_lucida_app.models import *
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.models import User
def navigation(context):
"""
The navigation context processor generates the dynamic navbar for the index.html template.
"""
if context.user.is_authenticated():
        welcome_message = 'Welcome ' + context.user.username
list_of_nav = [
{
'name':'Projects',
'link': '/projects/',
'prop': 'right',
'nav-loc': 'main'
},
{
'name':'Play',
'link': '/play/',
'prop': 'right',
'nav-loc': 'main'
},
{
'name': 'Profile',
'link': '/profile/',
'prop': 'right',
'nav-loc': 'main'
},
{
'name': welcome_message,
'link': '#',
'nav-loc': 'side'
},
{
'name':'Logout',
'link': '/logout/',
'nav-loc': 'side'
},
]
else:
# if user is not logged in show 0 next to cart
list_of_nav = [
{
'name':'Register',
'link': '/register/'
},
{
'name':'Login',
'link': '/login/'
}
]
return {'navigation': list_of_nav}
| [
"[email protected]"
]
| |
6ba1f46941ad26b0e5603b42a8af33aca521b913 | 40832e48ef481b4141435459afeaf0706fa6dc13 | /flask/crud/app.py | 0850249ba85737b18275e868b63a0dc907261a18 | []
| no_license | drgarcia1986/python-web-frameworks-crud | 93d85c9c35ae016d21d13549081fab84c33dbfe6 | 1fe110a3af5320ec6aecf277a45f61e3fc7df8be | refs/heads/master | 2021-01-01T16:49:52.529747 | 2015-09-14T13:39:13 | 2015-09-14T13:39:13 | 39,610,696 | 4 | 1 | null | 2015-09-14T13:39:14 | 2015-07-24T03:53:36 | Python | UTF-8 | Python | false | false | 621 | py | # -*- coding: utf-8 -*-
from flask import Flask
from .extensions import db
from .person.views import person_bp
def create_app():
app = Flask(__name__)
app.config.update(
SQLALCHEMY_DATABASE_URI='sqlite:///database.db'
)
register_extensions(app)
    register_blueprints(app)
create_database(app)
return app
def register_extensions(app):
db.init_app(app)
def register_blueprints(app):
app.register_blueprint(person_bp, url_prefix='/api/persons')
def create_database(app):
from .person.models import Person # noqa
with app.app_context():
db.create_all()
| [
"[email protected]"
]
| |
dfb10b03ea40082c08212ec8ee5ac256d02b0a38 | 29705cfa764b8800a4f611044bb441ae2dbb517e | /ctpbee/date.py | 7c0043958979e8278289aa71453ed442a804afe5 | [
"MIT"
]
| permissive | ctpbee/ctpbee | 98c720a54999e9c4bb242848a9cd4363f96ea2e1 | 217b73da65931213c1af4733741014d05b3a8bac | refs/heads/master | 2023-03-16T12:47:01.260983 | 2023-03-13T05:49:51 | 2023-03-13T05:49:51 | 202,876,271 | 665 | 186 | MIT | 2023-09-12T12:33:29 | 2019-08-17T12:08:53 | Python | UTF-8 | Python | false | false | 145,684 | py | import datetime
trade_dates = [
'1990-12-19',
'1990-12-20',
'1990-12-21',
'1990-12-24',
'1990-12-25',
'1990-12-26',
'1990-12-27',
'1990-12-28',
'1990-12-31',
'1991-01-02',
'1991-01-03',
'1991-01-04',
'1991-01-07',
'1991-01-08',
'1991-01-09',
'1991-01-10',
'1991-01-11',
'1991-01-14',
'1991-01-15',
'1991-01-16',
'1991-01-17',
'1991-01-18',
'1991-01-21',
'1991-01-22',
'1991-01-23',
'1991-01-24',
'1991-01-25',
'1991-01-28',
'1991-01-29',
'1991-01-30',
'1991-01-31',
'1991-02-01',
'1991-02-04',
'1991-02-05',
'1991-02-06',
'1991-02-07',
'1991-02-08',
'1991-02-11',
'1991-02-12',
'1991-02-13',
'1991-02-14',
'1991-02-19',
'1991-02-20',
'1991-02-21',
'1991-02-22',
'1991-02-25',
'1991-02-26',
'1991-02-27',
'1991-02-28',
'1991-03-01',
'1991-03-04',
'1991-03-05',
'1991-03-06',
'1991-03-07',
'1991-03-08',
'1991-03-11',
'1991-03-12',
'1991-03-13',
'1991-03-14',
'1991-03-15',
'1991-03-18',
'1991-03-19',
'1991-03-20',
'1991-03-21',
'1991-03-22',
'1991-03-25',
'1991-03-26',
'1991-03-27',
'1991-03-28',
'1991-03-29',
'1991-04-01',
'1991-04-02',
'1991-04-03',
'1991-04-04',
'1991-04-05',
'1991-04-08',
'1991-04-09',
'1991-04-10',
'1991-04-11',
'1991-04-12',
'1991-04-15',
'1991-04-16',
'1991-04-17',
'1991-04-18',
'1991-04-19',
'1991-04-22',
'1991-04-23',
'1991-04-24',
'1991-04-25',
'1991-04-26',
'1991-04-29',
'1991-04-30',
'1991-05-02',
'1991-05-03',
'1991-05-06',
'1991-05-07',
'1991-05-08',
'1991-05-09',
'1991-05-10',
'1991-05-13',
'1991-05-14',
'1991-05-15',
'1991-05-16',
'1991-05-17',
'1991-05-20',
'1991-05-21',
'1991-05-22',
'1991-05-23',
'1991-05-24',
'1991-05-27',
'1991-05-28',
'1991-05-29',
'1991-05-30',
'1991-05-31',
'1991-06-03',
'1991-06-04',
'1991-06-05',
'1991-06-06',
'1991-06-07',
'1991-06-10',
'1991-06-11',
'1991-06-12',
'1991-06-13',
'1991-06-14',
'1991-06-17',
'1991-06-18',
'1991-06-19',
'1991-06-20',
'1991-06-21',
'1991-06-24',
'1991-06-25',
'1991-06-26',
'1991-06-27',
'1991-06-28',
'1991-07-01',
'1991-07-02',
'1991-07-03',
'1991-07-04',
'1991-07-05',
'1991-07-08',
'1991-07-09',
'1991-07-10',
'1991-07-11',
'1991-07-12',
'1991-07-15',
'1991-07-16',
'1991-07-17',
'1991-07-18',
'1991-07-19',
'1991-07-22',
'1991-07-23',
'1991-07-24',
'1991-07-25',
'1991-07-26',
'1991-07-29',
'1991-07-30',
'1991-07-31',
'1991-08-01',
'1991-08-02',
'1991-08-05',
'1991-08-06',
'1991-08-07',
'1991-08-08',
'1991-08-09',
'1991-08-12',
'1991-08-13',
'1991-08-14',
'1991-08-15',
'1991-08-16',
'1991-08-19',
'1991-08-20',
'1991-08-21',
'1991-08-22',
'1991-08-23',
'1991-08-26',
'1991-08-27',
'1991-08-28',
'1991-08-29',
'1991-08-30',
'1991-09-02',
'1991-09-03',
'1991-09-04',
'1991-09-05',
'1991-09-06',
'1991-09-09',
'1991-09-10',
'1991-09-11',
'1991-09-12',
'1991-09-13',
'1991-09-16',
'1991-09-17',
'1991-09-18',
'1991-09-19',
'1991-09-20',
'1991-09-23',
'1991-09-24',
'1991-09-25',
'1991-09-26',
'1991-09-27',
'1991-09-30',
'1991-10-03',
'1991-10-04',
'1991-10-07',
'1991-10-08',
'1991-10-09',
'1991-10-10',
'1991-10-11',
'1991-10-14',
'1991-10-15',
'1991-10-16',
'1991-10-17',
'1991-10-18',
'1991-10-21',
'1991-10-22',
'1991-10-23',
'1991-10-24',
'1991-10-25',
'1991-10-28',
'1991-10-29',
'1991-10-30',
'1991-10-31',
'1991-11-01',
'1991-11-04',
'1991-11-05',
'1991-11-06',
'1991-11-07',
'1991-11-08',
'1991-11-11',
'1991-11-12',
'1991-11-13',
'1991-11-14',
'1991-11-15',
'1991-11-18',
'1991-11-19',
'1991-11-20',
'1991-11-21',
'1991-11-22',
'1991-11-25',
'1991-11-26',
'1991-11-27',
'1991-11-28',
'1991-11-29',
'1991-12-02',
'1991-12-03',
'1991-12-04',
'1991-12-05',
'1991-12-06',
'1991-12-09',
'1991-12-10',
'1991-12-11',
'1991-12-12',
'1991-12-13',
'1991-12-16',
'1991-12-17',
'1991-12-18',
'1991-12-19',
'1991-12-20',
'1991-12-23',
'1991-12-24',
'1991-12-25',
'1991-12-26',
'1991-12-27',
'1991-12-30',
'1991-12-31',
'1992-01-02',
'1992-01-03',
'1992-01-06',
'1992-01-07',
'1992-01-08',
'1992-01-09',
'1992-01-10',
'1992-01-13',
'1992-01-14',
'1992-01-15',
'1992-01-16',
'1992-01-17',
'1992-01-20',
'1992-01-21',
'1992-01-22',
'1992-01-23',
'1992-01-24',
'1992-01-27',
'1992-01-28',
'1992-01-29',
'1992-01-30',
'1992-01-31',
'1992-02-03',
'1992-02-07',
'1992-02-10',
'1992-02-11',
'1992-02-12',
'1992-02-13',
'1992-02-14',
'1992-02-17',
'1992-02-18',
'1992-02-19',
'1992-02-20',
'1992-02-21',
'1992-02-24',
'1992-02-25',
'1992-02-26',
'1992-02-27',
'1992-02-28',
'1992-03-02',
'1992-03-03',
'1992-03-04',
'1992-03-05',
'1992-03-06',
'1992-03-09',
'1992-03-10',
'1992-03-11',
'1992-03-12',
'1992-03-13',
'1992-03-16',
'1992-03-17',
'1992-03-18',
'1992-03-19',
'1992-03-20',
'1992-03-23',
'1992-03-24',
'1992-03-25',
'1992-03-26',
'1992-03-27',
'1992-03-30',
'1992-03-31',
'1992-04-01',
'1992-04-02',
'1992-04-03',
'1992-04-06',
'1992-04-07',
'1992-04-08',
'1992-04-09',
'1992-04-10',
'1992-04-13',
'1992-04-14',
'1992-04-15',
'1992-04-16',
'1992-04-17',
'1992-04-20',
'1992-04-21',
'1992-04-22',
'1992-04-23',
'1992-04-24',
'1992-04-27',
'1992-04-28',
'1992-04-29',
'1992-04-30',
'1992-05-04',
'1992-05-05',
'1992-05-06',
'1992-05-07',
'1992-05-08',
'1992-05-11',
'1992-05-12',
'1992-05-13',
'1992-05-14',
'1992-05-15',
'1992-05-18',
'1992-05-19',
'1992-05-20',
'1992-05-21',
'1992-05-22',
'1992-05-25',
'1992-05-26',
'1992-05-27',
'1992-05-28',
'1992-05-29',
'1992-06-01',
'1992-06-02',
'1992-06-03',
'1992-06-04',
'1992-06-05',
'1992-06-08',
'1992-06-09',
'1992-06-10',
'1992-06-11',
'1992-06-12',
'1992-06-15',
'1992-06-16',
'1992-06-17',
'1992-06-18',
'1992-06-19',
'1992-06-22',
'1992-06-23',
'1992-06-24',
'1992-06-25',
'1992-06-26',
'1992-06-29',
'1992-06-30',
'1992-07-01',
'1992-07-02',
'1992-07-03',
'1992-07-06',
'1992-07-07',
'1992-07-08',
'1992-07-09',
'1992-07-10',
'1992-07-13',
'1992-07-14',
'1992-07-15',
'1992-07-16',
'1992-07-17',
'1992-07-20',
'1992-07-21',
'1992-07-22',
'1992-07-23',
'1992-07-24',
'1992-07-27',
'1992-07-28',
'1992-07-29',
'1992-07-30',
'1992-07-31',
'1992-08-03',
'1992-08-04',
'1992-08-05',
'1992-08-06',
'1992-08-07',
'1992-08-10',
'1992-08-11',
'1992-08-12',
'1992-08-13',
'1992-08-14',
'1992-08-17',
'1992-08-18',
'1992-08-19',
'1992-08-20',
'1992-08-21',
'1992-08-24',
'1992-08-25',
'1992-08-26',
'1992-08-27',
'1992-08-28',
'1992-08-31',
'1992-09-01',
'1992-09-02',
'1992-09-03',
'1992-09-04',
'1992-09-07',
'1992-09-08',
'1992-09-09',
'1992-09-10',
'1992-09-11',
'1992-09-14',
'1992-09-15',
'1992-09-16',
'1992-09-17',
'1992-09-18',
'1992-09-21',
'1992-09-22',
'1992-09-23',
'1992-09-24',
'1992-09-25',
'1992-09-28',
'1992-09-29',
'1992-09-30',
'1992-10-05',
'1992-10-06',
'1992-10-07',
'1992-10-08',
'1992-10-09',
'1992-10-12',
'1992-10-13',
'1992-10-14',
'1992-10-15',
'1992-10-16',
'1992-10-19',
'1992-10-20',
'1992-10-21',
'1992-10-22',
'1992-10-23',
'1992-10-26',
'1992-10-27',
'1992-10-28',
'1992-10-29',
'1992-10-30',
'1992-11-02',
'1992-11-03',
'1992-11-04',
'1992-11-05',
'1992-11-06',
'1992-11-09',
'1992-11-10',
'1992-11-11',
'1992-11-12',
'1992-11-13',
'1992-11-16',
'1992-11-17',
'1992-11-18',
'1992-11-19',
'1992-11-20',
'1992-11-23',
'1992-11-24',
'1992-11-25',
'1992-11-26',
'1992-11-27',
'1992-11-30',
'1992-12-01',
'1992-12-02',
'1992-12-03',
'1992-12-04',
'1992-12-07',
'1992-12-08',
'1992-12-09',
'1992-12-10',
'1992-12-11',
'1992-12-14',
'1992-12-15',
'1992-12-16',
'1992-12-17',
'1992-12-18',
'1992-12-21',
'1992-12-22',
'1992-12-23',
'1992-12-24',
'1992-12-25',
'1992-12-28',
'1992-12-29',
'1992-12-30',
'1992-12-31',
'1993-01-04',
'1993-01-05',
'1993-01-06',
'1993-01-07',
'1993-01-08',
'1993-01-11',
'1993-01-12',
'1993-01-13',
'1993-01-14',
'1993-01-15',
'1993-01-18',
'1993-01-19',
'1993-01-20',
'1993-01-21',
'1993-01-22',
'1993-01-27',
'1993-01-28',
'1993-01-29',
'1993-02-01',
'1993-02-02',
'1993-02-03',
'1993-02-04',
'1993-02-05',
'1993-02-08',
'1993-02-09',
'1993-02-10',
'1993-02-11',
'1993-02-12',
'1993-02-15',
'1993-02-16',
'1993-02-17',
'1993-02-18',
'1993-02-19',
'1993-02-22',
'1993-02-23',
'1993-02-24',
'1993-02-25',
'1993-02-26',
'1993-03-01',
'1993-03-02',
'1993-03-03',
'1993-03-04',
'1993-03-05',
'1993-03-08',
'1993-03-09',
'1993-03-10',
'1993-03-11',
'1993-03-12',
'1993-03-15',
'1993-03-16',
'1993-03-17',
'1993-03-18',
'1993-03-19',
'1993-03-22',
'1993-03-23',
'1993-03-24',
'1993-03-25',
'1993-03-26',
'1993-03-29',
'1993-03-30',
'1993-03-31',
'1993-04-01',
'1993-04-02',
'1993-04-05',
'1993-04-06',
'1993-04-07',
'1993-04-08',
'1993-04-09',
'1993-04-12',
'1993-04-13',
'1993-04-14',
'1993-04-15',
'1993-04-16',
'1993-04-19',
'1993-04-20',
'1993-04-21',
'1993-04-22',
'1993-04-23',
'1993-04-26',
'1993-04-27',
'1993-04-28',
'1993-04-29',
'1993-04-30',
'1993-05-03',
'1993-05-04',
'1993-05-05',
'1993-05-06',
'1993-05-07',
'1993-05-10',
'1993-05-11',
'1993-05-12',
'1993-05-13',
'1993-05-14',
'1993-05-17',
'1993-05-18',
'1993-05-19',
'1993-05-20',
'1993-05-21',
'1993-05-24',
'1993-05-25',
'1993-05-26',
'1993-05-27',
'1993-05-28',
'1993-05-31',
'1993-06-01',
'1993-06-02',
'1993-06-03',
'1993-06-04',
'1993-06-07',
'1993-06-08',
'1993-06-09',
'1993-06-10',
'1993-06-11',
'1993-06-14',
'1993-06-15',
'1993-06-16',
'1993-06-17',
'1993-06-18',
'1993-06-21',
'1993-06-22',
'1993-06-23',
'1993-06-24',
'1993-06-25',
'1993-06-28',
'1993-06-29',
'1993-06-30',
'1993-07-01',
'1993-07-02',
'1993-07-05',
'1993-07-06',
'1993-07-07',
'1993-07-08',
'1993-07-09',
'1993-07-12',
'1993-07-13',
'1993-07-14',
'1993-07-15',
'1993-07-16',
'1993-07-19',
'1993-07-20',
'1993-07-21',
'1993-07-22',
'1993-07-23',
'1993-07-26',
'1993-07-27',
'1993-07-28',
'1993-07-29',
'1993-07-30',
'1993-08-02',
'1993-08-03',
'1993-08-04',
'1993-08-05',
'1993-08-06',
'1993-08-09',
'1993-08-10',
'1993-08-11',
'1993-08-12',
'1993-08-13',
'1993-08-16',
'1993-08-17',
'1993-08-18',
'1993-08-19',
'1993-08-20',
'1993-08-23',
'1993-08-24',
'1993-08-25',
'1993-08-26',
'1993-08-27',
'1993-08-30',
'1993-08-31',
'1993-09-01',
'1993-09-02',
'1993-09-03',
'1993-09-06',
'1993-09-07',
'1993-09-08',
'1993-09-09',
'1993-09-10',
'1993-09-13',
'1993-09-14',
'1993-09-15',
'1993-09-16',
'1993-09-17',
'1993-09-20',
'1993-09-21',
'1993-09-22',
'1993-09-23',
'1993-09-24',
'1993-09-27',
'1993-09-28',
'1993-09-29',
'1993-09-30',
'1993-10-04',
'1993-10-05',
'1993-10-06',
'1993-10-07',
'1993-10-08',
'1993-10-11',
'1993-10-12',
'1993-10-13',
'1993-10-14',
'1993-10-15',
'1993-10-18',
'1993-10-19',
'1993-10-20',
'1993-10-21',
'1993-10-22',
'1993-10-25',
'1993-10-26',
'1993-10-27',
'1993-10-28',
'1993-10-29',
'1993-11-01',
'1993-11-02',
'1993-11-03',
'1993-11-04',
'1993-11-05',
'1993-11-08',
'1993-11-09',
'1993-11-10',
'1993-11-11',
'1993-11-12',
'1993-11-15',
'1993-11-16',
'1993-11-17',
'1993-11-18',
'1993-11-19',
'1993-11-22',
'1993-11-23',
'1993-11-24',
'1993-11-25',
'1993-11-26',
'1993-11-29',
'1993-11-30',
'1993-12-01',
'1993-12-02',
'1993-12-03',
'1993-12-06',
'1993-12-07',
'1993-12-08',
'1993-12-09',
'1993-12-10',
'1993-12-13',
'1993-12-14',
'1993-12-15',
'1993-12-16',
'1993-12-17',
'1993-12-20',
'1993-12-21',
'1993-12-22',
'1993-12-23',
'1993-12-24',
'1993-12-27',
'1993-12-28',
'1993-12-29',
'1993-12-30',
'1993-12-31',
'1994-01-03',
'1994-01-04',
'1994-01-05',
'1994-01-06',
'1994-01-07',
'1994-01-10',
'1994-01-11',
'1994-01-12',
'1994-01-13',
'1994-01-14',
'1994-01-17',
'1994-01-18',
'1994-01-19',
'1994-01-20',
'1994-01-21',
'1994-01-24',
'1994-01-25',
'1994-01-26',
'1994-01-27',
'1994-01-28',
'1994-01-31',
'1994-02-01',
'1994-02-02',
'1994-02-03',
'1994-02-04',
'1994-02-14',
'1994-02-15',
'1994-02-16',
'1994-02-17',
'1994-02-18',
'1994-02-21',
'1994-02-22',
'1994-02-23',
'1994-02-24',
'1994-02-25',
'1994-02-28',
'1994-03-01',
'1994-03-02',
'1994-03-03',
'1994-03-04',
'1994-03-07',
'1994-03-08',
'1994-03-09',
'1994-03-10',
'1994-03-11',
'1994-03-14',
'1994-03-15',
'1994-03-16',
'1994-03-17',
'1994-03-18',
'1994-03-21',
'1994-03-22',
'1994-03-23',
'1994-03-24',
'1994-03-25',
'1994-03-28',
'1994-03-29',
'1994-03-30',
'1994-03-31',
'1994-04-01',
'1994-04-04',
'1994-04-05',
'1994-04-06',
'1994-04-07',
'1994-04-08',
'1994-04-11',
'1994-04-12',
'1994-04-13',
'1994-04-14',
'1994-04-15',
'1994-04-18',
'1994-04-19',
'1994-04-20',
'1994-04-21',
'1994-04-22',
'1994-04-25',
'1994-04-26',
'1994-04-27',
'1994-04-28',
'1994-04-29',
'1994-05-03',
'1994-05-04',
'1994-05-05',
'1994-05-06',
'1994-05-09',
'1994-05-10',
'1994-05-11',
'1994-05-12',
'1994-05-13',
'1994-05-16',
'1994-05-17',
'1994-05-18',
'1994-05-19',
'1994-05-20',
'1994-05-23',
'1994-05-24',
'1994-05-25',
'1994-05-26',
'1994-05-27',
'1994-05-30',
'1994-05-31',
'1994-06-01',
'1994-06-02',
'1994-06-03',
'1994-06-06',
'1994-06-07',
'1994-06-08',
'1994-06-09',
'1994-06-10',
'1994-06-13',
'1994-06-14',
'1994-06-15',
'1994-06-16',
'1994-06-17',
'1994-06-20',
'1994-06-21',
'1994-06-22',
'1994-06-23',
'1994-06-24',
'1994-06-27',
'1994-06-28',
'1994-06-29',
'1994-06-30',
'1994-07-01',
'1994-07-04',
'1994-07-05',
'1994-07-06',
'1994-07-07',
'1994-07-08',
'1994-07-11',
'1994-07-12',
'1994-07-13',
'1994-07-14',
'1994-07-15',
'1994-07-18',
'1994-07-19',
'1994-07-20',
'1994-07-21',
'1994-07-22',
'1994-07-25',
'1994-07-26',
'1994-07-27',
'1994-07-28',
'1994-07-29',
'1994-08-01',
'1994-08-02',
'1994-08-03',
'1994-08-04',
'1994-08-05',
'1994-08-08',
'1994-08-09',
'1994-08-10',
'1994-08-11',
'1994-08-12',
'1994-08-15',
'1994-08-16',
'1994-08-17',
'1994-08-18',
'1994-08-19',
'1994-08-22',
'1994-08-23',
'1994-08-24',
'1994-08-25',
'1994-08-26',
'1994-08-29',
'1994-08-30',
'1994-08-31',
'1994-09-01',
'1994-09-02',
'1994-09-05',
'1994-09-06',
'1994-09-07',
'1994-09-08',
'1994-09-09',
'1994-09-12',
'1994-09-13',
'1994-09-14',
'1994-09-15',
'1994-09-16',
'1994-09-19',
'1994-09-20',
'1994-09-21',
'1994-09-22',
'1994-09-23',
'1994-09-26',
'1994-09-27',
'1994-09-28',
'1994-09-29',
'1994-09-30',
'1994-10-05',
'1994-10-06',
'1994-10-07',
'1994-10-10',
'1994-10-11',
'1994-10-12',
'1994-10-13',
'1994-10-14',
'1994-10-17',
'1994-10-18',
'1994-10-19',
'1994-10-20',
'1994-10-21',
'1994-10-24',
'1994-10-25',
'1994-10-26',
'1994-10-27',
'1994-10-28',
'1994-10-31',
'1994-11-01',
'1994-11-02',
'1994-11-03',
'1994-11-04',
'1994-11-07',
'1994-11-08',
'1994-11-09',
'1994-11-10',
'1994-11-11',
'1994-11-14',
'1994-11-15',
'1994-11-16',
'1994-11-17',
'1994-11-18',
'1994-11-21',
'1994-11-22',
'1994-11-23',
'1994-11-24',
'1994-11-25',
'1994-11-28',
'1994-11-29',
'1994-11-30',
'1994-12-01',
'1994-12-02',
'1994-12-05',
'1994-12-06',
'1994-12-07',
'1994-12-08',
'1994-12-09',
'1994-12-12',
'1994-12-13',
'1994-12-14',
'1994-12-15',
'1994-12-16',
'1994-12-19',
'1994-12-20',
'1994-12-21',
'1994-12-22',
'1994-12-23',
'1994-12-26',
'1994-12-27',
'1994-12-28',
'1994-12-29',
'1994-12-30',
'1995-01-03',
'1995-01-04',
'1995-01-05',
'1995-01-06',
'1995-01-09',
'1995-01-10',
'1995-01-11',
'1995-01-12',
'1995-01-13',
'1995-01-16',
'1995-01-17',
'1995-01-18',
'1995-01-19',
'1995-01-20',
'1995-01-23',
'1995-01-24',
'1995-01-25',
'1995-01-26',
'1995-01-27',
'1995-02-06',
'1995-02-07',
'1995-02-08',
'1995-02-09',
'1995-02-10',
'1995-02-13',
'1995-02-14',
'1995-02-15',
'1995-02-16',
'1995-02-17',
'1995-02-20',
'1995-02-21',
'1995-02-22',
'1995-02-23',
'1995-02-24',
'1995-02-27',
'1995-02-28',
'1995-03-01',
'1995-03-02',
'1995-03-03',
'1995-03-06',
'1995-03-07',
'1995-03-08',
'1995-03-09',
'1995-03-10',
'1995-03-13',
'1995-03-14',
'1995-03-15',
'1995-03-16',
'1995-03-17',
'1995-03-20',
'1995-03-21',
'1995-03-22',
'1995-03-23',
'1995-03-24',
'1995-03-27',
'1995-03-28',
'1995-03-29',
'1995-03-30',
'1995-03-31',
'1995-04-03',
'1995-04-04',
'1995-04-05',
'1995-04-06',
'1995-04-07',
'1995-04-10',
'1995-04-11',
'1995-04-12',
'1995-04-13',
'1995-04-14',
'1995-04-17',
'1995-04-18',
'1995-04-19',
'1995-04-20',
'1995-04-21',
'1995-04-24',
'1995-04-25',
'1995-04-26',
'1995-04-27',
'1995-04-28',
'1995-05-02',
'1995-05-03',
'1995-05-04',
'1995-05-05',
'1995-05-08',
'1995-05-09',
'1995-05-10',
'1995-05-11',
'1995-05-12',
'1995-05-15',
'1995-05-16',
'1995-05-17',
'1995-05-18',
'1995-05-19',
'1995-05-22',
'1995-05-23',
'1995-05-24',
'1995-05-25',
'1995-05-26',
'1995-05-29',
'1995-05-30',
'1995-05-31',
'1995-06-01',
'1995-06-02',
'1995-06-05',
'1995-06-06',
'1995-06-07',
'1995-06-08',
'1995-06-09',
'1995-06-12',
'1995-06-13',
'1995-06-14',
'1995-06-15',
'1995-06-16',
'1995-06-19',
'1995-06-20',
'1995-06-21',
'1995-06-22',
'1995-06-23',
'1995-06-26',
'1995-06-27',
'1995-06-28',
'1995-06-29',
'1995-06-30',
'1995-07-03',
'1995-07-04',
'1995-07-05',
'1995-07-06',
'1995-07-07',
'1995-07-10',
'1995-07-11',
'1995-07-12',
'1995-07-13',
'1995-07-14',
'1995-07-17',
'1995-07-18',
'1995-07-19',
'1995-07-20',
'1995-07-21',
'1995-07-24',
'1995-07-25',
'1995-07-26',
'1995-07-27',
'1995-07-28',
'1995-07-31',
'1995-08-01',
'1995-08-02',
'1995-08-03',
'1995-08-04',
'1995-08-07',
'1995-08-08',
'1995-08-09',
'1995-08-10',
'1995-08-11',
'1995-08-14',
'1995-08-15',
'1995-08-16',
'1995-08-17',
'1995-08-18',
'1995-08-21',
'1995-08-22',
'1995-08-23',
'1995-08-24',
'1995-08-25',
'1995-08-28',
'1995-08-29',
'1995-08-30',
'1995-08-31',
'1995-09-01',
'1995-09-04',
'1995-09-05',
'1995-09-06',
'1995-09-07',
'1995-09-08',
'1995-09-11',
'1995-09-12',
'1995-09-13',
'1995-09-14',
'1995-09-15',
'1995-09-18',
'1995-09-19',
'1995-09-20',
'1995-09-21',
'1995-09-22',
'1995-09-25',
'1995-09-26',
'1995-09-27',
'1995-09-28',
'1995-09-29',
'1995-10-04',
'1995-10-05',
'1995-10-06',
'1995-10-09',
'1995-10-10',
'1995-10-11',
'1995-10-12',
'1995-10-13',
'1995-10-16',
'1995-10-17',
'1995-10-18',
'1995-10-19',
'1995-10-20',
'1995-10-23',
'1995-10-24',
'1995-10-25',
'1995-10-26',
'1995-10-27',
'1995-10-30',
'1995-10-31',
'1995-11-01',
'1995-11-02',
'1995-11-03',
'1995-11-06',
'1995-11-07',
'1995-11-08',
'1995-11-09',
'1995-11-10',
'1995-11-13',
'1995-11-14',
'1995-11-15',
'1995-11-16',
'1995-11-17',
'1995-11-20',
'1995-11-21',
'1995-11-22',
'1995-11-23',
'1995-11-24',
'1995-11-27',
'1995-11-28',
'1995-11-29',
'1995-11-30',
'1995-12-01',
'1995-12-04',
'1995-12-05',
'1995-12-06',
'1995-12-07',
'1995-12-08',
'1995-12-11',
'1995-12-12',
'1995-12-13',
'1995-12-14',
'1995-12-15',
'1995-12-18',
'1995-12-19',
'1995-12-20',
'1995-12-21',
'1995-12-22',
'1995-12-25',
'1995-12-26',
'1995-12-27',
'1995-12-28',
'1995-12-29',
'1996-01-02',
'1996-01-03',
'1996-01-04',
'1996-01-05',
'1996-01-08',
'1996-01-09',
'1996-01-10',
'1996-01-11',
'1996-01-12',
'1996-01-15',
'1996-01-16',
'1996-01-17',
'1996-01-18',
'1996-01-19',
'1996-01-22',
'1996-01-23',
'1996-01-24',
'1996-01-25',
'1996-01-26',
'1996-01-29',
'1996-01-30',
'1996-01-31',
'1996-02-01',
'1996-02-02',
'1996-02-05',
'1996-02-06',
'1996-02-07',
'1996-02-08',
'1996-02-09',
'1996-02-12',
'1996-02-13',
'1996-02-14',
'1996-02-15',
'1996-02-16',
'1996-03-04',
'1996-03-05',
'1996-03-06',
'1996-03-07',
'1996-03-08',
'1996-03-11',
'1996-03-12',
'1996-03-13',
'1996-03-14',
'1996-03-15',
'1996-03-18',
'1996-03-19',
'1996-03-20',
'1996-03-21',
'1996-03-22',
'1996-03-25',
'1996-03-26',
'1996-03-27',
'1996-03-28',
'1996-03-29',
'1996-04-01',
'1996-04-02',
'1996-04-03',
'1996-04-04',
'1996-04-05',
'1996-04-08',
'1996-04-09',
'1996-04-10',
'1996-04-11',
'1996-04-12',
'1996-04-15',
'1996-04-16',
'1996-04-17',
'1996-04-18',
'1996-04-19',
'1996-04-22',
'1996-04-23',
'1996-04-24',
'1996-04-25',
'1996-04-26',
'1996-04-29',
'1996-04-30',
'1996-05-02',
'1996-05-03',
'1996-05-06',
'1996-05-07',
'1996-05-08',
'1996-05-09',
'1996-05-10',
'1996-05-13',
'1996-05-14',
'1996-05-15',
'1996-05-16',
'1996-05-17',
'1996-05-20',
'1996-05-21',
'1996-05-22',
'1996-05-23',
'1996-05-24',
'1996-05-27',
'1996-05-28',
'1996-05-29',
'1996-05-30',
'1996-05-31',
'1996-06-03',
'1996-06-04',
'1996-06-05',
'1996-06-06',
'1996-06-07',
'1996-06-10',
'1996-06-11',
'1996-06-12',
'1996-06-13',
'1996-06-14',
'1996-06-17',
'1996-06-18',
'1996-06-19',
'1996-06-20',
'1996-06-21',
'1996-06-24',
'1996-06-25',
'1996-06-26',
'1996-06-27',
'1996-06-28',
'1996-07-01',
'1996-07-02',
'1996-07-03',
'1996-07-04',
'1996-07-05',
'1996-07-08',
'1996-07-09',
'1996-07-10',
'1996-07-11',
'1996-07-12',
'1996-07-15',
'1996-07-16',
'1996-07-17',
'1996-07-18',
'1996-07-19',
'1996-07-22',
'1996-07-23',
'1996-07-24',
'1996-07-25',
'1996-07-26',
'1996-07-29',
'1996-07-30',
'1996-07-31',
'1996-08-01',
'1996-08-02',
'1996-08-05',
'1996-08-06',
'1996-08-07',
'1996-08-08',
'1996-08-09',
'1996-08-12',
'1996-08-13',
'1996-08-14',
'1996-08-15',
'1996-08-16',
'1996-08-19',
'1996-08-20',
'1996-08-21',
'1996-08-22',
'1996-08-23',
'1996-08-26',
'1996-08-27',
'1996-08-28',
'1996-08-29',
'1996-08-30',
'1996-09-02',
'1996-09-03',
'1996-09-04',
'1996-09-05',
'1996-09-06',
'1996-09-09',
'1996-09-10',
'1996-09-11',
'1996-09-12',
'1996-09-13',
'1996-09-16',
'1996-09-17',
'1996-09-18',
'1996-09-19',
'1996-09-20',
'1996-09-23',
'1996-09-24',
'1996-09-25',
'1996-09-26',
'1996-09-27',
'1996-10-03',
'1996-10-04',
'1996-10-07',
'1996-10-08',
'1996-10-09',
'1996-10-10',
'1996-10-11',
'1996-10-14',
'1996-10-15',
'1996-10-16',
'1996-10-17',
'1996-10-18',
'1996-10-21',
'1996-10-22',
'1996-10-23',
'1996-10-24',
'1996-10-25',
'1996-10-28',
'1996-10-29',
'1996-10-30',
'1996-10-31',
'1996-11-01',
'1996-11-04',
'1996-11-05',
'1996-11-06',
'1996-11-07',
'1996-11-08',
'1996-11-11',
'1996-11-12',
'1996-11-13',
'1996-11-14',
'1996-11-15',
'1996-11-18',
'1996-11-19',
'1996-11-20',
'1996-11-21',
'1996-11-22',
'1996-11-25',
'1996-11-26',
'1996-11-27',
'1996-11-28',
'1996-11-29',
'1996-12-02',
'1996-12-03',
'1996-12-04',
'1996-12-05',
'1996-12-06',
'1996-12-09',
'1996-12-10',
'1996-12-11',
'1996-12-12',
'1996-12-13',
'1996-12-16',
'1996-12-17',
'1996-12-18',
'1996-12-19',
'1996-12-20',
'1996-12-23',
'1996-12-24',
'1996-12-25',
'1996-12-26',
'1996-12-27',
'1996-12-30',
'1996-12-31',
'1997-01-02',
'1997-01-03',
'1997-01-06',
'1997-01-07',
'1997-01-08',
'1997-01-09',
'1997-01-10',
'1997-01-13',
'1997-01-14',
'1997-01-15',
'1997-01-16',
'1997-01-17',
'1997-01-20',
'1997-01-21',
'1997-01-22',
'1997-01-23',
'1997-01-24',
'1997-01-27',
'1997-01-28',
'1997-01-29',
'1997-01-30',
'1997-01-31',
'1997-02-17',
'1997-02-18',
'1997-02-19',
'1997-02-20',
'1997-02-21',
'1997-02-24',
'1997-02-25',
'1997-02-26',
'1997-02-27',
'1997-02-28',
'1997-03-03',
'1997-03-04',
'1997-03-05',
'1997-03-06',
'1997-03-07',
'1997-03-10',
'1997-03-11',
'1997-03-12',
'1997-03-13',
'1997-03-14',
'1997-03-17',
'1997-03-18',
'1997-03-19',
'1997-03-20',
'1997-03-21',
'1997-03-24',
'1997-03-25',
'1997-03-26',
'1997-03-27',
'1997-03-28',
'1997-03-31',
'1997-04-01',
'1997-04-02',
'1997-04-03',
'1997-04-04',
'1997-04-07',
'1997-04-08',
'1997-04-09',
'1997-04-10',
'1997-04-11',
'1997-04-14',
'1997-04-15',
'1997-04-16',
'1997-04-17',
'1997-04-18',
'1997-04-21',
'1997-04-22',
'1997-04-23',
'1997-04-24',
'1997-04-25',
'1997-04-28',
'1997-04-29',
'1997-04-30',
'1997-05-05',
'1997-05-06',
'1997-05-07',
'1997-05-08',
'1997-05-09',
'1997-05-12',
'1997-05-13',
'1997-05-14',
'1997-05-15',
'1997-05-16',
'1997-05-19',
'1997-05-20',
'1997-05-21',
'1997-05-22',
'1997-05-23',
'1997-05-26',
'1997-05-27',
'1997-05-28',
'1997-05-29',
'1997-05-30',
'1997-06-02',
'1997-06-03',
'1997-06-04',
'1997-06-05',
'1997-06-06',
'1997-06-09',
'1997-06-10',
'1997-06-11',
'1997-06-12',
'1997-06-13',
'1997-06-16',
'1997-06-17',
'1997-06-18',
'1997-06-19',
'1997-06-20',
'1997-06-23',
'1997-06-24',
'1997-06-25',
'1997-06-26',
'1997-06-27',
'1997-07-02',
'1997-07-03',
'1997-07-04',
'1997-07-07',
'1997-07-08',
'1997-07-09',
'1997-07-10',
'1997-07-11',
'1997-07-14',
'1997-07-15',
'1997-07-16',
'1997-07-17',
'1997-07-18',
'1997-07-21',
'1997-07-22',
'1997-07-23',
'1997-07-24',
'1997-07-25',
'1997-07-28',
'1997-07-29',
'1997-07-30',
'1997-07-31',
'1997-08-01',
'1997-08-04',
'1997-08-05',
'1997-08-06',
'1997-08-07',
'1997-08-08',
'1997-08-11',
'1997-08-12',
'1997-08-13',
'1997-08-14',
'1997-08-15',
'1997-08-18',
'1997-08-19',
'1997-08-20',
'1997-08-21',
'1997-08-22',
'1997-08-25',
'1997-08-26',
'1997-08-27',
'1997-08-28',
'1997-08-29',
'1997-09-01',
'1997-09-02',
'1997-09-03',
'1997-09-04',
'1997-09-05',
'1997-09-08',
'1997-09-09',
'1997-09-10',
'1997-09-11',
'1997-09-12',
'1997-09-15',
'1997-09-16',
'1997-09-17',
'1997-09-18',
'1997-09-19',
'1997-09-22',
'1997-09-23',
'1997-09-24',
'1997-09-25',
'1997-09-26',
'1997-09-29',
'1997-09-30',
'1997-10-06',
'1997-10-07',
'1997-10-08',
'1997-10-09',
'1997-10-10',
'1997-10-13',
'1997-10-14',
'1997-10-15',
'1997-10-16',
'1997-10-17',
'1997-10-20',
'1997-10-21',
'1997-10-22',
'1997-10-23',
'1997-10-24',
'1997-10-27',
'1997-10-28',
'1997-10-29',
'1997-10-30',
'1997-10-31',
'1997-11-03',
'1997-11-04',
'1997-11-05',
'1997-11-06',
'1997-11-07',
'1997-11-10',
'1997-11-11',
'1997-11-12',
'1997-11-13',
'1997-11-14',
'1997-11-17',
'1997-11-18',
'1997-11-19',
'1997-11-20',
'1997-11-21',
'1997-11-24',
'1997-11-25',
'1997-11-26',
'1997-11-27',
'1997-11-28',
'1997-12-01',
'1997-12-02',
'1997-12-03',
'1997-12-04',
'1997-12-05',
'1997-12-08',
'1997-12-09',
'1997-12-10',
'1997-12-11',
'1997-12-12',
'1997-12-15',
'1997-12-16',
'1997-12-17',
'1997-12-18',
'1997-12-19',
'1997-12-22',
'1997-12-23',
'1997-12-24',
'1997-12-25',
'1997-12-26',
'1997-12-29',
'1997-12-30',
'1997-12-31',
'1998-01-05',
'1998-01-06',
'1998-01-07',
'1998-01-08',
'1998-01-09',
'1998-01-12',
'1998-01-13',
'1998-01-14',
'1998-01-15',
'1998-01-16',
'1998-01-19',
'1998-01-20',
'1998-01-21',
'1998-01-22',
'1998-01-23',
'1998-02-09',
'1998-02-10',
'1998-02-11',
'1998-02-12',
'1998-02-13',
'1998-02-16',
'1998-02-17',
'1998-02-18',
'1998-02-19',
'1998-02-20',
'1998-02-23',
'1998-02-24',
'1998-02-25',
'1998-02-26',
'1998-02-27',
'1998-03-02',
'1998-03-03',
'1998-03-04',
'1998-03-05',
'1998-03-06',
'1998-03-09',
'1998-03-10',
'1998-03-11',
'1998-03-12',
'1998-03-13',
'1998-03-16',
'1998-03-17',
'1998-03-18',
'1998-03-19',
'1998-03-20',
'1998-03-23',
'1998-03-24',
'1998-03-25',
'1998-03-26',
'1998-03-27',
'1998-03-30',
'1998-03-31',
'1998-04-01',
'1998-04-02',
'1998-04-03',
'1998-04-06',
'1998-04-07',
'1998-04-08',
'1998-04-09',
'1998-04-10',
'1998-04-13',
'1998-04-14',
'1998-04-15',
'1998-04-16',
'1998-04-17',
'1998-04-20',
'1998-04-21',
'1998-04-22',
'1998-04-23',
'1998-04-24',
'1998-04-27',
'1998-04-28',
'1998-04-29',
'1998-04-30',
'1998-05-04',
'1998-05-05',
'1998-05-06',
'1998-05-07',
'1998-05-08',
'1998-05-11',
'1998-05-12',
'1998-05-13',
'1998-05-14',
'1998-05-15',
'1998-05-18',
'1998-05-19',
'1998-05-20',
'1998-05-21',
'1998-05-22',
'1998-05-25',
'1998-05-26',
'1998-05-27',
'1998-05-28',
'1998-05-29',
'1998-06-01',
'1998-06-02',
'1998-06-03',
'1998-06-04',
'1998-06-05',
'1998-06-08',
'1998-06-09',
'1998-06-10',
'1998-06-11',
'1998-06-12',
'1998-06-15',
'1998-06-16',
'1998-06-17',
'1998-06-18',
'1998-06-19',
'1998-06-22',
'1998-06-23',
'1998-06-24',
'1998-06-25',
'1998-06-26',
'1998-06-29',
'1998-06-30',
'1998-07-01',
'1998-07-02',
'1998-07-03',
'1998-07-06',
'1998-07-07',
'1998-07-08',
'1998-07-09',
'1998-07-10',
'1998-07-13',
'1998-07-14',
'1998-07-15',
'1998-07-16',
'1998-07-17',
'1998-07-20',
'1998-07-21',
'1998-07-22',
'1998-07-23',
'1998-07-24',
'1998-07-27',
'1998-07-28',
'1998-07-29',
'1998-07-30',
'1998-07-31',
'1998-08-03',
'1998-08-04',
'1998-08-05',
'1998-08-06',
'1998-08-07',
'1998-08-10',
'1998-08-11',
'1998-08-12',
'1998-08-13',
'1998-08-14',
'1998-08-17',
'1998-08-18',
'1998-08-19',
'1998-08-20',
'1998-08-21',
'1998-08-24',
'1998-08-25',
'1998-08-26',
'1998-08-27',
'1998-08-28',
'1998-08-31',
'1998-09-01',
'1998-09-02',
'1998-09-03',
'1998-09-04',
'1998-09-07',
'1998-09-08',
'1998-09-09',
'1998-09-10',
'1998-09-11',
'1998-09-14',
'1998-09-15',
'1998-09-16',
'1998-09-17',
'1998-09-18',
'1998-09-21',
'1998-09-22',
'1998-09-23',
'1998-09-24',
'1998-09-25',
'1998-09-28',
'1998-09-29',
'1998-09-30',
'1998-10-05',
'1998-10-06',
'1998-10-07',
'1998-10-08',
'1998-10-09',
'1998-10-12',
'1998-10-13',
'1998-10-14',
'1998-10-15',
'1998-10-16',
'1998-10-19',
'1998-10-20',
'1998-10-21',
'1998-10-22',
'1998-10-23',
'1998-10-26',
'1998-10-27',
'1998-10-28',
'1998-10-29',
'1998-10-30',
'1998-11-02',
'1998-11-03',
'1998-11-04',
'1998-11-05',
'1998-11-06',
'1998-11-09',
'1998-11-10',
'1998-11-11',
'1998-11-12',
'1998-11-13',
'1998-11-16',
'1998-11-17',
'1998-11-18',
'1998-11-19',
'1998-11-20',
'1998-11-23',
'1998-11-24',
'1998-11-25',
'1998-11-26',
'1998-11-27',
'1998-11-30',
'1998-12-01',
'1998-12-02',
'1998-12-03',
'1998-12-04',
'1998-12-07',
'1998-12-08',
'1998-12-09',
'1998-12-10',
'1998-12-11',
'1998-12-14',
'1998-12-15',
'1998-12-16',
'1998-12-17',
'1998-12-18',
'1998-12-21',
'1998-12-22',
'1998-12-23',
'1998-12-24',
'1998-12-25',
'1998-12-28',
'1998-12-29',
'1998-12-30',
'1998-12-31',
'1999-01-04',
'1999-01-05',
'1999-01-06',
'1999-01-07',
'1999-01-08',
'1999-01-11',
'1999-01-12',
'1999-01-13',
'1999-01-14',
'1999-01-15',
'1999-01-18',
'1999-01-19',
'1999-01-20',
'1999-01-21',
'1999-01-22',
'1999-01-25',
'1999-01-26',
'1999-01-27',
'1999-01-28',
'1999-01-29',
'1999-02-01',
'1999-02-02',
'1999-02-03',
'1999-02-04',
'1999-02-05',
'1999-02-08',
'1999-02-09',
'1999-03-01',
'1999-03-02',
'1999-03-03',
'1999-03-04',
'1999-03-05',
'1999-03-08',
'1999-03-09',
'1999-03-10',
'1999-03-11',
'1999-03-12',
'1999-03-15',
'1999-03-16',
'1999-03-17',
'1999-03-18',
'1999-03-19',
'1999-03-22',
'1999-03-23',
'1999-03-24',
'1999-03-25',
'1999-03-26',
'1999-03-29',
'1999-03-30',
'1999-03-31',
'1999-04-01',
'1999-04-02',
'1999-04-05',
'1999-04-06',
'1999-04-07',
'1999-04-08',
'1999-04-09',
'1999-04-12',
'1999-04-13',
'1999-04-14',
'1999-04-15',
'1999-04-16',
'1999-04-19',
'1999-04-20',
'1999-04-21',
'1999-04-22',
'1999-04-23',
'1999-04-26',
'1999-04-27',
'1999-04-28',
'1999-04-29',
'1999-04-30',
'1999-05-04',
'1999-05-05',
'1999-05-06',
'1999-05-07',
'1999-05-10',
'1999-05-11',
'1999-05-12',
'1999-05-13',
'1999-05-14',
'1999-05-17',
'1999-05-18',
'1999-05-19',
'1999-05-20',
'1999-05-21',
'1999-05-24',
'1999-05-25',
'1999-05-26',
'1999-05-27',
'1999-05-28',
'1999-05-31',
'1999-06-01',
'1999-06-02',
'1999-06-03',
'1999-06-04',
'1999-06-07',
'1999-06-08',
'1999-06-09',
'1999-06-10',
'1999-06-11',
'1999-06-14',
'1999-06-15',
'1999-06-16',
'1999-06-17',
'1999-06-18',
'1999-06-21',
'1999-06-22',
'1999-06-23',
'1999-06-24',
'1999-06-25',
'1999-06-28',
'1999-06-29',
'1999-06-30',
'1999-07-01',
'1999-07-02',
'1999-07-05',
'1999-07-06',
'1999-07-07',
'1999-07-08',
'1999-07-09',
'1999-07-12',
'1999-07-13',
'1999-07-14',
'1999-07-15',
'1999-07-16',
'1999-07-19',
'1999-07-20',
'1999-07-21',
'1999-07-22',
'1999-07-23',
'1999-07-26',
'1999-07-27',
'1999-07-28',
'1999-07-29',
'1999-07-30',
'1999-08-02',
'1999-08-03',
'1999-08-04',
'1999-08-05',
'1999-08-06',
'1999-08-09',
'1999-08-10',
'1999-08-11',
'1999-08-12',
'1999-08-13',
'1999-08-16',
'1999-08-17',
'1999-08-18',
'1999-08-19',
'1999-08-20',
'1999-08-23',
'1999-08-24',
'1999-08-25',
'1999-08-26',
'1999-08-27',
'1999-08-30',
'1999-08-31',
'1999-09-01',
'1999-09-02',
'1999-09-03',
'1999-09-06',
'1999-09-07',
'1999-09-08',
'1999-09-09',
'1999-09-10',
'1999-09-13',
'1999-09-14',
'1999-09-15',
'1999-09-16',
'1999-09-17',
'1999-09-20',
'1999-09-21',
'1999-09-22',
'1999-09-23',
'1999-09-24',
'1999-09-27',
'1999-09-28',
'1999-09-29',
'1999-09-30',
'1999-10-08',
'1999-10-11',
'1999-10-12',
'1999-10-13',
'1999-10-14',
'1999-10-15',
'1999-10-18',
'1999-10-19',
'1999-10-20',
'1999-10-21',
'1999-10-22',
'1999-10-25',
'1999-10-26',
'1999-10-27',
'1999-10-28',
'1999-10-29',
'1999-11-01',
'1999-11-02',
'1999-11-03',
'1999-11-04',
'1999-11-05',
'1999-11-08',
'1999-11-09',
'1999-11-10',
'1999-11-11',
'1999-11-12',
'1999-11-15',
'1999-11-16',
'1999-11-17',
'1999-11-18',
'1999-11-19',
'1999-11-22',
'1999-11-23',
'1999-11-24',
'1999-11-25',
'1999-11-26',
'1999-11-29',
'1999-11-30',
'1999-12-01',
'1999-12-02',
'1999-12-03',
'1999-12-06',
'1999-12-07',
'1999-12-08',
'1999-12-09',
'1999-12-10',
'1999-12-13',
'1999-12-14',
'1999-12-15',
'1999-12-16',
'1999-12-17',
'1999-12-21',
'1999-12-22',
'1999-12-23',
'1999-12-24',
'1999-12-27',
'1999-12-28',
'1999-12-29',
'1999-12-30',
'2000-01-04',
'2000-01-05',
'2000-01-06',
'2000-01-07',
'2000-01-10',
'2000-01-11',
'2000-01-12',
'2000-01-13',
'2000-01-14',
'2000-01-17',
'2000-01-18',
'2000-01-19',
'2000-01-20',
'2000-01-21',
'2000-01-24',
'2000-01-25',
'2000-01-26',
'2000-01-27',
'2000-01-28',
'2000-02-14',
'2000-02-15',
'2000-02-16',
'2000-02-17',
'2000-02-18',
'2000-02-21',
'2000-02-22',
'2000-02-23',
'2000-02-24',
'2000-02-25',
'2000-02-28',
'2000-02-29',
'2000-03-01',
'2000-03-02',
'2000-03-03',
'2000-03-06',
'2000-03-07',
'2000-03-08',
'2000-03-09',
'2000-03-10',
'2000-03-13',
'2000-03-14',
'2000-03-15',
'2000-03-16',
'2000-03-17',
'2000-03-20',
'2000-03-21',
'2000-03-22',
'2000-03-23',
'2000-03-24',
'2000-03-27',
'2000-03-28',
'2000-03-29',
'2000-03-30',
'2000-03-31',
'2000-04-03',
'2000-04-04',
'2000-04-05',
'2000-04-06',
'2000-04-07',
'2000-04-10',
'2000-04-11',
'2000-04-12',
'2000-04-13',
'2000-04-14',
'2000-04-17',
'2000-04-18',
'2000-04-19',
'2000-04-20',
'2000-04-21',
'2000-04-24',
'2000-04-25',
'2000-04-26',
'2000-04-27',
'2000-04-28',
'2000-05-08',
'2000-05-09',
'2000-05-10',
'2000-05-11',
'2000-05-12',
'2000-05-15',
'2000-05-16',
'2000-05-17',
'2000-05-18',
'2000-05-19',
'2000-05-22',
'2000-05-23',
'2000-05-24',
'2000-05-25',
'2000-05-26',
'2000-05-29',
'2000-05-30',
'2000-05-31',
'2000-06-01',
'2000-06-02',
'2000-06-05',
'2000-06-06',
'2000-06-07',
'2000-06-08',
'2000-06-09',
'2000-06-12',
'2000-06-13',
'2000-06-14',
'2000-06-15',
'2000-06-16',
'2000-06-19',
'2000-06-20',
'2000-06-21',
'2000-06-22',
'2000-06-23',
'2000-06-26',
'2000-06-27',
'2000-06-28',
'2000-06-29',
'2000-06-30',
'2000-07-03',
'2000-07-04',
'2000-07-05',
'2000-07-06',
'2000-07-07',
'2000-07-10',
'2000-07-11',
'2000-07-12',
'2000-07-13',
'2000-07-14',
'2000-07-17',
'2000-07-18',
'2000-07-19',
'2000-07-20',
'2000-07-21',
'2000-07-24',
'2000-07-25',
'2000-07-26',
'2000-07-27',
'2000-07-28',
'2000-07-31',
'2000-08-01',
'2000-08-02',
'2000-08-03',
'2000-08-04',
'2000-08-07',
'2000-08-08',
'2000-08-09',
'2000-08-10',
'2000-08-11',
'2000-08-14',
'2000-08-15',
'2000-08-16',
'2000-08-17',
'2000-08-18',
'2000-08-21',
'2000-08-22',
'2000-08-23',
'2000-08-24',
'2000-08-25',
'2000-08-28',
'2000-08-29',
'2000-08-30',
'2000-08-31',
'2000-09-01',
'2000-09-04',
'2000-09-05',
'2000-09-06',
'2000-09-07',
'2000-09-08',
'2000-09-11',
'2000-09-12',
'2000-09-13',
'2000-09-14',
'2000-09-15',
'2000-09-18',
'2000-09-19',
'2000-09-20',
'2000-09-21',
'2000-09-22',
'2000-09-25',
'2000-09-26',
'2000-09-27',
'2000-09-28',
'2000-09-29',
'2000-10-09',
'2000-10-10',
'2000-10-11',
'2000-10-12',
'2000-10-13',
'2000-10-16',
'2000-10-17',
'2000-10-18',
'2000-10-19',
'2000-10-20',
'2000-10-23',
'2000-10-24',
'2000-10-25',
'2000-10-26',
'2000-10-27',
'2000-10-30',
'2000-10-31',
'2000-11-01',
'2000-11-02',
'2000-11-03',
'2000-11-06',
'2000-11-07',
'2000-11-08',
'2000-11-09',
'2000-11-10',
'2000-11-13',
'2000-11-14',
'2000-11-15',
'2000-11-16',
'2000-11-17',
'2000-11-20',
'2000-11-21',
'2000-11-22',
'2000-11-23',
'2000-11-24',
'2000-11-27',
'2000-11-28',
'2000-11-29',
'2000-11-30',
'2000-12-01',
'2000-12-04',
'2000-12-05',
'2000-12-06',
'2000-12-07',
'2000-12-08',
'2000-12-11',
'2000-12-12',
'2000-12-13',
'2000-12-14',
'2000-12-15',
'2000-12-18',
'2000-12-19',
'2000-12-20',
'2000-12-21',
'2000-12-22',
'2000-12-25',
'2000-12-26',
'2000-12-27',
'2000-12-28',
'2000-12-29',
'2001-01-02',
'2001-01-03',
'2001-01-04',
'2001-01-05',
'2001-01-08',
'2001-01-09',
'2001-01-10',
'2001-01-11',
'2001-01-12',
'2001-01-15',
'2001-01-16',
'2001-01-17',
'2001-01-18',
'2001-01-19',
'2001-02-05',
'2001-02-06',
'2001-02-07',
'2001-02-08',
'2001-02-09',
'2001-02-12',
'2001-02-13',
'2001-02-14',
'2001-02-15',
'2001-02-16',
'2001-02-19',
'2001-02-20',
'2001-02-21',
'2001-02-22',
'2001-02-23',
'2001-02-26',
'2001-02-27',
'2001-02-28',
'2001-03-01',
'2001-03-02',
'2001-03-05',
'2001-03-06',
'2001-03-07',
'2001-03-08',
'2001-03-09',
'2001-03-12',
'2001-03-13',
'2001-03-14',
'2001-03-15',
'2001-03-16',
'2001-03-19',
'2001-03-20',
'2001-03-21',
'2001-03-22',
'2001-03-23',
'2001-03-26',
'2001-03-27',
'2001-03-28',
'2001-03-29',
'2001-03-30',
'2001-04-02',
'2001-04-03',
'2001-04-04',
'2001-04-05',
'2001-04-06',
'2001-04-09',
'2001-04-10',
'2001-04-11',
'2001-04-12',
'2001-04-13',
'2001-04-16',
'2001-04-17',
'2001-04-18',
'2001-04-19',
'2001-04-20',
'2001-04-23',
'2001-04-24',
'2001-04-25',
'2001-04-26',
'2001-04-27',
'2001-04-30',
'2001-05-08',
'2001-05-09',
'2001-05-10',
'2001-05-11',
'2001-05-14',
'2001-05-15',
'2001-05-16',
'2001-05-17',
'2001-05-18',
'2001-05-21',
'2001-05-22',
'2001-05-23',
'2001-05-24',
'2001-05-25',
'2001-05-28',
'2001-05-29',
'2001-05-30',
'2001-05-31',
'2001-06-01',
'2001-06-04',
'2001-06-05',
'2001-06-06',
'2001-06-07',
'2001-06-08',
'2001-06-11',
'2001-06-12',
'2001-06-13',
'2001-06-14',
'2001-06-15',
'2001-06-18',
'2001-06-19',
'2001-06-20',
'2001-06-21',
'2001-06-22',
'2001-06-25',
'2001-06-26',
'2001-06-27',
'2001-06-28',
'2001-06-29',
'2001-07-02',
'2001-07-03',
'2001-07-04',
'2001-07-05',
'2001-07-06',
'2001-07-09',
'2001-07-10',
'2001-07-11',
'2001-07-12',
'2001-07-13',
'2001-07-16',
'2001-07-17',
'2001-07-18',
'2001-07-19',
'2001-07-20',
'2001-07-23',
'2001-07-24',
'2001-07-25',
'2001-07-26',
'2001-07-27',
'2001-07-30',
'2001-07-31',
'2001-08-01',
'2001-08-02',
'2001-08-03',
'2001-08-06',
'2001-08-07',
'2001-08-08',
'2001-08-09',
'2001-08-10',
'2001-08-13',
'2001-08-14',
'2001-08-15',
'2001-08-16',
'2001-08-17',
'2001-08-20',
'2001-08-21',
'2001-08-22',
'2001-08-23',
'2001-08-24',
'2001-08-27',
'2001-08-28',
'2001-08-29',
'2001-08-30',
'2001-08-31',
'2001-09-03',
'2001-09-04',
'2001-09-05',
'2001-09-06',
'2001-09-07',
'2001-09-10',
'2001-09-11',
'2001-09-12',
'2001-09-13',
'2001-09-14',
'2001-09-17',
'2001-09-18',
'2001-09-19',
'2001-09-20',
'2001-09-21',
'2001-09-24',
'2001-09-25',
'2001-09-26',
'2001-09-27',
'2001-09-28',
'2001-10-08',
'2001-10-09',
'2001-10-10',
'2001-10-11',
'2001-10-12',
'2001-10-15',
'2001-10-16',
'2001-10-17',
'2001-10-18',
'2001-10-19',
'2001-10-22',
'2001-10-23',
'2001-10-24',
'2001-10-25',
'2001-10-26',
'2001-10-29',
'2001-10-30',
'2001-10-31',
'2001-11-01',
'2001-11-02',
'2001-11-05',
'2001-11-06',
'2001-11-07',
'2001-11-08',
'2001-11-09',
'2001-11-12',
'2001-11-13',
'2001-11-14',
'2001-11-15',
'2001-11-16',
'2001-11-19',
'2001-11-20',
'2001-11-21',
'2001-11-22',
'2001-11-23',
'2001-11-26',
'2001-11-27',
'2001-11-28',
'2001-11-29',
'2001-11-30',
'2001-12-03',
'2001-12-04',
'2001-12-05',
'2001-12-06',
'2001-12-07',
'2001-12-10',
'2001-12-11',
'2001-12-12',
'2001-12-13',
'2001-12-14',
'2001-12-17',
'2001-12-18',
'2001-12-19',
'2001-12-20',
'2001-12-21',
'2001-12-24',
'2001-12-25',
'2001-12-26',
'2001-12-27',
'2001-12-28',
'2001-12-31',
'2002-01-04',
'2002-01-07',
'2002-01-08',
'2002-01-09',
'2002-01-10',
'2002-01-11',
'2002-01-14',
'2002-01-15',
'2002-01-16',
'2002-01-17',
'2002-01-18',
'2002-01-21',
'2002-01-22',
'2002-01-23',
'2002-01-24',
'2002-01-25',
'2002-01-28',
'2002-01-29',
'2002-01-30',
'2002-01-31',
'2002-02-01',
'2002-02-04',
'2002-02-05',
'2002-02-06',
'2002-02-07',
'2002-02-08',
'2002-02-25',
'2002-02-26',
'2002-02-27',
'2002-02-28',
'2002-03-01',
'2002-03-04',
'2002-03-05',
'2002-03-06',
'2002-03-07',
'2002-03-08',
'2002-03-11',
'2002-03-12',
'2002-03-13',
'2002-03-14',
'2002-03-15',
'2002-03-18',
'2002-03-19',
'2002-03-20',
'2002-03-21',
'2002-03-22',
'2002-03-25',
'2002-03-26',
'2002-03-27',
'2002-03-28',
'2002-03-29',
'2002-04-01',
'2002-04-02',
'2002-04-03',
'2002-04-04',
'2002-04-05',
'2002-04-08',
'2002-04-09',
'2002-04-10',
'2002-04-11',
'2002-04-12',
'2002-04-15',
'2002-04-16',
'2002-04-17',
'2002-04-18',
'2002-04-19',
'2002-04-22',
'2002-04-23',
'2002-04-24',
'2002-04-25',
'2002-04-26',
'2002-04-29',
'2002-04-30',
'2002-05-08',
'2002-05-09',
'2002-05-10',
'2002-05-13',
'2002-05-14',
'2002-05-15',
'2002-05-16',
'2002-05-17',
'2002-05-20',
'2002-05-21',
'2002-05-22',
'2002-05-23',
'2002-05-24',
'2002-05-27',
'2002-05-28',
'2002-05-29',
'2002-05-30',
'2002-05-31',
'2002-06-03',
'2002-06-04',
'2002-06-05',
'2002-06-06',
'2002-06-07',
'2002-06-10',
'2002-06-11',
'2002-06-12',
'2002-06-13',
'2002-06-14',
'2002-06-17',
'2002-06-18',
'2002-06-19',
'2002-06-20',
'2002-06-21',
'2002-06-24',
'2002-06-25',
'2002-06-26',
'2002-06-27',
'2002-06-28',
'2002-07-01',
'2002-07-02',
'2002-07-03',
'2002-07-04',
'2002-07-05',
'2002-07-08',
'2002-07-09',
'2002-07-10',
'2002-07-11',
'2002-07-12',
'2002-07-15',
'2002-07-16',
'2002-07-17',
'2002-07-18',
'2002-07-19',
'2002-07-22',
'2002-07-23',
'2002-07-24',
'2002-07-25',
'2002-07-26',
'2002-07-29',
'2002-07-30',
'2002-07-31',
'2002-08-01',
'2002-08-02',
'2002-08-05',
'2002-08-06',
'2002-08-07',
'2002-08-08',
'2002-08-09',
'2002-08-12',
'2002-08-13',
'2002-08-14',
'2002-08-15',
'2002-08-16',
'2002-08-19',
'2002-08-20',
'2002-08-21',
'2002-08-22',
'2002-08-23',
'2002-08-26',
'2002-08-27',
'2002-08-28',
'2002-08-29',
'2002-08-30',
'2002-09-02',
'2002-09-03',
'2002-09-04',
'2002-09-05',
'2002-09-06',
'2002-09-09',
'2002-09-10',
'2002-09-11',
'2002-09-12',
'2002-09-13',
'2002-09-16',
'2002-09-17',
'2002-09-18',
'2002-09-19',
'2002-09-20',
'2002-09-23',
'2002-09-24',
'2002-09-25',
'2002-09-26',
'2002-09-27',
'2002-10-08',
'2002-10-09',
'2002-10-10',
'2002-10-11',
'2002-10-14',
'2002-10-15',
'2002-10-16',
'2002-10-17',
'2002-10-18',
'2002-10-21',
'2002-10-22',
'2002-10-23',
'2002-10-24',
'2002-10-25',
'2002-10-28',
'2002-10-29',
'2002-10-30',
'2002-10-31',
'2002-11-01',
'2002-11-04',
'2002-11-05',
'2002-11-06',
'2002-11-07',
'2002-11-08',
'2002-11-11',
'2002-11-12',
'2002-11-13',
'2002-11-14',
'2002-11-15',
'2002-11-18',
'2002-11-19',
'2002-11-20',
'2002-11-21',
'2002-11-22',
'2002-11-25',
'2002-11-26',
'2002-11-27',
'2002-11-28',
'2002-11-29',
'2002-12-02',
'2002-12-03',
'2002-12-04',
'2002-12-05',
'2002-12-06',
'2002-12-09',
'2002-12-10',
'2002-12-11',
'2002-12-12',
'2002-12-13',
'2002-12-16',
'2002-12-17',
'2002-12-18',
'2002-12-19',
'2002-12-20',
'2002-12-23',
'2002-12-24',
'2002-12-25',
'2002-12-26',
'2002-12-27',
'2002-12-30',
'2002-12-31',
'2003-01-02',
'2003-01-03',
'2003-01-06',
'2003-01-07',
'2003-01-08',
'2003-01-09',
'2003-01-10',
'2003-01-13',
'2003-01-14',
'2003-01-15',
'2003-01-16',
'2003-01-17',
'2003-01-20',
'2003-01-21',
'2003-01-22',
'2003-01-23',
'2003-01-24',
'2003-01-27',
'2003-01-28',
'2003-01-29',
'2003-02-10',
'2003-02-11',
'2003-02-12',
'2003-02-13',
'2003-02-14',
'2003-02-17',
'2003-02-18',
'2003-02-19',
'2003-02-20',
'2003-02-21',
'2003-02-24',
'2003-02-25',
'2003-02-26',
'2003-02-27',
'2003-02-28',
'2003-03-03',
'2003-03-04',
'2003-03-05',
'2003-03-06',
'2003-03-07',
'2003-03-10',
'2003-03-11',
'2003-03-12',
'2003-03-13',
'2003-03-14',
'2003-03-17',
'2003-03-18',
'2003-03-19',
'2003-03-20',
'2003-03-21',
'2003-03-24',
'2003-03-25',
'2003-03-26',
'2003-03-27',
'2003-03-28',
'2003-03-31',
'2003-04-01',
'2003-04-02',
'2003-04-03',
'2003-04-04',
'2003-04-07',
'2003-04-08',
'2003-04-09',
'2003-04-10',
'2003-04-11',
'2003-04-14',
'2003-04-15',
'2003-04-16',
'2003-04-17',
'2003-04-18',
'2003-04-21',
'2003-04-22',
'2003-04-23',
'2003-04-24',
'2003-04-25',
'2003-04-28',
'2003-04-29',
'2003-04-30',
'2003-05-12',
'2003-05-13',
'2003-05-14',
'2003-05-15',
'2003-05-16',
'2003-05-19',
'2003-05-20',
'2003-05-21',
'2003-05-22',
'2003-05-23',
'2003-05-26',
'2003-05-27',
'2003-05-28',
'2003-05-29',
'2003-05-30',
'2003-06-02',
'2003-06-03',
'2003-06-04',
'2003-06-05',
'2003-06-06',
'2003-06-09',
'2003-06-10',
'2003-06-11',
'2003-06-12',
'2003-06-13',
'2003-06-16',
'2003-06-17',
'2003-06-18',
'2003-06-19',
'2003-06-20',
'2003-06-23',
'2003-06-24',
'2003-06-25',
'2003-06-26',
'2003-06-27',
'2003-06-30',
'2003-07-01',
'2003-07-02',
'2003-07-03',
'2003-07-04',
'2003-07-07',
'2003-07-08',
'2003-07-09',
'2003-07-10',
'2003-07-11',
'2003-07-14',
'2003-07-15',
'2003-07-16',
'2003-07-17',
'2003-07-18',
'2003-07-21',
'2003-07-22',
'2003-07-23',
'2003-07-24',
'2003-07-25',
'2003-07-28',
'2003-07-29',
'2003-07-30',
'2003-07-31',
'2003-08-01',
'2003-08-04',
'2003-08-05',
'2003-08-06',
'2003-08-07',
'2003-08-08',
'2003-08-11',
'2003-08-12',
'2003-08-13',
'2003-08-14',
'2003-08-15',
'2003-08-18',
'2003-08-19',
'2003-08-20',
'2003-08-21',
'2003-08-22',
'2003-08-25',
'2003-08-26',
'2003-08-27',
'2003-08-28',
'2003-08-29',
'2003-09-01',
'2003-09-02',
'2003-09-03',
'2003-09-04',
'2003-09-05',
'2003-09-08',
'2003-09-09',
'2003-09-10',
'2003-09-11',
'2003-09-12',
'2003-09-15',
'2003-09-16',
'2003-09-17',
'2003-09-18',
'2003-09-19',
'2003-09-22',
'2003-09-23',
'2003-09-24',
'2003-09-25',
'2003-09-26',
'2003-09-29',
'2003-09-30',
'2003-10-08',
'2003-10-09',
'2003-10-10',
'2003-10-13',
'2003-10-14',
'2003-10-15',
'2003-10-16',
'2003-10-17',
'2003-10-20',
'2003-10-21',
'2003-10-22',
'2003-10-23',
'2003-10-24',
'2003-10-27',
'2003-10-28',
'2003-10-29',
'2003-10-30',
'2003-10-31',
'2003-11-03',
'2003-11-04',
'2003-11-05',
'2003-11-06',
'2003-11-07',
'2003-11-10',
'2003-11-11',
'2003-11-12',
'2003-11-13',
'2003-11-14',
'2003-11-17',
'2003-11-18',
'2003-11-19',
'2003-11-20',
'2003-11-21',
'2003-11-24',
'2003-11-25',
'2003-11-26',
'2003-11-27',
'2003-11-28',
'2003-12-01',
'2003-12-02',
'2003-12-03',
'2003-12-04',
'2003-12-05',
'2003-12-08',
'2003-12-09',
'2003-12-10',
'2003-12-11',
'2003-12-12',
'2003-12-15',
'2003-12-16',
'2003-12-17',
'2003-12-18',
'2003-12-19',
'2003-12-22',
'2003-12-23',
'2003-12-24',
'2003-12-25',
'2003-12-26',
'2003-12-29',
'2003-12-30',
'2003-12-31',
'2004-01-02',
'2004-01-05',
'2004-01-06',
'2004-01-07',
'2004-01-08',
'2004-01-09',
'2004-01-12',
'2004-01-13',
'2004-01-14',
'2004-01-15',
'2004-01-16',
'2004-01-29',
'2004-01-30',
'2004-02-02',
'2004-02-03',
'2004-02-04',
'2004-02-05',
'2004-02-06',
'2004-02-09',
'2004-02-10',
'2004-02-11',
'2004-02-12',
'2004-02-13',
'2004-02-16',
'2004-02-17',
'2004-02-18',
'2004-02-19',
'2004-02-20',
'2004-02-23',
'2004-02-24',
'2004-02-25',
'2004-02-26',
'2004-02-27',
'2004-03-01',
'2004-03-02',
'2004-03-03',
'2004-03-04',
'2004-03-05',
'2004-03-08',
'2004-03-09',
'2004-03-10',
'2004-03-11',
'2004-03-12',
'2004-03-15',
'2004-03-16',
'2004-03-17',
'2004-03-18',
'2004-03-19',
'2004-03-22',
'2004-03-23',
'2004-03-24',
'2004-03-25',
'2004-03-26',
'2004-03-29',
'2004-03-30',
'2004-03-31',
'2004-04-01',
'2004-04-02',
'2004-04-05',
'2004-04-06',
'2004-04-07',
'2004-04-08',
'2004-04-09',
'2004-04-12',
'2004-04-13',
'2004-04-14',
'2004-04-15',
'2004-04-16',
'2004-04-19',
'2004-04-20',
'2004-04-21',
'2004-04-22',
'2004-04-23',
'2004-04-26',
'2004-04-27',
'2004-04-28',
'2004-04-29',
'2004-04-30',
'2004-05-10',
'2004-05-11',
'2004-05-12',
'2004-05-13',
'2004-05-14',
'2004-05-17',
'2004-05-18',
'2004-05-19',
'2004-05-20',
'2004-05-21',
'2004-05-24',
'2004-05-25',
'2004-05-26',
'2004-05-27',
'2004-05-28',
'2004-05-31',
'2004-06-01',
'2004-06-02',
'2004-06-03',
'2004-06-04',
'2004-06-07',
'2004-06-08',
'2004-06-09',
'2004-06-10',
'2004-06-11',
'2004-06-14',
'2004-06-15',
'2004-06-16',
'2004-06-17',
'2004-06-18',
'2004-06-21',
'2004-06-22',
'2004-06-23',
'2004-06-24',
'2004-06-25',
'2004-06-28',
'2004-06-29',
'2004-06-30',
'2004-07-01',
'2004-07-02',
'2004-07-05',
'2004-07-06',
'2004-07-07',
'2004-07-08',
'2004-07-09',
'2004-07-12',
'2004-07-13',
'2004-07-14',
'2004-07-15',
'2004-07-16',
'2004-07-19',
'2004-07-20',
'2004-07-21',
'2004-07-22',
'2004-07-23',
'2004-07-26',
'2004-07-27',
'2004-07-28',
'2004-07-29',
'2004-07-30',
'2004-08-02',
'2004-08-03',
'2004-08-04',
'2004-08-05',
'2004-08-06',
'2004-08-09',
'2004-08-10',
'2004-08-11',
'2004-08-12',
'2004-08-13',
'2004-08-16',
'2004-08-17',
'2004-08-18',
'2004-08-19',
'2004-08-20',
'2004-08-23',
'2004-08-24',
'2004-08-25',
'2004-08-26',
'2004-08-27',
'2004-08-30',
'2004-08-31',
'2004-09-01',
'2004-09-02',
'2004-09-03',
'2004-09-06',
'2004-09-07',
'2004-09-08',
'2004-09-09',
'2004-09-10',
'2004-09-13',
'2004-09-14',
'2004-09-15',
'2004-09-16',
'2004-09-17',
'2004-09-20',
'2004-09-21',
'2004-09-22',
'2004-09-23',
'2004-09-24',
'2004-09-27',
'2004-09-28',
'2004-09-29',
'2004-09-30',
'2004-10-08',
'2004-10-11',
'2004-10-12',
'2004-10-13',
'2004-10-14',
'2004-10-15',
'2004-10-18',
'2004-10-19',
'2004-10-20',
'2004-10-21',
'2004-10-22',
'2004-10-25',
'2004-10-26',
'2004-10-27',
'2004-10-28',
'2004-10-29',
'2004-11-01',
'2004-11-02',
'2004-11-03',
'2004-11-04',
'2004-11-05',
'2004-11-08',
'2004-11-09',
'2004-11-10',
'2004-11-11',
'2004-11-12',
'2004-11-15',
'2004-11-16',
'2004-11-17',
'2004-11-18',
'2004-11-19',
'2004-11-22',
'2004-11-23',
'2004-11-24',
'2004-11-25',
'2004-11-26',
'2004-11-29',
'2004-11-30',
'2004-12-01',
'2004-12-02',
'2004-12-03',
'2004-12-06',
'2004-12-07',
'2004-12-08',
'2004-12-09',
'2004-12-10',
'2004-12-13',
'2004-12-14',
'2004-12-15',
'2004-12-16',
'2004-12-17',
'2004-12-20',
'2004-12-21',
'2004-12-22',
'2004-12-23',
'2004-12-24',
'2004-12-27',
'2004-12-28',
'2004-12-29',
'2004-12-30',
'2004-12-31',
'2005-01-04',
'2005-01-05',
'2005-01-06',
'2005-01-07',
'2005-01-10',
'2005-01-11',
'2005-01-12',
'2005-01-13',
'2005-01-14',
'2005-01-17',
'2005-01-18',
'2005-01-19',
'2005-01-20',
'2005-01-21',
'2005-01-24',
'2005-01-25',
'2005-01-26',
'2005-01-27',
'2005-01-28',
'2005-01-31',
'2005-02-01',
'2005-02-02',
'2005-02-03',
'2005-02-04',
'2005-02-16',
'2005-02-17',
'2005-02-18',
'2005-02-21',
'2005-02-22',
'2005-02-23',
'2005-02-24',
'2005-02-25',
'2005-02-28',
'2005-03-01',
'2005-03-02',
'2005-03-03',
'2005-03-04',
'2005-03-07',
'2005-03-08',
'2005-03-09',
'2005-03-10',
'2005-03-11',
'2005-03-14',
'2005-03-15',
'2005-03-16',
'2005-03-17',
'2005-03-18',
'2005-03-21',
'2005-03-22',
'2005-03-23',
'2005-03-24',
'2005-03-25',
'2005-03-28',
'2005-03-29',
'2005-03-30',
'2005-03-31',
'2005-04-01',
'2005-04-04',
'2005-04-05',
'2005-04-06',
'2005-04-07',
'2005-04-08',
'2005-04-11',
'2005-04-12',
'2005-04-13',
'2005-04-14',
'2005-04-15',
'2005-04-18',
'2005-04-19',
'2005-04-20',
'2005-04-21',
'2005-04-22',
'2005-04-25',
'2005-04-26',
'2005-04-27',
'2005-04-28',
'2005-04-29',
'2005-05-09',
'2005-05-10',
'2005-05-11',
'2005-05-12',
'2005-05-13',
'2005-05-16',
'2005-05-17',
'2005-05-18',
'2005-05-19',
'2005-05-20',
'2005-05-23',
'2005-05-24',
'2005-05-25',
'2005-05-26',
'2005-05-27',
'2005-05-30',
'2005-05-31',
'2005-06-01',
'2005-06-02',
'2005-06-03',
'2005-06-06',
'2005-06-07',
'2005-06-08',
'2005-06-09',
'2005-06-10',
'2005-06-13',
'2005-06-14',
'2005-06-15',
'2005-06-16',
'2005-06-17',
'2005-06-20',
'2005-06-21',
'2005-06-22',
'2005-06-23',
'2005-06-24',
'2005-06-27',
'2005-06-28',
'2005-06-29',
'2005-06-30',
'2005-07-01',
'2005-07-04',
'2005-07-05',
'2005-07-06',
'2005-07-07',
'2005-07-08',
'2005-07-11',
'2005-07-12',
'2005-07-13',
'2005-07-14',
'2005-07-15',
'2005-07-18',
'2005-07-19',
'2005-07-20',
'2005-07-21',
'2005-07-22',
'2005-07-25',
'2005-07-26',
'2005-07-27',
'2005-07-28',
'2005-07-29',
'2005-08-01',
'2005-08-02',
'2005-08-03',
'2005-08-04',
'2005-08-05',
'2005-08-08',
'2005-08-09',
'2005-08-10',
'2005-08-11',
'2005-08-12',
'2005-08-15',
'2005-08-16',
'2005-08-17',
'2005-08-18',
'2005-08-19',
'2005-08-22',
'2005-08-23',
'2005-08-24',
'2005-08-25',
'2005-08-26',
'2005-08-29',
'2005-08-30',
'2005-08-31',
'2005-09-01',
'2005-09-02',
'2005-09-05',
'2005-09-06',
'2005-09-07',
'2005-09-08',
'2005-09-09',
'2005-09-12',
'2005-09-13',
'2005-09-14',
'2005-09-15',
'2005-09-16',
'2005-09-19',
'2005-09-20',
'2005-09-21',
'2005-09-22',
'2005-09-23',
'2005-09-26',
'2005-09-27',
'2005-09-28',
'2005-09-29',
'2005-09-30',
'2005-10-10',
'2005-10-11',
'2005-10-12',
'2005-10-13',
'2005-10-14',
'2005-10-17',
'2005-10-18',
'2005-10-19',
'2005-10-20',
'2005-10-21',
'2005-10-24',
'2005-10-25',
'2005-10-26',
'2005-10-27',
'2005-10-28',
'2005-10-31',
'2005-11-01',
'2005-11-02',
'2005-11-03',
'2005-11-04',
'2005-11-07',
'2005-11-08',
'2005-11-09',
'2005-11-10',
'2005-11-11',
'2005-11-14',
'2005-11-15',
'2005-11-16',
'2005-11-17',
'2005-11-18',
'2005-11-21',
'2005-11-22',
'2005-11-23',
'2005-11-24',
'2005-11-25',
'2005-11-28',
'2005-11-29',
'2005-11-30',
'2005-12-01',
'2005-12-02',
'2005-12-05',
'2005-12-06',
'2005-12-07',
'2005-12-08',
'2005-12-09',
'2005-12-12',
'2005-12-13',
'2005-12-14',
'2005-12-15',
'2005-12-16',
'2005-12-19',
'2005-12-20',
'2005-12-21',
'2005-12-22',
'2005-12-23',
'2005-12-26',
'2005-12-27',
'2005-12-28',
'2005-12-29',
'2005-12-30',
'2006-01-04',
'2006-01-05',
'2006-01-06',
'2006-01-09',
'2006-01-10',
'2006-01-11',
'2006-01-12',
'2006-01-13',
'2006-01-16',
'2006-01-17',
'2006-01-18',
'2006-01-19',
'2006-01-20',
'2006-01-23',
'2006-01-24',
'2006-01-25',
'2006-02-06',
'2006-02-07',
'2006-02-08',
'2006-02-09',
'2006-02-10',
'2006-02-13',
'2006-02-14',
'2006-02-15',
'2006-02-16',
'2006-02-17',
'2006-02-20',
'2006-02-21',
'2006-02-22',
'2006-02-23',
'2006-02-24',
'2006-02-27',
'2006-02-28',
'2006-03-01',
'2006-03-02',
'2006-03-03',
'2006-03-06',
'2006-03-07',
'2006-03-08',
'2006-03-09',
'2006-03-10',
'2006-03-13',
'2006-03-14',
'2006-03-15',
'2006-03-16',
'2006-03-17',
'2006-03-20',
'2006-03-21',
'2006-03-22',
'2006-03-23',
'2006-03-24',
'2006-03-27',
'2006-03-28',
'2006-03-29',
'2006-03-30',
'2006-03-31',
'2006-04-03',
'2006-04-04',
'2006-04-05',
'2006-04-06',
'2006-04-07',
'2006-04-10',
'2006-04-11',
'2006-04-12',
'2006-04-13',
'2006-04-14',
'2006-04-17',
'2006-04-18',
'2006-04-19',
'2006-04-20',
'2006-04-21',
'2006-04-24',
'2006-04-25',
'2006-04-26',
'2006-04-27',
'2006-04-28',
'2006-05-08',
'2006-05-09',
'2006-05-10',
'2006-05-11',
'2006-05-12',
'2006-05-15',
'2006-05-16',
'2006-05-17',
'2006-05-18',
'2006-05-19',
'2006-05-22',
'2006-05-23',
'2006-05-24',
'2006-05-25',
'2006-05-26',
'2006-05-29',
'2006-05-30',
'2006-05-31',
'2006-06-01',
'2006-06-02',
'2006-06-05',
'2006-06-06',
'2006-06-07',
'2006-06-08',
'2006-06-09',
'2006-06-12',
'2006-06-13',
'2006-06-14',
'2006-06-15',
'2006-06-16',
'2006-06-19',
'2006-06-20',
'2006-06-21',
'2006-06-22',
'2006-06-23',
'2006-06-26',
'2006-06-27',
'2006-06-28',
'2006-06-29',
'2006-06-30',
'2006-07-03',
'2006-07-04',
'2006-07-05',
'2006-07-06',
'2006-07-07',
'2006-07-10',
'2006-07-11',
'2006-07-12',
'2006-07-13',
'2006-07-14',
'2006-07-17',
'2006-07-18',
'2006-07-19',
'2006-07-20',
'2006-07-21',
'2006-07-24',
'2006-07-25',
'2006-07-26',
'2006-07-27',
'2006-07-28',
'2006-07-31',
'2006-08-01',
'2006-08-02',
'2006-08-03',
'2006-08-04',
'2006-08-07',
'2006-08-08',
'2006-08-09',
'2006-08-10',
'2006-08-11',
'2006-08-14',
'2006-08-15',
'2006-08-16',
'2006-08-17',
'2006-08-18',
'2006-08-21',
'2006-08-22',
'2006-08-23',
'2006-08-24',
'2006-08-25',
'2006-08-28',
'2006-08-29',
'2006-08-30',
'2006-08-31',
'2006-09-01',
'2006-09-04',
'2006-09-05',
'2006-09-06',
'2006-09-07',
'2006-09-08',
'2006-09-11',
'2006-09-12',
'2006-09-13',
'2006-09-14',
'2006-09-15',
'2006-09-18',
'2006-09-19',
'2006-09-20',
'2006-09-21',
'2006-09-22',
'2006-09-25',
'2006-09-26',
'2006-09-27',
'2006-09-28',
'2006-09-29',
'2006-10-09',
'2006-10-10',
'2006-10-11',
'2006-10-12',
'2006-10-13',
'2006-10-16',
'2006-10-17',
'2006-10-18',
'2006-10-19',
'2006-10-20',
'2006-10-23',
'2006-10-24',
'2006-10-25',
'2006-10-26',
'2006-10-27',
'2006-10-30',
'2006-10-31',
'2006-11-01',
'2006-11-02',
'2006-11-03',
'2006-11-06',
'2006-11-07',
'2006-11-08',
'2006-11-09',
'2006-11-10',
'2006-11-13',
'2006-11-14',
'2006-11-15',
'2006-11-16',
'2006-11-17',
'2006-11-20',
'2006-11-21',
'2006-11-22',
'2006-11-23',
'2006-11-24',
'2006-11-27',
'2006-11-28',
'2006-11-29',
'2006-11-30',
'2006-12-01',
'2006-12-04',
'2006-12-05',
'2006-12-06',
'2006-12-07',
'2006-12-08',
'2006-12-11',
'2006-12-12',
'2006-12-13',
'2006-12-14',
'2006-12-15',
'2006-12-18',
'2006-12-19',
'2006-12-20',
'2006-12-21',
'2006-12-22',
'2006-12-25',
'2006-12-26',
'2006-12-27',
'2006-12-28',
'2006-12-29',
'2007-01-04',
'2007-01-05',
'2007-01-08',
'2007-01-09',
'2007-01-10',
'2007-01-11',
'2007-01-12',
'2007-01-15',
'2007-01-16',
'2007-01-17',
'2007-01-18',
'2007-01-19',
'2007-01-22',
'2007-01-23',
'2007-01-24',
'2007-01-25',
'2007-01-26',
'2007-01-29',
'2007-01-30',
'2007-01-31',
'2007-02-01',
'2007-02-02',
'2007-02-05',
'2007-02-06',
'2007-02-07',
'2007-02-08',
'2007-02-09',
'2007-02-12',
'2007-02-13',
'2007-02-14',
'2007-02-15',
'2007-02-16',
'2007-02-26',
'2007-02-27',
'2007-02-28',
'2007-03-01',
'2007-03-02',
'2007-03-05',
'2007-03-06',
'2007-03-07',
'2007-03-08',
'2007-03-09',
'2007-03-12',
'2007-03-13',
'2007-03-14',
'2007-03-15',
'2007-03-16',
'2007-03-19',
'2007-03-20',
'2007-03-21',
'2007-03-22',
'2007-03-23',
'2007-03-26',
'2007-03-27',
'2007-03-28',
'2007-03-29',
'2007-03-30',
'2007-04-02',
'2007-04-03',
'2007-04-04',
'2007-04-05',
'2007-04-06',
'2007-04-09',
'2007-04-10',
'2007-04-11',
'2007-04-12',
'2007-04-13',
'2007-04-16',
'2007-04-17',
'2007-04-18',
'2007-04-19',
'2007-04-20',
'2007-04-23',
'2007-04-24',
'2007-04-25',
'2007-04-26',
'2007-04-27',
'2007-04-30',
'2007-05-08',
'2007-05-09',
'2007-05-10',
'2007-05-11',
'2007-05-14',
'2007-05-15',
'2007-05-16',
'2007-05-17',
'2007-05-18',
'2007-05-21',
'2007-05-22',
'2007-05-23',
'2007-05-24',
'2007-05-25',
'2007-05-28',
'2007-05-29',
'2007-05-30',
'2007-05-31',
'2007-06-01',
'2007-06-04',
'2007-06-05',
'2007-06-06',
'2007-06-07',
'2007-06-08',
'2007-06-11',
'2007-06-12',
'2007-06-13',
'2007-06-14',
'2007-06-15',
'2007-06-18',
'2007-06-19',
'2007-06-20',
'2007-06-21',
'2007-06-22',
'2007-06-25',
'2007-06-26',
'2007-06-27',
'2007-06-28',
'2007-06-29',
'2007-07-02',
'2007-07-03',
'2007-07-04',
'2007-07-05',
'2007-07-06',
'2007-07-09',
'2007-07-10',
'2007-07-11',
'2007-07-12',
'2007-07-13',
'2007-07-16',
'2007-07-17',
'2007-07-18',
'2007-07-19',
'2007-07-20',
'2007-07-23',
'2007-07-24',
'2007-07-25',
'2007-07-26',
'2007-07-27',
'2007-07-30',
'2007-07-31',
'2007-08-01',
'2007-08-02',
'2007-08-03',
'2007-08-06',
'2007-08-07',
'2007-08-08',
'2007-08-09',
'2007-08-10',
'2007-08-13',
'2007-08-14',
'2007-08-15',
'2007-08-16',
'2007-08-17',
'2007-08-20',
'2007-08-21',
'2007-08-22',
'2007-08-23',
'2007-08-24',
'2007-08-27',
'2007-08-28',
'2007-08-29',
'2007-08-30',
'2007-08-31',
'2007-09-03',
'2007-09-04',
'2007-09-05',
'2007-09-06',
'2007-09-07',
'2007-09-10',
'2007-09-11',
'2007-09-12',
'2007-09-13',
'2007-09-14',
'2007-09-17',
'2007-09-18',
'2007-09-19',
'2007-09-20',
'2007-09-21',
'2007-09-24',
'2007-09-25',
'2007-09-26',
'2007-09-27',
'2007-09-28',
'2007-10-08',
'2007-10-09',
'2007-10-10',
'2007-10-11',
'2007-10-12',
'2007-10-15',
'2007-10-16',
'2007-10-17',
'2007-10-18',
'2007-10-19',
'2007-10-22',
'2007-10-23',
'2007-10-24',
'2007-10-25',
'2007-10-26',
'2007-10-29',
'2007-10-30',
'2007-10-31',
'2007-11-01',
'2007-11-02',
'2007-11-05',
'2007-11-06',
'2007-11-07',
'2007-11-08',
'2007-11-09',
'2007-11-12',
'2007-11-13',
'2007-11-14',
'2007-11-15',
'2007-11-16',
'2007-11-19',
'2007-11-20',
'2007-11-21',
'2007-11-22',
'2007-11-23',
'2007-11-26',
'2007-11-27',
'2007-11-28',
'2007-11-29',
'2007-11-30',
'2007-12-03',
'2007-12-04',
'2007-12-05',
'2007-12-06',
'2007-12-07',
'2007-12-10',
'2007-12-11',
'2007-12-12',
'2007-12-13',
'2007-12-14',
'2007-12-17',
'2007-12-18',
'2007-12-19',
'2007-12-20',
'2007-12-21',
'2007-12-24',
'2007-12-25',
'2007-12-26',
'2007-12-27',
'2007-12-28',
'2008-01-02',
'2008-01-03',
'2008-01-04',
'2008-01-07',
'2008-01-08',
'2008-01-09',
'2008-01-10',
'2008-01-11',
'2008-01-14',
'2008-01-15',
'2008-01-16',
'2008-01-17',
'2008-01-18',
'2008-01-21',
'2008-01-22',
'2008-01-23',
'2008-01-24',
'2008-01-25',
'2008-01-28',
'2008-01-29',
'2008-01-30',
'2008-01-31',
'2008-02-01',
'2008-02-04',
'2008-02-05',
'2008-02-13',
'2008-02-14',
'2008-02-15',
'2008-02-18',
'2008-02-19',
'2008-02-20',
'2008-02-21',
'2008-02-22',
'2008-02-25',
'2008-02-26',
'2008-02-27',
'2008-02-28',
'2008-02-29',
'2008-03-03',
'2008-03-04',
'2008-03-05',
'2008-03-06',
'2008-03-07',
'2008-03-10',
'2008-03-11',
'2008-03-12',
'2008-03-13',
'2008-03-14',
'2008-03-17',
'2008-03-18',
'2008-03-19',
'2008-03-20',
'2008-03-21',
'2008-03-24',
'2008-03-25',
'2008-03-26',
'2008-03-27',
'2008-03-28',
'2008-03-31',
'2008-04-01',
'2008-04-02',
'2008-04-03',
'2008-04-07',
'2008-04-08',
'2008-04-09',
'2008-04-10',
'2008-04-11',
'2008-04-14',
'2008-04-15',
'2008-04-16',
'2008-04-17',
'2008-04-18',
'2008-04-21',
'2008-04-22',
'2008-04-23',
'2008-04-24',
'2008-04-25',
'2008-04-28',
'2008-04-29',
'2008-04-30',
'2008-05-05',
'2008-05-06',
'2008-05-07',
'2008-05-08',
'2008-05-09',
'2008-05-12',
'2008-05-13',
'2008-05-14',
'2008-05-15',
'2008-05-16',
'2008-05-19',
'2008-05-20',
'2008-05-21',
'2008-05-22',
'2008-05-23',
'2008-05-26',
'2008-05-27',
'2008-05-28',
'2008-05-29',
'2008-05-30',
'2008-06-02',
'2008-06-03',
'2008-06-04',
'2008-06-05',
'2008-06-06',
'2008-06-10',
'2008-06-11',
'2008-06-12',
'2008-06-13',
'2008-06-16',
'2008-06-17',
'2008-06-18',
'2008-06-19',
'2008-06-20',
'2008-06-23',
'2008-06-24',
'2008-06-25',
'2008-06-26',
'2008-06-27',
'2008-06-30',
'2008-07-01',
'2008-07-02',
'2008-07-03',
'2008-07-04',
'2008-07-07',
'2008-07-08',
'2008-07-09',
'2008-07-10',
'2008-07-11',
'2008-07-14',
'2008-07-15',
'2008-07-16',
'2008-07-17',
'2008-07-18',
'2008-07-21',
'2008-07-22',
'2008-07-23',
'2008-07-24',
'2008-07-25',
'2008-07-28',
'2008-07-29',
'2008-07-30',
'2008-07-31',
'2008-08-01',
'2008-08-04',
'2008-08-05',
'2008-08-06',
'2008-08-07',
'2008-08-08',
'2008-08-11',
'2008-08-12',
'2008-08-13',
'2008-08-14',
'2008-08-15',
'2008-08-18',
'2008-08-19',
'2008-08-20',
'2008-08-21',
'2008-08-22',
'2008-08-25',
'2008-08-26',
'2008-08-27',
'2008-08-28',
'2008-08-29',
'2008-09-01',
'2008-09-02',
'2008-09-03',
'2008-09-04',
'2008-09-05',
'2008-09-08',
'2008-09-09',
'2008-09-10',
'2008-09-11',
'2008-09-12',
'2008-09-16',
'2008-09-17',
'2008-09-18',
'2008-09-19',
'2008-09-22',
'2008-09-23',
'2008-09-24',
'2008-09-25',
'2008-09-26',
'2008-10-06',
'2008-10-07',
'2008-10-08',
'2008-10-09',
'2008-10-10',
'2008-10-13',
'2008-10-14',
'2008-10-15',
'2008-10-16',
'2008-10-17',
'2008-10-20',
'2008-10-21',
'2008-10-22',
'2008-10-23',
'2008-10-24',
'2008-10-27',
'2008-10-28',
'2008-10-29',
'2008-10-30',
'2008-10-31',
'2008-11-03',
'2008-11-04',
'2008-11-05',
'2008-11-06',
'2008-11-07',
'2008-11-10',
'2008-11-11',
'2008-11-12',
'2008-11-13',
'2008-11-14',
'2008-11-17',
'2008-11-18',
'2008-11-19',
'2008-11-20',
'2008-11-21',
'2008-11-24',
'2008-11-25',
'2008-11-26',
'2008-11-27',
'2008-11-28',
'2008-12-01',
'2008-12-02',
'2008-12-03',
'2008-12-04',
'2008-12-05',
'2008-12-08',
'2008-12-09',
'2008-12-10',
'2008-12-11',
'2008-12-12',
'2008-12-15',
'2008-12-16',
'2008-12-17',
'2008-12-18',
'2008-12-19',
'2008-12-22',
'2008-12-23',
'2008-12-24',
'2008-12-25',
'2008-12-26',
'2008-12-29',
'2008-12-30',
'2008-12-31',
'2009-01-05',
'2009-01-06',
'2009-01-07',
'2009-01-08',
'2009-01-09',
'2009-01-12',
'2009-01-13',
'2009-01-14',
'2009-01-15',
'2009-01-16',
'2009-01-19',
'2009-01-20',
'2009-01-21',
'2009-01-22',
'2009-01-23',
'2009-02-02',
'2009-02-03',
'2009-02-04',
'2009-02-05',
'2009-02-06',
'2009-02-09',
'2009-02-10',
'2009-02-11',
'2009-02-12',
'2009-02-13',
'2009-02-16',
'2009-02-17',
'2009-02-18',
'2009-02-19',
'2009-02-20',
'2009-02-23',
'2009-02-24',
'2009-02-25',
'2009-02-26',
'2009-02-27',
'2009-03-02',
'2009-03-03',
'2009-03-04',
'2009-03-05',
'2009-03-06',
'2009-03-09',
'2009-03-10',
'2009-03-11',
'2009-03-12',
'2009-03-13',
'2009-03-16',
'2009-03-17',
'2009-03-18',
'2009-03-19',
'2009-03-20',
'2009-03-23',
'2009-03-24',
'2009-03-25',
'2009-03-26',
'2009-03-27',
'2009-03-30',
'2009-03-31',
'2009-04-01',
'2009-04-02',
'2009-04-03',
'2009-04-07',
'2009-04-08',
'2009-04-09',
'2009-04-10',
'2009-04-13',
'2009-04-14',
'2009-04-15',
'2009-04-16',
'2009-04-17',
'2009-04-20',
'2009-04-21',
'2009-04-22',
'2009-04-23',
'2009-04-24',
'2009-04-27',
'2009-04-28',
'2009-04-29',
'2009-04-30',
'2009-05-04',
'2009-05-05',
'2009-05-06',
'2009-05-07',
'2009-05-08',
'2009-05-11',
'2009-05-12',
'2009-05-13',
'2009-05-14',
'2009-05-15',
'2009-05-18',
'2009-05-19',
'2009-05-20',
'2009-05-21',
'2009-05-22',
'2009-05-25',
'2009-05-26',
'2009-05-27',
'2009-06-01',
'2009-06-02',
'2009-06-03',
'2009-06-04',
'2009-06-05',
'2009-06-08',
'2009-06-09',
'2009-06-10',
'2009-06-11',
'2009-06-12',
'2009-06-15',
'2009-06-16',
'2009-06-17',
'2009-06-18',
'2009-06-19',
'2009-06-22',
'2009-06-23',
'2009-06-24',
'2009-06-25',
'2009-06-26',
'2009-06-29',
'2009-06-30',
'2009-07-01',
'2009-07-02',
'2009-07-03',
'2009-07-06',
'2009-07-07',
'2009-07-08',
'2009-07-09',
'2009-07-10',
'2009-07-13',
'2009-07-14',
'2009-07-15',
'2009-07-16',
'2009-07-17',
'2009-07-20',
'2009-07-21',
'2009-07-22',
'2009-07-23',
'2009-07-24',
'2009-07-27',
'2009-07-28',
'2009-07-29',
'2009-07-30',
'2009-07-31',
'2009-08-03',
'2009-08-04',
'2009-08-05',
'2009-08-06',
'2009-08-07',
'2009-08-10',
'2009-08-11',
'2009-08-12',
'2009-08-13',
'2009-08-14',
'2009-08-17',
'2009-08-18',
'2009-08-19',
'2009-08-20',
'2009-08-21',
'2009-08-24',
'2009-08-25',
'2009-08-26',
'2009-08-27',
'2009-08-28',
'2009-08-31',
'2009-09-01',
'2009-09-02',
'2009-09-03',
'2009-09-04',
'2009-09-07',
'2009-09-08',
'2009-09-09',
'2009-09-10',
'2009-09-11',
'2009-09-14',
'2009-09-15',
'2009-09-16',
'2009-09-17',
'2009-09-18',
'2009-09-21',
'2009-09-22',
'2009-09-23',
'2009-09-24',
'2009-09-25',
'2009-09-28',
'2009-09-29',
'2009-09-30',
'2009-10-09',
'2009-10-12',
'2009-10-13',
'2009-10-14',
'2009-10-15',
'2009-10-16',
'2009-10-19',
'2009-10-20',
'2009-10-21',
'2009-10-22',
'2009-10-23',
'2009-10-26',
'2009-10-27',
'2009-10-28',
'2009-10-29',
'2009-10-30',
'2009-11-02',
'2009-11-03',
'2009-11-04',
'2009-11-05',
'2009-11-06',
'2009-11-09',
'2009-11-10',
'2009-11-11',
'2009-11-12',
'2009-11-13',
'2009-11-16',
'2009-11-17',
'2009-11-18',
'2009-11-19',
'2009-11-20',
'2009-11-23',
'2009-11-24',
'2009-11-25',
'2009-11-26',
'2009-11-27',
'2009-11-30',
'2009-12-01',
'2009-12-02',
'2009-12-03',
'2009-12-04',
'2009-12-07',
'2009-12-08',
'2009-12-09',
'2009-12-10',
'2009-12-11',
'2009-12-14',
'2009-12-15',
'2009-12-16',
'2009-12-17',
'2009-12-18',
'2009-12-21',
'2009-12-22',
'2009-12-23',
'2009-12-24',
'2009-12-25',
'2009-12-28',
'2009-12-29',
'2009-12-30',
'2009-12-31',
'2010-01-04',
'2010-01-05',
'2010-01-06',
'2010-01-07',
'2010-01-08',
'2010-01-11',
'2010-01-12',
'2010-01-13',
'2010-01-14',
'2010-01-15',
'2010-01-18',
'2010-01-19',
'2010-01-20',
'2010-01-21',
'2010-01-22',
'2010-01-25',
'2010-01-26',
'2010-01-27',
'2010-01-28',
'2010-01-29',
'2010-02-01',
'2010-02-02',
'2010-02-03',
'2010-02-04',
'2010-02-05',
'2010-02-08',
'2010-02-09',
'2010-02-10',
'2010-02-11',
'2010-02-12',
'2010-02-22',
'2010-02-23',
'2010-02-24',
'2010-02-25',
'2010-02-26',
'2010-03-01',
'2010-03-02',
'2010-03-03',
'2010-03-04',
'2010-03-05',
'2010-03-08',
'2010-03-09',
'2010-03-10',
'2010-03-11',
'2010-03-12',
'2010-03-15',
'2010-03-16',
'2010-03-17',
'2010-03-18',
'2010-03-19',
'2010-03-22',
'2010-03-23',
'2010-03-24',
'2010-03-25',
'2010-03-26',
'2010-03-29',
'2010-03-30',
'2010-03-31',
'2010-04-01',
'2010-04-02',
'2010-04-06',
'2010-04-07',
'2010-04-08',
'2010-04-09',
'2010-04-12',
'2010-04-13',
'2010-04-14',
'2010-04-15',
'2010-04-16',
'2010-04-19',
'2010-04-20',
'2010-04-21',
'2010-04-22',
'2010-04-23',
'2010-04-26',
'2010-04-27',
'2010-04-28',
'2010-04-29',
'2010-04-30',
'2010-05-04',
'2010-05-05',
'2010-05-06',
'2010-05-07',
'2010-05-10',
'2010-05-11',
'2010-05-12',
'2010-05-13',
'2010-05-14',
'2010-05-17',
'2010-05-18',
'2010-05-19',
'2010-05-20',
'2010-05-21',
'2010-05-24',
'2010-05-25',
'2010-05-26',
'2010-05-27',
'2010-05-28',
'2010-05-31',
'2010-06-01',
'2010-06-02',
'2010-06-03',
'2010-06-04',
'2010-06-07',
'2010-06-08',
'2010-06-09',
'2010-06-10',
'2010-06-11',
'2010-06-17',
'2010-06-18',
'2010-06-21',
'2010-06-22',
'2010-06-23',
'2010-06-24',
'2010-06-25',
'2010-06-28',
'2010-06-29',
'2010-06-30',
'2010-07-01',
'2010-07-02',
'2010-07-05',
'2010-07-06',
'2010-07-07',
'2010-07-08',
'2010-07-09',
'2010-07-12',
'2010-07-13',
'2010-07-14',
'2010-07-15',
'2010-07-16',
'2010-07-19',
'2010-07-20',
'2010-07-21',
'2010-07-22',
'2010-07-23',
'2010-07-26',
'2010-07-27',
'2010-07-28',
'2010-07-29',
'2010-07-30',
'2010-08-02',
'2010-08-03',
'2010-08-04',
'2010-08-05',
'2010-08-06',
'2010-08-09',
'2010-08-10',
'2010-08-11',
'2010-08-12',
'2010-08-13',
'2010-08-16',
'2010-08-17',
'2010-08-18',
'2010-08-19',
'2010-08-20',
'2010-08-23',
'2010-08-24',
'2010-08-25',
'2010-08-26',
'2010-08-27',
'2010-08-30',
'2010-08-31',
'2010-09-01',
'2010-09-02',
'2010-09-03',
'2010-09-06',
'2010-09-07',
'2010-09-08',
'2010-09-09',
'2010-09-10',
'2010-09-13',
'2010-09-14',
'2010-09-15',
'2010-09-16',
'2010-09-17',
'2010-09-20',
'2010-09-21',
'2010-09-27',
'2010-09-28',
'2010-09-29',
'2010-09-30',
'2010-10-08',
'2010-10-11',
'2010-10-12',
'2010-10-13',
'2010-10-14',
'2010-10-15',
'2010-10-18',
'2010-10-19',
'2010-10-20',
'2010-10-21',
'2010-10-22',
'2010-10-25',
'2010-10-26',
'2010-10-27',
'2010-10-28',
'2010-10-29',
'2010-11-01',
'2010-11-02',
'2010-11-03',
'2010-11-04',
'2010-11-05',
'2010-11-08',
'2010-11-09',
'2010-11-10',
'2010-11-11',
'2010-11-12',
'2010-11-15',
'2010-11-16',
'2010-11-17',
'2010-11-18',
'2010-11-19',
'2010-11-22',
'2010-11-23',
'2010-11-24',
'2010-11-25',
'2010-11-26',
'2010-11-29',
'2010-11-30',
'2010-12-01',
'2010-12-02',
'2010-12-03',
'2010-12-06',
'2010-12-07',
'2010-12-08',
'2010-12-09',
'2010-12-10',
'2010-12-13',
'2010-12-14',
'2010-12-15',
'2010-12-16',
'2010-12-17',
'2010-12-20',
'2010-12-21',
'2010-12-22',
'2010-12-23',
'2010-12-24',
'2010-12-27',
'2010-12-28',
'2010-12-29',
'2010-12-30',
'2010-12-31',
'2011-01-04',
'2011-01-05',
'2011-01-06',
'2011-01-07',
'2011-01-10',
'2011-01-11',
'2011-01-12',
'2011-01-13',
'2011-01-14',
'2011-01-17',
'2011-01-18',
'2011-01-19',
'2011-01-20',
'2011-01-21',
'2011-01-24',
'2011-01-25',
'2011-01-26',
'2011-01-27',
'2011-01-28',
'2011-01-31',
'2011-02-01',
'2011-02-09',
'2011-02-10',
'2011-02-11',
'2011-02-14',
'2011-02-15',
'2011-02-16',
'2011-02-17',
'2011-02-18',
'2011-02-21',
'2011-02-22',
'2011-02-23',
'2011-02-24',
'2011-02-25',
'2011-02-28',
'2011-03-01',
'2011-03-02',
'2011-03-03',
'2011-03-04',
'2011-03-07',
'2011-03-08',
'2011-03-09',
'2011-03-10',
'2011-03-11',
'2011-03-14',
'2011-03-15',
'2011-03-16',
'2011-03-17',
'2011-03-18',
'2011-03-21',
'2011-03-22',
'2011-03-23',
'2011-03-24',
'2011-03-25',
'2011-03-28',
'2011-03-29',
'2011-03-30',
'2011-03-31',
'2011-04-01',
'2011-04-06',
'2011-04-07',
'2011-04-08',
'2011-04-11',
'2011-04-12',
'2011-04-13',
'2011-04-14',
'2011-04-15',
'2011-04-18',
'2011-04-19',
'2011-04-20',
'2011-04-21',
'2011-04-22',
'2011-04-25',
'2011-04-26',
'2011-04-27',
'2011-04-28',
'2011-04-29',
'2011-05-03',
'2011-05-04',
'2011-05-05',
'2011-05-06',
'2011-05-09',
'2011-05-10',
'2011-05-11',
'2011-05-12',
'2011-05-13',
'2011-05-16',
'2011-05-17',
'2011-05-18',
'2011-05-19',
'2011-05-20',
'2011-05-23',
'2011-05-24',
'2011-05-25',
'2011-05-26',
'2011-05-27',
'2011-05-30',
'2011-05-31',
'2011-06-01',
'2011-06-02',
'2011-06-03',
'2011-06-07',
'2011-06-08',
'2011-06-09',
'2011-06-10',
'2011-06-13',
'2011-06-14',
'2011-06-15',
'2011-06-16',
'2011-06-17',
'2011-06-20',
'2011-06-21',
'2011-06-22',
'2011-06-23',
'2011-06-24',
'2011-06-27',
'2011-06-28',
'2011-06-29',
'2011-06-30',
'2011-07-01',
'2011-07-04',
'2011-07-05',
'2011-07-06',
'2011-07-07',
'2011-07-08',
'2011-07-11',
'2011-07-12',
'2011-07-13',
'2011-07-14',
'2011-07-15',
'2011-07-18',
'2011-07-19',
'2011-07-20',
'2011-07-21',
'2011-07-22',
'2011-07-25',
'2011-07-26',
'2011-07-27',
'2011-07-28',
'2011-07-29',
'2011-08-01',
'2011-08-02',
'2011-08-03',
'2011-08-04',
'2011-08-05',
'2011-08-08',
'2011-08-09',
'2011-08-10',
'2011-08-11',
'2011-08-12',
'2011-08-15',
'2011-08-16',
'2011-08-17',
'2011-08-18',
'2011-08-19',
'2011-08-22',
'2011-08-23',
'2011-08-24',
'2011-08-25',
'2011-08-26',
'2011-08-29',
'2011-08-30',
'2011-08-31',
'2011-09-01',
'2011-09-02',
'2011-09-05',
'2011-09-06',
'2011-09-07',
'2011-09-08',
'2011-09-09',
'2011-09-13',
'2011-09-14',
'2011-09-15',
'2011-09-16',
'2011-09-19',
'2011-09-20',
'2011-09-21',
'2011-09-22',
'2011-09-23',
'2011-09-26',
'2011-09-27',
'2011-09-28',
'2011-09-29',
'2011-09-30',
'2011-10-10',
'2011-10-11',
'2011-10-12',
'2011-10-13',
'2011-10-14',
'2011-10-17',
'2011-10-18',
'2011-10-19',
'2011-10-20',
'2011-10-21',
'2011-10-24',
'2011-10-25',
'2011-10-26',
'2011-10-27',
'2011-10-28',
'2011-10-31',
'2011-11-01',
'2011-11-02',
'2011-11-03',
'2011-11-04',
'2011-11-07',
'2011-11-08',
'2011-11-09',
'2011-11-10',
'2011-11-11',
'2011-11-14',
'2011-11-15',
'2011-11-16',
'2011-11-17',
'2011-11-18',
'2011-11-21',
'2011-11-22',
'2011-11-23',
'2011-11-24',
'2011-11-25',
'2011-11-28',
'2011-11-29',
'2011-11-30',
'2011-12-01',
'2011-12-02',
'2011-12-05',
'2011-12-06',
'2011-12-07',
'2011-12-08',
'2011-12-09',
'2011-12-12',
'2011-12-13',
'2011-12-14',
'2011-12-15',
'2011-12-16',
'2011-12-19',
'2011-12-20',
'2011-12-21',
'2011-12-22',
'2011-12-23',
'2011-12-26',
'2011-12-27',
'2011-12-28',
'2011-12-29',
'2011-12-30',
'2012-01-04',
'2012-01-05',
'2012-01-06',
'2012-01-09',
'2012-01-10',
'2012-01-11',
'2012-01-12',
'2012-01-13',
'2012-01-16',
'2012-01-17',
'2012-01-18',
'2012-01-19',
'2012-01-20',
'2012-01-30',
'2012-01-31',
'2012-02-01',
'2012-02-02',
'2012-02-03',
'2012-02-06',
'2012-02-07',
'2012-02-08',
'2012-02-09',
'2012-02-10',
'2012-02-13',
'2012-02-14',
'2012-02-15',
'2012-02-16',
'2012-02-17',
'2012-02-20',
'2012-02-21',
'2012-02-22',
'2012-02-23',
'2012-02-24',
'2012-02-27',
'2012-02-28',
'2012-02-29',
'2012-03-01',
'2012-03-02',
'2012-03-05',
'2012-03-06',
'2012-03-07',
'2012-03-08',
'2012-03-09',
'2012-03-12',
'2012-03-13',
'2012-03-14',
'2012-03-15',
'2012-03-16',
'2012-03-19',
'2012-03-20',
'2012-03-21',
'2012-03-22',
'2012-03-23',
'2012-03-26',
'2012-03-27',
'2012-03-28',
'2012-03-29',
'2012-03-30',
'2012-04-05',
'2012-04-06',
'2012-04-09',
'2012-04-10',
'2012-04-11',
'2012-04-12',
'2012-04-13',
'2012-04-16',
'2012-04-17',
'2012-04-18',
'2012-04-19',
'2012-04-20',
'2012-04-23',
'2012-04-24',
'2012-04-25',
'2012-04-26',
'2012-04-27',
'2012-05-02',
'2012-05-03',
'2012-05-04',
'2012-05-07',
'2012-05-08',
'2012-05-09',
'2012-05-10',
'2012-05-11',
'2012-05-14',
'2012-05-15',
'2012-05-16',
'2012-05-17',
'2012-05-18',
'2012-05-21',
'2012-05-22',
'2012-05-23',
'2012-05-24',
'2012-05-25',
'2012-05-28',
'2012-05-29',
'2012-05-30',
'2012-05-31',
'2012-06-01',
'2012-06-04',
'2012-06-05',
'2012-06-06',
'2012-06-07',
'2012-06-08',
'2012-06-11',
'2012-06-12',
'2012-06-13',
'2012-06-14',
'2012-06-15',
'2012-06-18',
'2012-06-19',
'2012-06-20',
'2012-06-21',
'2012-06-25',
'2012-06-26',
'2012-06-27',
'2012-06-28',
'2012-06-29',
'2012-07-02',
'2012-07-03',
'2012-07-04',
'2012-07-05',
'2012-07-06',
'2012-07-09',
'2012-07-10',
'2012-07-11',
'2012-07-12',
'2012-07-13',
'2012-07-16',
'2012-07-17',
'2012-07-18',
'2012-07-19',
'2012-07-20',
'2012-07-23',
'2012-07-24',
'2012-07-25',
'2012-07-26',
'2012-07-27',
'2012-07-30',
'2012-07-31',
'2012-08-01',
'2012-08-02',
'2012-08-03',
'2012-08-06',
'2012-08-07',
'2012-08-08',
'2012-08-09',
'2012-08-10',
'2012-08-13',
'2012-08-14',
'2012-08-15',
'2012-08-16',
'2012-08-17',
'2012-08-20',
'2012-08-21',
'2012-08-22',
'2012-08-23',
'2012-08-24',
'2012-08-27',
'2012-08-28',
'2012-08-29',
'2012-08-30',
'2012-08-31',
'2012-09-03',
'2012-09-04',
'2012-09-05',
'2012-09-06',
'2012-09-07',
'2012-09-10',
'2012-09-11',
'2012-09-12',
'2012-09-13',
'2012-09-14',
'2012-09-17',
'2012-09-18',
'2012-09-19',
'2012-09-20',
'2012-09-21',
'2012-09-24',
'2012-09-25',
'2012-09-26',
'2012-09-27',
'2012-09-28',
'2012-10-08',
'2012-10-09',
'2012-10-10',
'2012-10-11',
'2012-10-12',
'2012-10-15',
'2012-10-16',
'2012-10-17',
'2012-10-18',
'2012-10-19',
'2012-10-22',
'2012-10-23',
'2012-10-24',
'2012-10-25',
'2012-10-26',
'2012-10-29',
'2012-10-30',
'2012-10-31',
'2012-11-01',
'2012-11-02',
'2012-11-05',
'2012-11-06',
'2012-11-07',
'2012-11-08',
'2012-11-09',
'2012-11-12',
'2012-11-13',
'2012-11-14',
'2012-11-15',
'2012-11-16',
'2012-11-19',
'2012-11-20',
'2012-11-21',
'2012-11-22',
'2012-11-23',
'2012-11-26',
'2012-11-27',
'2012-11-28',
'2012-11-29',
'2012-11-30',
'2012-12-03',
'2012-12-04',
'2012-12-05',
'2012-12-06',
'2012-12-07',
'2012-12-10',
'2012-12-11',
'2012-12-12',
'2012-12-13',
'2012-12-14',
'2012-12-17',
'2012-12-18',
'2012-12-19',
'2012-12-20',
'2012-12-21',
'2012-12-24',
'2012-12-25',
'2012-12-26',
'2012-12-27',
'2012-12-28',
'2012-12-31',
'2013-01-04',
'2013-01-07',
'2013-01-08',
'2013-01-09',
'2013-01-10',
'2013-01-11',
'2013-01-14',
'2013-01-15',
'2013-01-16',
'2013-01-17',
'2013-01-18',
'2013-01-21',
'2013-01-22',
'2013-01-23',
'2013-01-24',
'2013-01-25',
'2013-01-28',
'2013-01-29',
'2013-01-30',
'2013-01-31',
'2013-02-01',
'2013-02-04',
'2013-02-05',
'2013-02-06',
'2013-02-07',
'2013-02-08',
'2013-02-18',
'2013-02-19',
'2013-02-20',
'2013-02-21',
'2013-02-22',
'2013-02-25',
'2013-02-26',
'2013-02-27',
'2013-02-28',
'2013-03-01',
'2013-03-04',
'2013-03-05',
'2013-03-06',
'2013-03-07',
'2013-03-08',
'2013-03-11',
'2013-03-12',
'2013-03-13',
'2013-03-14',
'2013-03-15',
'2013-03-18',
'2013-03-19',
'2013-03-20',
'2013-03-21',
'2013-03-22',
'2013-03-25',
'2013-03-26',
'2013-03-27',
'2013-03-28',
'2013-03-29',
'2013-04-01',
'2013-04-02',
'2013-04-03',
'2013-04-08',
'2013-04-09',
'2013-04-10',
'2013-04-11',
'2013-04-12',
'2013-04-15',
'2013-04-16',
'2013-04-17',
'2013-04-18',
'2013-04-19',
'2013-04-22',
'2013-04-23',
'2013-04-24',
'2013-04-25',
'2013-04-26',
'2013-05-02',
'2013-05-03',
'2013-05-06',
'2013-05-07',
'2013-05-08',
'2013-05-09',
'2013-05-10',
'2013-05-13',
'2013-05-14',
'2013-05-15',
'2013-05-16',
'2013-05-17',
'2013-05-20',
'2013-05-21',
'2013-05-22',
'2013-05-23',
'2013-05-24',
'2013-05-27',
'2013-05-28',
'2013-05-29',
'2013-05-30',
'2013-05-31',
'2013-06-03',
'2013-06-04',
'2013-06-05',
'2013-06-06',
'2013-06-07',
'2013-06-13',
'2013-06-14',
'2013-06-17',
'2013-06-18',
'2013-06-19',
'2013-06-20',
'2013-06-21',
'2013-06-24',
'2013-06-25',
'2013-06-26',
'2013-06-27',
'2013-06-28',
'2013-07-01',
'2013-07-02',
'2013-07-03',
'2013-07-04',
'2013-07-05',
'2013-07-08',
'2013-07-09',
'2013-07-10',
'2013-07-11',
'2013-07-12',
'2013-07-15',
'2013-07-16',
'2013-07-17',
'2013-07-18',
'2013-07-19',
'2013-07-22',
'2013-07-23',
'2013-07-24',
'2013-07-25',
'2013-07-26',
'2013-07-29',
'2013-07-30',
'2013-07-31',
'2013-08-01',
'2013-08-02',
'2013-08-05',
'2013-08-06',
'2013-08-07',
'2013-08-08',
'2013-08-09',
'2013-08-12',
'2013-08-13',
'2013-08-14',
'2013-08-15',
'2013-08-16',
'2013-08-19',
'2013-08-20',
'2013-08-21',
'2013-08-22',
'2013-08-23',
'2013-08-26',
'2013-08-27',
'2013-08-28',
'2013-08-29',
'2013-08-30',
'2013-09-02',
'2013-09-03',
'2013-09-04',
'2013-09-05',
'2013-09-06',
'2013-09-09',
'2013-09-10',
'2013-09-11',
'2013-09-12',
'2013-09-13',
'2013-09-16',
'2013-09-17',
'2013-09-18',
'2013-09-23',
'2013-09-24',
'2013-09-25',
'2013-09-26',
'2013-09-27',
'2013-09-30',
'2013-10-08',
'2013-10-09',
'2013-10-10',
'2013-10-11',
'2013-10-14',
'2013-10-15',
'2013-10-16',
'2013-10-17',
'2013-10-18',
'2013-10-21',
'2013-10-22',
'2013-10-23',
'2013-10-24',
'2013-10-25',
'2013-10-28',
'2013-10-29',
'2013-10-30',
'2013-10-31',
'2013-11-01',
'2013-11-04',
'2013-11-05',
'2013-11-06',
'2013-11-07',
'2013-11-08',
'2013-11-11',
'2013-11-12',
'2013-11-13',
'2013-11-14',
'2013-11-15',
'2013-11-18',
'2013-11-19',
'2013-11-20',
'2013-11-21',
'2013-11-22',
'2013-11-25',
'2013-11-26',
'2013-11-27',
'2013-11-28',
'2013-11-29',
'2013-12-02',
'2013-12-03',
'2013-12-04',
'2013-12-05',
'2013-12-06',
'2013-12-09',
'2013-12-10',
'2013-12-11',
'2013-12-12',
'2013-12-13',
'2013-12-16',
'2013-12-17',
'2013-12-18',
'2013-12-19',
'2013-12-20',
'2013-12-23',
'2013-12-24',
'2013-12-25',
'2013-12-26',
'2013-12-27',
'2013-12-30',
'2013-12-31',
'2014-01-02',
'2014-01-03',
'2014-01-06',
'2014-01-07',
'2014-01-08',
'2014-01-09',
'2014-01-10',
'2014-01-13',
'2014-01-14',
'2014-01-15',
'2014-01-16',
'2014-01-17',
'2014-01-20',
'2014-01-21',
'2014-01-22',
'2014-01-23',
'2014-01-24',
'2014-01-27',
'2014-01-28',
'2014-01-29',
'2014-01-30',
'2014-02-07',
'2014-02-10',
'2014-02-11',
'2014-02-12',
'2014-02-13',
'2014-02-14',
'2014-02-17',
'2014-02-18',
'2014-02-19',
'2014-02-20',
'2014-02-21',
'2014-02-24',
'2014-02-25',
'2014-02-26',
'2014-02-27',
'2014-02-28',
'2014-03-03',
'2014-03-04',
'2014-03-05',
'2014-03-06',
'2014-03-07',
'2014-03-10',
'2014-03-11',
'2014-03-12',
'2014-03-13',
'2014-03-14',
'2014-03-17',
'2014-03-18',
'2014-03-19',
'2014-03-20',
'2014-03-21',
'2014-03-24',
'2014-03-25',
'2014-03-26',
'2014-03-27',
'2014-03-28',
'2014-03-31',
'2014-04-01',
'2014-04-02',
'2014-04-03',
'2014-04-04',
'2014-04-08',
'2014-04-09',
'2014-04-10',
'2014-04-11',
'2014-04-14',
'2014-04-15',
'2014-04-16',
'2014-04-17',
'2014-04-18',
'2014-04-21',
'2014-04-22',
'2014-04-23',
'2014-04-24',
'2014-04-25',
'2014-04-28',
'2014-04-29',
'2014-04-30',
'2014-05-05',
'2014-05-06',
'2014-05-07',
'2014-05-08',
'2014-05-09',
'2014-05-12',
'2014-05-13',
'2014-05-14',
'2014-05-15',
'2014-05-16',
'2014-05-19',
'2014-05-20',
'2014-05-21',
'2014-05-22',
'2014-05-23',
'2014-05-26',
'2014-05-27',
'2014-05-28',
'2014-05-29',
'2014-05-30',
'2014-06-03',
'2014-06-04',
'2014-06-05',
'2014-06-06',
'2014-06-09',
'2014-06-10',
'2014-06-11',
'2014-06-12',
'2014-06-13',
'2014-06-16',
'2014-06-17',
'2014-06-18',
'2014-06-19',
'2014-06-20',
'2014-06-23',
'2014-06-24',
'2014-06-25',
'2014-06-26',
'2014-06-27',
'2014-06-30',
'2014-07-01',
'2014-07-02',
'2014-07-03',
'2014-07-04',
'2014-07-07',
'2014-07-08',
'2014-07-09',
'2014-07-10',
'2014-07-11',
'2014-07-14',
'2014-07-15',
'2014-07-16',
'2014-07-17',
'2014-07-18',
'2014-07-21',
'2014-07-22',
'2014-07-23',
'2014-07-24',
'2014-07-25',
'2014-07-28',
'2014-07-29',
'2014-07-30',
'2014-07-31',
'2014-08-01',
'2014-08-04',
'2014-08-05',
'2014-08-06',
'2014-08-07',
'2014-08-08',
'2014-08-11',
'2014-08-12',
'2014-08-13',
'2014-08-14',
'2014-08-15',
'2014-08-18',
'2014-08-19',
'2014-08-20',
'2014-08-21',
'2014-08-22',
'2014-08-25',
'2014-08-26',
'2014-08-27',
'2014-08-28',
'2014-08-29',
'2014-09-01',
'2014-09-02',
'2014-09-03',
'2014-09-04',
'2014-09-05',
'2014-09-09',
'2014-09-10',
'2014-09-11',
'2014-09-12',
'2014-09-15',
'2014-09-16',
'2014-09-17',
'2014-09-18',
'2014-09-19',
'2014-09-22',
'2014-09-23',
'2014-09-24',
'2014-09-25',
'2014-09-26',
'2014-09-29',
'2014-09-30',
'2014-10-08',
'2014-10-09',
'2014-10-10',
'2014-10-13',
'2014-10-14',
'2014-10-15',
'2014-10-16',
'2014-10-17',
'2014-10-20',
'2014-10-21',
'2014-10-22',
'2014-10-23',
'2014-10-24',
'2014-10-27',
'2014-10-28',
'2014-10-29',
'2014-10-30',
'2014-10-31',
'2014-11-03',
'2014-11-04',
'2014-11-05',
'2014-11-06',
'2014-11-07',
'2014-11-10',
'2014-11-11',
'2014-11-12',
'2014-11-13',
'2014-11-14',
'2014-11-17',
'2014-11-18',
'2014-11-19',
'2014-11-20',
'2014-11-21',
'2014-11-24',
'2014-11-25',
'2014-11-26',
'2014-11-27',
'2014-11-28',
'2014-12-01',
'2014-12-02',
'2014-12-03',
'2014-12-04',
'2014-12-05',
'2014-12-08',
'2014-12-09',
'2014-12-10',
'2014-12-11',
'2014-12-12',
'2014-12-15',
'2014-12-16',
'2014-12-17',
'2014-12-18',
'2014-12-19',
'2014-12-22',
'2014-12-23',
'2014-12-24',
'2014-12-25',
'2014-12-26',
'2014-12-29',
'2014-12-30',
'2014-12-31',
'2015-01-05',
'2015-01-06',
'2015-01-07',
'2015-01-08',
'2015-01-09',
'2015-01-12',
'2015-01-13',
'2015-01-14',
'2015-01-15',
'2015-01-16',
'2015-01-19',
'2015-01-20',
'2015-01-21',
'2015-01-22',
'2015-01-23',
'2015-01-26',
'2015-01-27',
'2015-01-28',
'2015-01-29',
'2015-01-30',
'2015-02-02',
'2015-02-03',
'2015-02-04',
'2015-02-05',
'2015-02-06',
'2015-02-09',
'2015-02-10',
'2015-02-11',
'2015-02-12',
'2015-02-13',
'2015-02-16',
'2015-02-17',
'2015-02-25',
'2015-02-26',
'2015-02-27',
'2015-03-02',
'2015-03-03',
'2015-03-04',
'2015-03-05',
'2015-03-06',
'2015-03-09',
'2015-03-10',
'2015-03-11',
'2015-03-12',
'2015-03-13',
'2015-03-16',
'2015-03-17',
'2015-03-18',
'2015-03-19',
'2015-03-20',
'2015-03-23',
'2015-03-24',
'2015-03-25',
'2015-03-26',
'2015-03-27',
'2015-03-30',
'2015-03-31',
'2015-04-01',
'2015-04-02',
'2015-04-03',
'2015-04-07',
'2015-04-08',
'2015-04-09',
'2015-04-10',
'2015-04-13',
'2015-04-14',
'2015-04-15',
'2015-04-16',
'2015-04-17',
'2015-04-20',
'2015-04-21',
'2015-04-22',
'2015-04-23',
'2015-04-24',
'2015-04-27',
'2015-04-28',
'2015-04-29',
'2015-04-30',
'2015-05-04',
'2015-05-05',
'2015-05-06',
'2015-05-07',
'2015-05-08',
'2015-05-11',
'2015-05-12',
'2015-05-13',
'2015-05-14',
'2015-05-15',
'2015-05-18',
'2015-05-19',
'2015-05-20',
'2015-05-21',
'2015-05-22',
'2015-05-25',
'2015-05-26',
'2015-05-27',
'2015-05-28',
'2015-05-29',
'2015-06-01',
'2015-06-02',
'2015-06-03',
'2015-06-04',
'2015-06-05',
'2015-06-08',
'2015-06-09',
'2015-06-10',
'2015-06-11',
'2015-06-12',
'2015-06-15',
'2015-06-16',
'2015-06-17',
'2015-06-18',
'2015-06-19',
'2015-06-23',
'2015-06-24',
'2015-06-25',
'2015-06-26',
'2015-06-29',
'2015-06-30',
'2015-07-01',
'2015-07-02',
'2015-07-03',
'2015-07-06',
'2015-07-07',
'2015-07-08',
'2015-07-09',
'2015-07-10',
'2015-07-13',
'2015-07-14',
'2015-07-15',
'2015-07-16',
'2015-07-17',
'2015-07-20',
'2015-07-21',
'2015-07-22',
'2015-07-23',
'2015-07-24',
'2015-07-27',
'2015-07-28',
'2015-07-29',
'2015-07-30',
'2015-07-31',
'2015-08-03',
'2015-08-04',
'2015-08-05',
'2015-08-06',
'2015-08-07',
'2015-08-10',
'2015-08-11',
'2015-08-12',
'2015-08-13',
'2015-08-14',
'2015-08-17',
'2015-08-18',
'2015-08-19',
'2015-08-20',
'2015-08-21',
'2015-08-24',
'2015-08-25',
'2015-08-26',
'2015-08-27',
'2015-08-28',
'2015-08-31',
'2015-09-01',
'2015-09-02',
'2015-09-07',
'2015-09-08',
'2015-09-09',
'2015-09-10',
'2015-09-11',
'2015-09-14',
'2015-09-15',
'2015-09-16',
'2015-09-17',
'2015-09-18',
'2015-09-21',
'2015-09-22',
'2015-09-23',
'2015-09-24',
'2015-09-25',
'2015-09-28',
'2015-09-29',
'2015-09-30',
'2015-10-08',
'2015-10-09',
'2015-10-12',
'2015-10-13',
'2015-10-14',
'2015-10-15',
'2015-10-16',
'2015-10-19',
'2015-10-20',
'2015-10-21',
'2015-10-22',
'2015-10-23',
'2015-10-26',
'2015-10-27',
'2015-10-28',
'2015-10-29',
'2015-10-30',
'2015-11-02',
'2015-11-03',
'2015-11-04',
'2015-11-05',
'2015-11-06',
'2015-11-09',
'2015-11-10',
'2015-11-11',
'2015-11-12',
'2015-11-13',
'2015-11-16',
'2015-11-17',
'2015-11-18',
'2015-11-19',
'2015-11-20',
'2015-11-23',
'2015-11-24',
'2015-11-25',
'2015-11-26',
'2015-11-27',
'2015-11-30',
'2015-12-01',
'2015-12-02',
'2015-12-03',
'2015-12-04',
'2015-12-07',
'2015-12-08',
'2015-12-09',
'2015-12-10',
'2015-12-11',
'2015-12-14',
'2015-12-15',
'2015-12-16',
'2015-12-17',
'2015-12-18',
'2015-12-21',
'2015-12-22',
'2015-12-23',
'2015-12-24',
'2015-12-25',
'2015-12-28',
'2015-12-29',
'2015-12-30',
'2015-12-31',
'2016-01-04',
'2016-01-05',
'2016-01-06',
'2016-01-07',
'2016-01-08',
'2016-01-11',
'2016-01-12',
'2016-01-13',
'2016-01-14',
'2016-01-15',
'2016-01-18',
'2016-01-19',
'2016-01-20',
'2016-01-21',
'2016-01-22',
'2016-01-25',
'2016-01-26',
'2016-01-27',
'2016-01-28',
'2016-01-29',
'2016-02-01',
'2016-02-02',
'2016-02-03',
'2016-02-04',
'2016-02-05',
'2016-02-15',
'2016-02-16',
'2016-02-17',
'2016-02-18',
'2016-02-19',
'2016-02-22',
'2016-02-23',
'2016-02-24',
'2016-02-25',
'2016-02-26',
'2016-02-29',
'2016-03-01',
'2016-03-02',
'2016-03-03',
'2016-03-04',
'2016-03-07',
'2016-03-08',
'2016-03-09',
'2016-03-10',
'2016-03-11',
'2016-03-14',
'2016-03-15',
'2016-03-16',
'2016-03-17',
'2016-03-18',
'2016-03-21',
'2016-03-22',
'2016-03-23',
'2016-03-24',
'2016-03-25',
'2016-03-28',
'2016-03-29',
'2016-03-30',
'2016-03-31',
'2016-04-01',
'2016-04-05',
'2016-04-06',
'2016-04-07',
'2016-04-08',
'2016-04-11',
'2016-04-12',
'2016-04-13',
'2016-04-14',
'2016-04-15',
'2016-04-18',
'2016-04-19',
'2016-04-20',
'2016-04-21',
'2016-04-22',
'2016-04-25',
'2016-04-26',
'2016-04-27',
'2016-04-28',
'2016-04-29',
'2016-05-03',
'2016-05-04',
'2016-05-05',
'2016-05-06',
'2016-05-09',
'2016-05-10',
'2016-05-11',
'2016-05-12',
'2016-05-13',
'2016-05-16',
'2016-05-17',
'2016-05-18',
'2016-05-19',
'2016-05-20',
'2016-05-23',
'2016-05-24',
'2016-05-25',
'2016-05-26',
'2016-05-27',
'2016-05-30',
'2016-05-31',
'2016-06-01',
'2016-06-02',
'2016-06-03',
'2016-06-06',
'2016-06-07',
'2016-06-08',
'2016-06-13',
'2016-06-14',
'2016-06-15',
'2016-06-16',
'2016-06-17',
'2016-06-20',
'2016-06-21',
'2016-06-22',
'2016-06-23',
'2016-06-24',
'2016-06-27',
'2016-06-28',
'2016-06-29',
'2016-06-30',
'2016-07-01',
'2016-07-04',
'2016-07-05',
'2016-07-06',
'2016-07-07',
'2016-07-08',
'2016-07-11',
'2016-07-12',
'2016-07-13',
'2016-07-14',
'2016-07-15',
'2016-07-18',
'2016-07-19',
'2016-07-20',
'2016-07-21',
'2016-07-22',
'2016-07-25',
'2016-07-26',
'2016-07-27',
'2016-07-28',
'2016-07-29',
'2016-08-01',
'2016-08-02',
'2016-08-03',
'2016-08-04',
'2016-08-05',
'2016-08-08',
'2016-08-09',
'2016-08-10',
'2016-08-11',
'2016-08-12',
'2016-08-15',
'2016-08-16',
'2016-08-17',
'2016-08-18',
'2016-08-19',
'2016-08-22',
'2016-08-23',
'2016-08-24',
'2016-08-25',
'2016-08-26',
'2016-08-29',
'2016-08-30',
'2016-08-31',
'2016-09-01',
'2016-09-02',
'2016-09-05',
'2016-09-06',
'2016-09-07',
'2016-09-08',
'2016-09-09',
'2016-09-12',
'2016-09-13',
'2016-09-14',
'2016-09-19',
'2016-09-20',
'2016-09-21',
'2016-09-22',
'2016-09-23',
'2016-09-26',
'2016-09-27',
'2016-09-28',
'2016-09-29',
'2016-09-30',
'2016-10-10',
'2016-10-11',
'2016-10-12',
'2016-10-13',
'2016-10-14',
'2016-10-17',
'2016-10-18',
'2016-10-19',
'2016-10-20',
'2016-10-21',
'2016-10-24',
'2016-10-25',
'2016-10-26',
'2016-10-27',
'2016-10-28',
'2016-10-31',
'2016-11-01',
'2016-11-02',
'2016-11-03',
'2016-11-04',
'2016-11-07',
'2016-11-08',
'2016-11-09',
'2016-11-10',
'2016-11-11',
'2016-11-14',
'2016-11-15',
'2016-11-16',
'2016-11-17',
'2016-11-18',
'2016-11-21',
'2016-11-22',
'2016-11-23',
'2016-11-24',
'2016-11-25',
'2016-11-28',
'2016-11-29',
'2016-11-30',
'2016-12-01',
'2016-12-02',
'2016-12-05',
'2016-12-06',
'2016-12-07',
'2016-12-08',
'2016-12-09',
'2016-12-12',
'2016-12-13',
'2016-12-14',
'2016-12-15',
'2016-12-16',
'2016-12-19',
'2016-12-20',
'2016-12-21',
'2016-12-22',
'2016-12-23',
'2016-12-26',
'2016-12-27',
'2016-12-28',
'2016-12-29',
'2016-12-30',
'2017-01-03',
'2017-01-04',
'2017-01-05',
'2017-01-06',
'2017-01-09',
'2017-01-10',
'2017-01-11',
'2017-01-12',
'2017-01-13',
'2017-01-16',
'2017-01-17',
'2017-01-18',
'2017-01-19',
'2017-01-20',
'2017-01-23',
'2017-01-24',
'2017-01-25',
'2017-01-26',
'2017-02-03',
'2017-02-06',
'2017-02-07',
'2017-02-08',
'2017-02-09',
'2017-02-10',
'2017-02-13',
'2017-02-14',
'2017-02-15',
'2017-02-16',
'2017-02-17',
'2017-02-20',
'2017-02-21',
'2017-02-22',
'2017-02-23',
'2017-02-24',
'2017-02-27',
'2017-02-28',
'2017-03-01',
'2017-03-02',
'2017-03-03',
'2017-03-06',
'2017-03-07',
'2017-03-08',
'2017-03-09',
'2017-03-10',
'2017-03-13',
'2017-03-14',
'2017-03-15',
'2017-03-16',
'2017-03-17',
'2017-03-20',
'2017-03-21',
'2017-03-22',
'2017-03-23',
'2017-03-24',
'2017-03-27',
'2017-03-28',
'2017-03-29',
'2017-03-30',
'2017-03-31',
'2017-04-05',
'2017-04-06',
'2017-04-07',
'2017-04-10',
'2017-04-11',
'2017-04-12',
'2017-04-13',
'2017-04-14',
'2017-04-17',
'2017-04-18',
'2017-04-19',
'2017-04-20',
'2017-04-21',
'2017-04-24',
'2017-04-25',
'2017-04-26',
'2017-04-27',
'2017-04-28',
'2017-05-02',
'2017-05-03',
'2017-05-04',
'2017-05-05',
'2017-05-08',
'2017-05-09',
'2017-05-10',
'2017-05-11',
'2017-05-12',
'2017-05-15',
'2017-05-16',
'2017-05-17',
'2017-05-18',
'2017-05-19',
'2017-05-22',
'2017-05-23',
'2017-05-24',
'2017-05-25',
'2017-05-26',
'2017-05-31',
'2017-06-01',
'2017-06-02',
'2017-06-05',
'2017-06-06',
'2017-06-07',
'2017-06-08',
'2017-06-09',
'2017-06-12',
'2017-06-13',
'2017-06-14',
'2017-06-15',
'2017-06-16',
'2017-06-19',
'2017-06-20',
'2017-06-21',
'2017-06-22',
'2017-06-23',
'2017-06-26',
'2017-06-27',
'2017-06-28',
'2017-06-29',
'2017-06-30',
'2017-07-03',
'2017-07-04',
'2017-07-05',
'2017-07-06',
'2017-07-07',
'2017-07-10',
'2017-07-11',
'2017-07-12',
'2017-07-13',
'2017-07-14',
'2017-07-17',
'2017-07-18',
'2017-07-19',
'2017-07-20',
'2017-07-21',
'2017-07-24',
'2017-07-25',
'2017-07-26',
'2017-07-27',
'2017-07-28',
'2017-07-31',
'2017-08-01',
'2017-08-02',
'2017-08-03',
'2017-08-04',
'2017-08-07',
'2017-08-08',
'2017-08-09',
'2017-08-10',
'2017-08-11',
'2017-08-14',
'2017-08-15',
'2017-08-16',
'2017-08-17',
'2017-08-18',
'2017-08-21',
'2017-08-22',
'2017-08-23',
'2017-08-24',
'2017-08-25',
'2017-08-28',
'2017-08-29',
'2017-08-30',
'2017-08-31',
'2017-09-01',
'2017-09-04',
'2017-09-05',
'2017-09-06',
'2017-09-07',
'2017-09-08',
'2017-09-11',
'2017-09-12',
'2017-09-13',
'2017-09-14',
'2017-09-15',
'2017-09-18',
'2017-09-19',
'2017-09-20',
'2017-09-21',
'2017-09-22',
'2017-09-25',
'2017-09-26',
'2017-09-27',
'2017-09-28',
'2017-09-29',
'2017-10-09',
'2017-10-10',
'2017-10-11',
'2017-10-12',
'2017-10-13',
'2017-10-16',
'2017-10-17',
'2017-10-18',
'2017-10-19',
'2017-10-20',
'2017-10-23',
'2017-10-24',
'2017-10-25',
'2017-10-26',
'2017-10-27',
'2017-10-30',
'2017-10-31',
'2017-11-01',
'2017-11-02',
'2017-11-03',
'2017-11-06',
'2017-11-07',
'2017-11-08',
'2017-11-09',
'2017-11-10',
'2017-11-13',
'2017-11-14',
'2017-11-15',
'2017-11-16',
'2017-11-17',
'2017-11-20',
'2017-11-21',
'2017-11-22',
'2017-11-23',
'2017-11-24',
'2017-11-27',
'2017-11-28',
'2017-11-29',
'2017-11-30',
'2017-12-01',
'2017-12-04',
'2017-12-05',
'2017-12-06',
'2017-12-07',
'2017-12-08',
'2017-12-11',
'2017-12-12',
'2017-12-13',
'2017-12-14',
'2017-12-15',
'2017-12-18',
'2017-12-19',
'2017-12-20',
'2017-12-21',
'2017-12-22',
'2017-12-25',
'2017-12-26',
'2017-12-27',
'2017-12-28',
'2017-12-29',
'2018-01-02',
'2018-01-03',
'2018-01-04',
'2018-01-05',
'2018-01-08',
'2018-01-09',
'2018-01-10',
'2018-01-11',
'2018-01-12',
'2018-01-15',
'2018-01-16',
'2018-01-17',
'2018-01-18',
'2018-01-19',
'2018-01-22',
'2018-01-23',
'2018-01-24',
'2018-01-25',
'2018-01-26',
'2018-01-29',
'2018-01-30',
'2018-01-31',
'2018-02-01',
'2018-02-02',
'2018-02-05',
'2018-02-06',
'2018-02-07',
'2018-02-08',
'2018-02-09',
'2018-02-12',
'2018-02-13',
'2018-02-14',
'2018-02-22',
'2018-02-23',
'2018-02-26',
'2018-02-27',
'2018-02-28',
'2018-03-01',
'2018-03-02',
'2018-03-05',
'2018-03-06',
'2018-03-07',
'2018-03-08',
'2018-03-09',
'2018-03-12',
'2018-03-13',
'2018-03-14',
'2018-03-15',
'2018-03-16',
'2018-03-19',
'2018-03-20',
'2018-03-21',
'2018-03-22',
'2018-03-23',
'2018-03-26',
'2018-03-27',
'2018-03-28',
'2018-03-29',
'2018-03-30',
'2018-04-02',
'2018-04-03',
'2018-04-04',
'2018-04-09',
'2018-04-10',
'2018-04-11',
'2018-04-12',
'2018-04-13',
'2018-04-16',
'2018-04-17',
'2018-04-18',
'2018-04-19',
'2018-04-20',
'2018-04-23',
'2018-04-24',
'2018-04-25',
'2018-04-26',
'2018-04-27',
'2018-05-02',
'2018-05-03',
'2018-05-04',
'2018-05-07',
'2018-05-08',
'2018-05-09',
'2018-05-10',
'2018-05-11',
'2018-05-14',
'2018-05-15',
'2018-05-16',
'2018-05-17',
'2018-05-18',
'2018-05-21',
'2018-05-22',
'2018-05-23',
'2018-05-24',
'2018-05-25',
'2018-05-28',
'2018-05-29',
'2018-05-30',
'2018-05-31',
'2018-06-01',
'2018-06-04',
'2018-06-05',
'2018-06-06',
'2018-06-07',
'2018-06-08',
'2018-06-11',
'2018-06-12',
'2018-06-13',
'2018-06-14',
'2018-06-15',
'2018-06-19',
'2018-06-20',
'2018-06-21',
'2018-06-22',
'2018-06-25',
'2018-06-26',
'2018-06-27',
'2018-06-28',
'2018-06-29',
'2018-07-02',
'2018-07-03',
'2018-07-04',
'2018-07-05',
'2018-07-06',
'2018-07-09',
'2018-07-10',
'2018-07-11',
'2018-07-12',
'2018-07-13',
'2018-07-16',
'2018-07-17',
'2018-07-18',
'2018-07-19',
'2018-07-20',
'2018-07-23',
'2018-07-24',
'2018-07-25',
'2018-07-26',
'2018-07-27',
'2018-07-30',
'2018-07-31',
'2018-08-01',
'2018-08-02',
'2018-08-03',
'2018-08-06',
'2018-08-07',
'2018-08-08',
'2018-08-09',
'2018-08-10',
'2018-08-13',
'2018-08-14',
'2018-08-15',
'2018-08-16',
'2018-08-17',
'2018-08-20',
'2018-08-21',
'2018-08-22',
'2018-08-23',
'2018-08-24',
'2018-08-27',
'2018-08-28',
'2018-08-29',
'2018-08-30',
'2018-08-31',
'2018-09-03',
'2018-09-04',
'2018-09-05',
'2018-09-06',
'2018-09-07',
'2018-09-10',
'2018-09-11',
'2018-09-12',
'2018-09-13',
'2018-09-14',
'2018-09-17',
'2018-09-18',
'2018-09-19',
'2018-09-20',
'2018-09-21',
'2018-09-25',
'2018-09-26',
'2018-09-27',
'2018-09-28',
'2018-10-08',
'2018-10-09',
'2018-10-10',
'2018-10-11',
'2018-10-12',
'2018-10-15',
'2018-10-16',
'2018-10-17',
'2018-10-18',
'2018-10-19',
'2018-10-22',
'2018-10-23',
'2018-10-24',
'2018-10-25',
'2018-10-26',
'2018-10-29',
'2018-10-30',
'2018-10-31',
'2018-11-01',
'2018-11-02',
'2018-11-05',
'2018-11-06',
'2018-11-07',
'2018-11-08',
'2018-11-09',
'2018-11-12',
'2018-11-13',
'2018-11-14',
'2018-11-15',
'2018-11-16',
'2018-11-19',
'2018-11-20',
'2018-11-21',
'2018-11-22',
'2018-11-23',
'2018-11-26',
'2018-11-27',
'2018-11-28',
'2018-11-29',
'2018-11-30',
'2018-12-03',
'2018-12-04',
'2018-12-05',
'2018-12-06',
'2018-12-07',
'2018-12-10',
'2018-12-11',
'2018-12-12',
'2018-12-13',
'2018-12-14',
'2018-12-17',
'2018-12-18',
'2018-12-19',
'2018-12-20',
'2018-12-21',
'2018-12-24',
'2018-12-25',
'2018-12-26',
'2018-12-27',
'2018-12-28',
'2019-01-02',
'2019-01-03',
'2019-01-04',
'2019-01-07',
'2019-01-08',
'2019-01-09',
'2019-01-10',
'2019-01-11',
'2019-01-14',
'2019-01-15',
'2019-01-16',
'2019-01-17',
'2019-01-18',
'2019-01-21',
'2019-01-22',
'2019-01-23',
'2019-01-24',
'2019-01-25',
'2019-01-28',
'2019-01-29',
'2019-01-30',
'2019-01-31',
'2019-02-01',
'2019-02-11',
'2019-02-12',
'2019-02-13',
'2019-02-14',
'2019-02-15',
'2019-02-18',
'2019-02-19',
'2019-02-20',
'2019-02-21',
'2019-02-22',
'2019-02-25',
'2019-02-26',
'2019-02-27',
'2019-02-28',
'2019-03-01',
'2019-03-04',
'2019-03-05',
'2019-03-06',
'2019-03-07',
'2019-03-08',
'2019-03-11',
'2019-03-12',
'2019-03-13',
'2019-03-14',
'2019-03-15',
'2019-03-18',
'2019-03-19',
'2019-03-20',
'2019-03-21',
'2019-03-22',
'2019-03-25',
'2019-03-26',
'2019-03-27',
'2019-03-28',
'2019-03-29',
'2019-04-01',
'2019-04-02',
'2019-04-03',
'2019-04-04',
'2019-04-08',
'2019-04-09',
'2019-04-10',
'2019-04-11',
'2019-04-12',
'2019-04-15',
'2019-04-16',
'2019-04-17',
'2019-04-18',
'2019-04-19',
'2019-04-22',
'2019-04-23',
'2019-04-24',
'2019-04-25',
'2019-04-26',
'2019-04-29',
'2019-04-30',
'2019-05-06',
'2019-05-07',
'2019-05-08',
'2019-05-09',
'2019-05-10',
'2019-05-13',
'2019-05-14',
'2019-05-15',
'2019-05-16',
'2019-05-17',
'2019-05-20',
'2019-05-21',
'2019-05-22',
'2019-05-23',
'2019-05-24',
'2019-05-27',
'2019-05-28',
'2019-05-29',
'2019-05-30',
'2019-05-31',
'2019-06-03',
'2019-06-04',
'2019-06-05',
'2019-06-06',
'2019-06-10',
'2019-06-11',
'2019-06-12',
'2019-06-13',
'2019-06-14',
'2019-06-17',
'2019-06-18',
'2019-06-19',
'2019-06-20',
'2019-06-21',
'2019-06-24',
'2019-06-25',
'2019-06-26',
'2019-06-27',
'2019-06-28',
'2019-07-01',
'2019-07-02',
'2019-07-03',
'2019-07-04',
'2019-07-05',
'2019-07-08',
'2019-07-09',
'2019-07-10',
'2019-07-11',
'2019-07-12',
'2019-07-15',
'2019-07-16',
'2019-07-17',
'2019-07-18',
'2019-07-19',
'2019-07-22',
'2019-07-23',
'2019-07-24',
'2019-07-25',
'2019-07-26',
'2019-07-29',
'2019-07-30',
'2019-07-31',
'2019-08-01',
'2019-08-02',
'2019-08-05',
'2019-08-06',
'2019-08-07',
'2019-08-08',
'2019-08-09',
'2019-08-12',
'2019-08-13',
'2019-08-14',
'2019-08-15',
'2019-08-16',
'2019-08-19',
'2019-08-20',
'2019-08-21',
'2019-08-22',
'2019-08-23',
'2019-08-26',
'2019-08-27',
'2019-08-28',
'2019-08-29',
'2019-08-30',
'2019-09-02',
'2019-09-03',
'2019-09-04',
'2019-09-05',
'2019-09-06',
'2019-09-09',
'2019-09-10',
'2019-09-11',
'2019-09-12',
'2019-09-16',
'2019-09-17',
'2019-09-18',
'2019-09-19',
'2019-09-20',
'2019-09-23',
'2019-09-24',
'2019-09-25',
'2019-09-26',
'2019-09-27',
'2019-09-30',
'2019-10-08',
'2019-10-09',
'2019-10-10',
'2019-10-11',
'2019-10-14',
'2019-10-15',
'2019-10-16',
'2019-10-17',
'2019-10-18',
'2019-10-21',
'2019-10-22',
'2019-10-23',
'2019-10-24',
'2019-10-25',
'2019-10-28',
'2019-10-29',
'2019-10-30',
'2019-10-31',
'2019-11-01',
'2019-11-04',
'2019-11-05',
'2019-11-06',
'2019-11-07',
'2019-11-08',
'2019-11-11',
'2019-11-12',
'2019-11-13',
'2019-11-14',
'2019-11-15',
'2019-11-18',
'2019-11-19',
'2019-11-20',
'2019-11-21',
'2019-11-22',
'2019-11-25',
'2019-11-26',
'2019-11-27',
'2019-11-28',
'2019-11-29',
'2019-12-02',
'2019-12-03',
'2019-12-04',
'2019-12-05',
'2019-12-06',
'2019-12-09',
'2019-12-10',
'2019-12-11',
'2019-12-12',
'2019-12-13',
'2019-12-16',
'2019-12-17',
'2019-12-18',
'2019-12-19',
'2019-12-20',
'2019-12-23',
'2019-12-24',
'2019-12-25',
'2019-12-26',
'2019-12-27',
'2019-12-30',
'2019-12-31',
'2020-01-02',
'2020-01-03',
'2020-01-06',
'2020-01-07',
'2020-01-08',
'2020-01-09',
'2020-01-10',
'2020-01-13',
'2020-01-14',
'2020-01-15',
'2020-01-16',
'2020-01-17',
'2020-01-20',
'2020-01-21',
'2020-01-22',
'2020-01-23',
'2020-02-03',
'2020-02-04',
'2020-02-05',
'2020-02-06',
'2020-02-07',
'2020-02-10',
'2020-02-11',
'2020-02-12',
'2020-02-13',
'2020-02-14',
'2020-02-17',
'2020-02-18',
'2020-02-19',
'2020-02-20',
'2020-02-21',
'2020-02-24',
'2020-02-25',
'2020-02-26',
'2020-02-27',
'2020-02-28',
'2020-03-02',
'2020-03-03',
'2020-03-04',
'2020-03-05',
'2020-03-06',
'2020-03-09',
'2020-03-10',
'2020-03-11',
'2020-03-12',
'2020-03-13',
'2020-03-16',
'2020-03-17',
'2020-03-18',
'2020-03-19',
'2020-03-20',
'2020-03-23',
'2020-03-24',
'2020-03-25',
'2020-03-26',
'2020-03-27',
'2020-03-30',
'2020-03-31',
'2020-04-01',
'2020-04-02',
'2020-04-03',
'2020-04-07',
'2020-04-08',
'2020-04-09',
'2020-04-10',
'2020-04-13',
'2020-04-14',
'2020-04-15',
'2020-04-16',
'2020-04-17',
'2020-04-20',
'2020-04-21',
'2020-04-22',
'2020-04-23',
'2020-04-24',
'2020-04-27',
'2020-04-28',
'2020-04-29',
'2020-04-30',
'2020-05-06',
'2020-05-07',
'2020-05-08',
'2020-05-11',
'2020-05-12',
'2020-05-13',
'2020-05-14',
'2020-05-15',
'2020-05-18',
'2020-05-19',
'2020-05-20',
'2020-05-21',
'2020-05-22',
'2020-05-25',
'2020-05-26',
'2020-05-27',
'2020-05-28',
'2020-05-29',
'2020-06-01',
'2020-06-02',
'2020-06-03',
'2020-06-04',
'2020-06-05',
'2020-06-08',
'2020-06-09',
'2020-06-10',
'2020-06-11',
'2020-06-12',
'2020-06-15',
'2020-06-16',
'2020-06-17',
'2020-06-18',
'2020-06-19',
'2020-06-22',
'2020-06-23',
'2020-06-24',
'2020-06-29',
'2020-06-30',
'2020-07-01',
'2020-07-02',
'2020-07-03',
'2020-07-06',
'2020-07-07',
'2020-07-08',
'2020-07-09',
'2020-07-10',
'2020-07-13',
'2020-07-14',
'2020-07-15',
'2020-07-16',
'2020-07-17',
'2020-07-20',
'2020-07-21',
'2020-07-22',
'2020-07-23',
'2020-07-24',
'2020-07-27',
'2020-07-28',
'2020-07-29',
'2020-07-30',
'2020-07-31',
'2020-08-03',
'2020-08-04',
'2020-08-05',
'2020-08-06',
'2020-08-07',
'2020-08-10',
'2020-08-11',
'2020-08-12',
'2020-08-13',
'2020-08-14',
'2020-08-17',
'2020-08-18',
'2020-08-19',
'2020-08-20',
'2020-08-21',
'2020-08-24',
'2020-08-25',
'2020-08-26',
'2020-08-27',
'2020-08-28',
'2020-08-31',
'2020-09-01',
'2020-09-02',
'2020-09-03',
'2020-09-04',
'2020-09-07',
'2020-09-08',
'2020-09-09',
'2020-09-10',
'2020-09-11',
'2020-09-14',
'2020-09-15',
'2020-09-16',
'2020-09-17',
'2020-09-18',
'2020-09-21',
'2020-09-22',
'2020-09-23',
'2020-09-24',
'2020-09-25',
'2020-09-28',
'2020-09-29',
'2020-09-30',
'2020-10-09',
'2020-10-12',
'2020-10-13',
'2020-10-14',
'2020-10-15',
'2020-10-16',
'2020-10-19',
'2020-10-20',
'2020-10-21',
'2020-10-22',
'2020-10-23',
'2020-10-26',
'2020-10-27',
'2020-10-28',
'2020-10-29',
'2020-10-30',
'2020-11-02',
'2020-11-03',
'2020-11-04',
'2020-11-05',
'2020-11-06',
'2020-11-09',
'2020-11-10',
'2020-11-11',
'2020-11-12',
'2020-11-13',
'2020-11-16',
'2020-11-17',
'2020-11-18',
'2020-11-19',
'2020-11-20',
'2020-11-23',
'2020-11-24',
'2020-11-25',
'2020-11-26',
'2020-11-27',
'2020-11-30',
'2020-12-01',
'2020-12-02',
'2020-12-03',
'2020-12-04',
'2020-12-07',
'2020-12-08',
'2020-12-09',
'2020-12-10',
'2020-12-11',
'2020-12-14',
'2020-12-15',
'2020-12-16',
'2020-12-17',
'2020-12-18',
'2020-12-21',
'2020-12-22',
'2020-12-23',
'2020-12-24',
'2020-12-25',
'2020-12-28',
'2020-12-29',
'2020-12-30',
'2020-12-31',
"2021-01-04",
"2021-01-05",
"2021-01-06",
"2021-01-07",
"2021-01-08",
"2021-01-11",
"2021-01-12",
"2021-01-13",
"2021-01-14",
"2021-01-15",
"2021-01-18",
"2021-01-19",
"2021-01-20",
"2021-01-21",
"2021-01-22",
"2021-01-25",
"2021-01-26",
"2021-01-27",
"2021-01-28",
"2021-01-29",
"2021-02-01",
"2021-02-02",
"2021-02-03",
"2021-02-04",
"2021-02-05",
"2021-02-08",
"2021-02-09",
"2021-02-10",
"2021-02-18",
"2021-02-19",
"2021-02-22",
"2021-02-23",
"2021-02-24",
"2021-02-25",
"2021-02-26",
"2021-03-01",
"2021-03-02",
"2021-03-03",
"2021-03-04",
"2021-03-05",
"2021-03-08",
"2021-03-09",
"2021-03-10",
"2021-03-11",
"2021-03-12",
"2021-03-15",
"2021-03-16",
"2021-03-17",
"2021-03-18",
"2021-03-19",
"2021-03-22",
"2021-03-23",
"2021-03-24",
"2021-03-25",
"2021-03-26",
"2021-03-29",
"2021-03-30",
"2021-03-31",
"2021-04-01",
"2021-04-02",
"2021-04-06",
"2021-04-07",
"2021-04-08",
"2021-04-09",
"2021-04-12",
"2021-04-13",
"2021-04-14",
"2021-04-15",
"2021-04-16",
"2021-04-19",
"2021-04-20",
"2021-04-21",
"2021-04-22",
"2021-04-23",
"2021-04-26",
"2021-04-27",
"2021-04-28",
"2021-04-29",
"2021-04-30",
"2021-05-06",
"2021-05-07",
"2021-05-10",
"2021-05-11",
"2021-05-12",
"2021-05-13",
"2021-05-14",
"2021-05-17",
"2021-05-18",
"2021-05-19",
"2021-05-20",
"2021-05-21",
"2021-05-24",
"2021-05-25",
"2021-05-26",
"2021-05-27",
"2021-05-28",
"2021-05-31",
"2021-06-01",
"2021-06-02",
"2021-06-03",
"2021-06-04",
"2021-06-07",
"2021-06-08",
"2021-06-09",
"2021-06-10",
"2021-06-11",
"2021-06-15",
"2021-06-16",
"2021-06-17",
"2021-06-18",
"2021-06-21",
"2021-06-22",
"2021-06-23",
"2021-06-24",
"2021-06-25",
"2021-06-28",
"2021-06-29",
"2021-06-30",
"2021-07-01",
"2021-07-02",
"2021-07-05",
"2021-07-06",
"2021-07-07",
"2021-07-08",
"2021-07-09",
"2021-07-12",
"2021-07-13",
"2021-07-14",
"2021-07-15",
"2021-07-16",
"2021-07-19",
"2021-07-20",
"2021-07-21",
"2021-07-22",
"2021-07-23",
"2021-07-26",
"2021-07-27",
"2021-07-28",
"2021-07-29",
"2021-07-30",
"2021-08-02",
"2021-08-03",
"2021-08-04",
"2021-08-05",
"2021-08-06",
"2021-08-09",
"2021-08-10",
"2021-08-11",
"2021-08-12",
"2021-08-13",
"2021-08-16",
"2021-08-17",
"2021-08-18",
"2021-08-19",
"2021-08-20",
"2021-08-23",
"2021-08-24",
"2021-08-25",
"2021-08-26",
"2021-08-27",
"2021-08-30",
"2021-08-31",
"2021-09-01",
"2021-09-02",
"2021-09-03",
"2021-09-06",
"2021-09-07",
"2021-09-08",
"2021-09-09",
"2021-09-10",
"2021-09-13",
"2021-09-14",
"2021-09-15",
"2021-09-16",
"2021-09-17",
"2021-09-20",
"2021-09-22",
"2021-09-23",
"2021-09-24",
"2021-09-27",
"2021-09-28",
"2021-09-29",
"2021-09-30",
"2021-10-08",
"2021-10-11",
"2021-10-12",
"2021-10-13",
"2021-10-14",
"2021-10-15",
"2021-10-18",
"2021-10-19",
"2021-10-20",
"2021-10-21",
"2021-10-22",
"2021-10-25",
"2021-10-26",
"2021-10-27",
"2021-10-28",
"2021-10-29",
"2021-11-01",
"2021-11-02",
"2021-11-03",
"2021-11-04",
"2021-11-05",
"2021-11-08",
"2021-11-09",
"2021-11-10",
"2021-11-11",
"2021-11-12",
"2021-11-15",
"2021-11-16",
"2021-11-17",
"2021-11-18",
"2021-11-19",
"2021-11-22",
"2021-11-23",
"2021-11-24",
"2021-11-25",
"2021-11-26",
"2021-11-29",
"2021-11-30",
"2021-12-01",
"2021-12-02",
"2021-12-03",
"2021-12-06",
"2021-12-07",
"2021-12-08",
"2021-12-09",
"2021-12-10",
"2021-12-13",
"2021-12-14",
"2021-12-15",
"2021-12-16",
"2021-12-17",
"2021-12-20",
"2021-12-21",
"2021-12-22",
"2021-12-23",
"2021-12-24",
"2021-12-27",
"2021-12-28",
"2021-12-29",
"2021-12-30",
"2021-12-31",
"2022-01-04",
"2022-01-05",
"2022-01-06",
"2022-01-07",
"2022-01-10",
"2022-01-11",
"2022-01-12",
"2022-01-13",
"2022-01-14",
"2022-01-17",
"2022-01-18",
"2022-01-19",
"2022-01-20",
"2022-01-21",
"2022-01-24",
"2022-01-25",
"2022-01-26",
"2022-01-27",
"2022-01-28",
"2022-02-07",
"2022-02-08",
"2022-02-09",
"2022-02-10",
"2022-02-11",
"2022-02-14",
"2022-02-15",
"2022-02-16",
"2022-02-17",
"2022-02-18",
"2022-02-21",
"2022-02-22",
"2022-02-23",
"2022-02-24",
"2022-02-25",
"2022-02-28",
"2022-03-01",
"2022-03-02",
"2022-03-03",
"2022-03-04",
"2022-03-07",
"2022-03-08",
"2022-03-09",
"2022-03-10",
"2022-03-11",
"2022-03-14",
"2022-03-15",
"2022-03-16",
"2022-03-17",
"2022-03-18",
"2022-03-21",
"2022-03-22",
"2022-03-23",
"2022-03-24",
"2022-03-25",
"2022-03-28",
"2022-03-29",
"2022-03-30",
"2022-03-31",
"2022-04-01",
"2022-04-06",
"2022-04-07",
"2022-04-08",
"2022-04-11",
"2022-04-12",
"2022-04-13",
"2022-04-14",
"2022-04-15",
"2022-04-18",
"2022-04-19",
"2022-04-20",
"2022-04-21",
"2022-04-22",
"2022-04-25",
"2022-04-26",
"2022-04-27",
"2022-04-28",
"2022-04-29",
"2022-05-05",
"2022-05-06",
"2022-05-09",
"2022-05-10",
"2022-05-11",
"2022-05-12",
"2022-05-13",
"2022-05-16",
"2022-05-17",
"2022-05-18",
"2022-05-19",
"2022-05-20",
"2022-05-23",
"2022-05-24",
"2022-05-25",
"2022-05-26",
"2022-05-27",
"2022-05-30",
"2022-05-31",
"2022-06-01",
"2022-06-02",
"2022-06-06",
"2022-06-07",
"2022-06-08",
"2022-06-09",
"2022-06-10",
"2022-06-13",
"2022-06-14",
"2022-06-15",
"2022-06-16",
"2022-06-17",
"2022-06-20",
"2022-06-21",
"2022-06-22",
"2022-06-23",
"2022-06-24",
"2022-06-27",
"2022-06-28",
"2022-06-29",
"2022-06-30",
"2022-07-01",
"2022-07-04",
"2022-07-05",
"2022-07-06",
"2022-07-07",
"2022-07-08",
"2022-07-11",
"2022-07-12",
"2022-07-13",
"2022-07-14",
"2022-07-15",
"2022-07-18",
"2022-07-19",
"2022-07-20",
"2022-07-21",
"2022-07-22",
"2022-07-25",
"2022-07-26",
"2022-07-27",
"2022-07-28",
"2022-07-29",
"2022-08-01",
"2022-08-02",
"2022-08-03",
"2022-08-04",
"2022-08-05",
"2022-08-08",
"2022-08-09",
"2022-08-10",
"2022-08-11",
"2022-08-12",
"2022-08-15",
"2022-08-16",
"2022-08-17",
"2022-08-18",
"2022-08-19",
"2022-08-22",
"2022-08-23",
"2022-08-24",
"2022-08-25",
"2022-08-26",
"2022-08-29",
"2022-08-30",
"2022-08-31",
"2022-09-01",
"2022-09-02",
"2022-09-05",
"2022-09-06",
"2022-09-07",
"2022-09-08",
"2022-09-09",
"2022-09-13",
"2022-09-14",
"2022-09-15",
"2022-09-16",
"2022-09-19",
"2022-09-20",
"2022-09-21",
"2022-09-22",
"2022-09-23",
"2022-09-26",
"2022-09-27",
"2022-09-28",
"2022-09-29",
"2022-09-30",
"2022-10-10",
"2022-10-11",
"2022-10-12",
"2022-10-13",
"2022-10-14",
"2022-10-17",
"2022-10-18",
"2022-10-19",
"2022-10-20",
"2022-10-21",
"2022-10-24",
"2022-10-25",
"2022-10-26",
"2022-10-27",
"2022-10-28",
"2022-10-31",
"2022-11-01",
"2022-11-02",
"2022-11-03",
"2022-11-04",
"2022-11-07",
"2022-11-08",
"2022-11-09",
"2022-11-10",
"2022-11-11",
"2022-11-14",
"2022-11-15",
"2022-11-16",
"2022-11-17",
"2022-11-18",
"2022-11-21",
"2022-11-22",
"2022-11-23",
"2022-11-24",
"2022-11-25",
"2022-11-28",
"2022-11-29",
"2022-11-30",
"2022-12-01",
"2022-12-02",
"2022-12-05",
"2022-12-06",
"2022-12-07",
"2022-12-08",
"2022-12-09",
"2022-12-12",
"2022-12-13",
"2022-12-14",
"2022-12-15",
"2022-12-16",
"2022-12-19",
"2022-12-20",
"2022-12-21",
"2022-12-22",
"2022-12-23",
"2022-12-26",
"2022-12-27",
"2022-12-28",
"2022-12-29",
"2022-12-30",
'2023-01-03',
'2023-01-04',
'2023-01-05',
'2023-01-06',
'2023-01-09',
'2023-01-10',
'2023-01-11',
'2023-01-12',
'2023-01-13',
'2023-01-16',
'2023-01-17',
'2023-01-18',
'2023-01-19',
'2023-01-20',
'2023-01-30',
'2023-01-31',
'2023-02-01',
'2023-02-02',
'2023-02-03',
'2023-02-06',
'2023-02-07',
'2023-02-08',
'2023-02-09',
'2023-02-10',
'2023-02-13',
'2023-02-14',
'2023-02-15',
'2023-02-16',
'2023-02-17',
'2023-02-20',
'2023-02-21',
'2023-02-22',
'2023-02-23',
'2023-02-24',
'2023-02-27',
'2023-02-28',
'2023-03-01',
'2023-03-02',
'2023-03-03',
'2023-03-06',
'2023-03-07',
'2023-03-08',
'2023-03-09',
'2023-03-10',
'2023-03-13',
'2023-03-14',
'2023-03-15',
'2023-03-16',
'2023-03-17',
'2023-03-20',
'2023-03-21',
'2023-03-22',
'2023-03-23',
'2023-03-24',
'2023-03-27',
'2023-03-28',
'2023-03-29',
'2023-03-30',
'2023-03-31',
'2023-04-06',
'2023-04-07',
'2023-04-10',
'2023-04-11',
'2023-04-12',
'2023-04-13',
'2023-04-14',
'2023-04-17',
'2023-04-18',
'2023-04-19',
'2023-04-20',
'2023-04-21',
'2023-04-24',
'2023-04-25',
'2023-04-26',
'2023-04-27',
'2023-04-28',
'2023-05-02',
'2023-05-03',
'2023-05-04',
'2023-05-05',
'2023-05-08',
'2023-05-09',
'2023-05-10',
'2023-05-11',
'2023-05-12',
'2023-05-15',
'2023-05-16',
'2023-05-17',
'2023-05-18',
'2023-05-19',
'2023-05-22',
'2023-05-23',
'2023-05-24',
'2023-05-25',
'2023-05-26',
'2023-05-29',
'2023-05-30',
'2023-05-31',
'2023-06-01',
'2023-06-02',
'2023-06-05',
'2023-06-06',
'2023-06-07',
'2023-06-08',
'2023-06-09',
'2023-06-12',
'2023-06-13',
'2023-06-14',
'2023-06-15',
'2023-06-16',
'2023-06-19',
'2023-06-20',
'2023-06-21',
'2023-06-26',
'2023-06-27',
'2023-06-28',
'2023-06-29',
'2023-06-30',
'2023-07-03',
'2023-07-04',
'2023-07-05',
'2023-07-06',
'2023-07-07',
'2023-07-10',
'2023-07-11',
'2023-07-12',
'2023-07-13',
'2023-07-14',
'2023-07-17',
'2023-07-18',
'2023-07-19',
'2023-07-20',
'2023-07-21',
'2023-07-24',
'2023-07-25',
'2023-07-26',
'2023-07-27',
'2023-07-28',
'2023-07-31',
'2023-08-01',
'2023-08-02',
'2023-08-03',
'2023-08-04',
'2023-08-07',
'2023-08-08',
'2023-08-09',
'2023-08-10',
'2023-08-11',
'2023-08-14',
'2023-08-15',
'2023-08-16',
'2023-08-17',
'2023-08-18',
'2023-08-21',
'2023-08-22',
'2023-08-23',
'2023-08-24',
'2023-08-25',
'2023-08-28',
'2023-08-29',
'2023-08-30',
'2023-08-31',
'2023-09-01',
'2023-09-04',
'2023-09-05',
'2023-09-06',
'2023-09-07',
'2023-09-08',
'2023-09-11',
'2023-09-12',
'2023-09-13',
'2023-09-14',
'2023-09-15',
'2023-09-18',
'2023-09-19',
'2023-09-20',
'2023-09-21',
'2023-09-22',
'2023-09-25',
'2023-09-26',
'2023-09-27',
'2023-09-28',
'2023-10-09',
'2023-10-10',
'2023-10-11',
'2023-10-12',
'2023-10-13',
'2023-10-16',
'2023-10-17',
'2023-10-18',
'2023-10-19',
'2023-10-20',
'2023-10-23',
'2023-10-24',
'2023-10-25',
'2023-10-26',
'2023-10-27',
'2023-10-30',
'2023-10-31',
'2023-11-01',
'2023-11-02',
'2023-11-03',
'2023-11-06',
'2023-11-07',
'2023-11-08',
'2023-11-09',
'2023-11-10',
'2023-11-13',
'2023-11-14',
'2023-11-15',
'2023-11-16',
'2023-11-17',
'2023-11-20',
'2023-11-21',
'2023-11-22',
'2023-11-23',
'2023-11-24',
'2023-11-27',
'2023-11-28',
'2023-11-29',
'2023-11-30',
'2023-12-01',
'2023-12-04',
'2023-12-05',
'2023-12-06',
'2023-12-07',
'2023-12-08',
'2023-12-11',
'2023-12-12',
'2023-12-13',
'2023-12-14',
'2023-12-15',
'2023-12-18',
'2023-12-19',
'2023-12-20',
'2023-12-21',
'2023-12-22',
'2023-12-25',
'2023-12-26',
'2023-12-27',
'2023-12-28',
'2023-12-29'
]
def get_day_from(date: str, ne: int = 1) -> str:
"""
根据日期和步长来搜寻交易日
Args:
date (str): 交易日 比如 "2020-07-15"
ne (int): 步长
Returns:
str: 交易日期
"""
try:
return trade_dates[trade_dates.index(date) + ne]
except IndexError:
raise IndexError("参数date不为交易日")
| [
"[email protected]"
]
| |
7c016d8b6388aebf0272e9d020906a09f3c9df6b | 4cb189467bf31816fcd8bfb248947b7dd00c2017 | /pixace/__init__.py | 3819954c037ec6693028c42d50dcb3c11351c8bf | [
"MIT"
]
| permissive | vishnubob/pixace | 6945861372d70fbbbe8f15ac1d36d65b8f0b0f06 | 8871f3ac79101a2e7780571b8aafb226382ad83d | refs/heads/main | 2023-02-25T14:01:47.110728 | 2020-12-26T03:10:40 | 2020-12-26T03:10:40 | 320,168,163 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,025 | py | def download_model(model_name=None, checkpoint="default", weights_dir="model-weights"):
from . zoo import ModelZoo
zoo = ModelZoo(weights_dir=weights_dir)
return zoo.download(model_name=model_name, checkpoint=checkpoint)
def get_trainer(
model_name=None,
model_type="reformer",
weights_dir="model-weights",
image_size=32,
bitdepth=(5,4,4),
):
from . train import Trainer
return Trainer(
model_name=model_name,
model_type=model_type,
weights_dir=weights_dir,
image_size=image_size,
bitdepth=bitdepth,
)
def get_predictor(
model_name=None,
model_type="reformer",
weights_dir="model-weights",
checkpoint=None,
image_size=32,
bitdepth=(5,4,4)
):
from . inference import Inference
return Inference(
model_name=model_name,
model_type=model_type,
weights_dir=weights_dir,
image_size=image_size,
bitdepth=bitdepth,
)
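# Minimal usage sketch; the model name below is illustrative, not a shipped model:
#   trainer = get_trainer(model_name="my-model", weights_dir="model-weights")
#   predictor = get_predictor(model_name="my-model", checkpoint="default")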
| [
"[email protected]"
]
| |
674a462645ec6e5cafacdcf593439c253f7c3c93 | 32dcb7c872cbc5048a2024df73866ee20e7405ec | /0x0B-python-input_output/7-save_to_json_file.py | 16166a4b92ea1c6e34dbec961951bd9ae6613ebe | []
| no_license | Bzhamdi/holbertonschool-higher_level_programming | f52eccc48fe388d6728e59e68da336b392057b8e | d92c749ed64d8b795533105520ddea4e12c2a508 | refs/heads/master | 2023-06-07T08:07:47.579114 | 2021-06-24T22:55:38 | 2021-06-24T22:55:38 | 259,213,414 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | #!/usr/bin/python3
"""save_to_json_file"""
import json
def save_to_json_file(my_obj, filename):
"""save object to jason file"""
with open(filename, 'w', encoding='utf-8') as file:
return json.dump(my_obj, file)
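# Usage sketch (file name illustrative):
#   save_to_json_file([1, 2, 3], "my_list.json")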
| [
"[email protected]"
]
| |
ba621a38f90ffdae0a50420c5fe784f09c301c67 | 0b4d36e45ac9a192982f01ebab15321981a17be5 | /app/admin/views/__init__.py | 2f32a1494d559d8911ab501dcc05d5b027e6a58a | []
| no_license | xuannanxan/maitul | 02f4f3ce85f02915c8d18cb4d291c3c6da4573d5 | 6407415f6beb6677875b23b06ac694996e840256 | refs/heads/master | 2020-07-11T00:10:18.569571 | 2019-11-17T12:39:38 | 2019-11-17T12:39:38 | 204,406,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,603 | py | # -*- coding: utf-8 -*-
# Created by xuannan on 2019-01-26.
__author__ = 'Allen xu'
from datetime import datetime
from flask_login import current_user
from flask import request,session
from app.expand.utils import build_tree,object_to_dict
from app.models import Crud,Menu,Conf,Admin,Role,Auth
from app.models.base import db
from .. import admin
# Context processor: injects shared variables into every admin template
@admin.context_processor
def tpl_extra():
menu_data = Crud.get_data(Menu, Menu.sort.desc())
rule = str(request.url_rule)
    # The current user's permission (auth) URL list
auth_urls = []
if hasattr(current_user, 'id') and current_user.id != 1:
auth_urls = session.get('auth_urls')
    # If the URL rule carries a pagination placeholder, strip it off
has_pagination = rule.find("<")
if has_pagination>0:
rule = rule[0:has_pagination-1]
    # Fetch the current menu entry for display on the page
active_menu = Menu.query.filter(Menu.url == rule).first()
    # Configuration items
conf_model_data = Crud.get_data(Conf, Conf.sort.desc())
conf_data,webconfig = [],{}
for v in conf_model_data:
conf = object_to_dict(v)
if conf['optional_values']:
conf['optional_values'] = (conf['optional_values']).split(',')
conf_data.append(conf)
webconfig[conf['ename']] = conf['default_value']
data = dict(
online_time= datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
menu_tree=build_tree(menu_data, 0, 0),
rule = rule,
active_menu = active_menu,
conf_data = conf_data,
webconfig = webconfig,
auth_urls = auth_urls
)
return data | [
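# Every key in the returned dict (menu_tree, rule, webconfig, ...) becomes a
# template global for pages rendered under the admin blueprint.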
"[email protected]"
]
| |
d2af8f50791c38016c4bcce317336785a07c9ba2 | b1303152c3977a22ff9a0192c0c32310e65a6d77 | /python/884.uncommon-words-from-two-sentences.py | 67fdbd8c238f32b3c9e47676cc49d73841bc8d08 | [
"Apache-2.0"
]
| permissive | stavanmehta/leetcode | 1b8da1c2bfacaa76ddfb96b8dbce03bf08c54c27 | 1224e43ce29430c840e65daae3b343182e24709c | refs/heads/master | 2021-07-15T16:02:16.107962 | 2021-06-24T05:39:14 | 2021-06-24T05:39:14 | 201,658,706 | 0 | 0 | Apache-2.0 | 2021-06-24T05:39:15 | 2019-08-10T16:59:32 | Java | UTF-8 | Python | false | false | 91 | py | class Solution:
    def uncommonFromSentences(self, A: str, B: str) -> List[str]:
        # A word is uncommon when it appears exactly once across both
        # sentences combined. (List is pre-imported from typing on LeetCode.)
        counts = {}
        for word in (A + " " + B).split():
            counts[word] = counts.get(word, 0) + 1
        return [word for word in counts if counts[word] == 1]
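# Expected behavior on the problem's sample input (assumed from LeetCode 884):
#   Solution().uncommonFromSentences("this apple is sweet", "this apple is sour")
#   -> ["sweet", "sour"]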
"[email protected]"
]
| |
1726741c163cca9863e2ad17ba1ad644d88dbc3d | c8da3539397dbd49388719fb6d8720db61e859a7 | /catkin_ws/build/robot_vision/catkin_generated/pkg.develspace.context.pc.py | 915527130a23863a915859f35818063c56b9d58c | []
| no_license | pinkpinkheart/ROS | a465c9e967cd1c71da7648a62d1cc8af342b70df | bd91772e24b72d466a90d2dd65f54be4be49ce99 | refs/heads/master | 2023-03-12T17:55:40.650415 | 2021-03-03T09:20:00 | 2021-03-03T09:20:00 | 344,137,644 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "robot_vision"
PROJECT_SPACE_DIR = "/home/cy/workspace/ROS/catkin_ws/devel"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
]
| |
333a6c4dfc8182319414de1bdc18d089c1898ac5 | c7a1c1ae40e9d95dfb92251dcfbf3c5010e6ba81 | /picamera/essentials-camera/Chapter 05 - Python/ch5listing2.py | 0b39ad336e65cbd478911fd113792f6648cee2cd | []
| no_license | pranavlathigara/Raspberry-Pi-DIY-Projects | efd18e2e5b9b8369bb1a5f5418782480cf9bc729 | 0c14c316898d4d06015912ac4a8cb7b71a3980c0 | refs/heads/master | 2021-04-06T09:14:28.088223 | 2018-02-19T00:15:22 | 2018-02-19T00:15:22 | 124,649,553 | 1 | 2 | null | 2018-03-10T11:30:59 | 2018-03-10T11:30:59 | null | UTF-8 | Python | false | false | 173 | py | from picamera import PiCamera
from time import sleep
camera = PiCamera()
camera.start_preview()
sleep(5)
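# The pause lets the sensor settle its automatic gain and exposure levels;
# capturing immediately after start_preview() can produce a dark image.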
camera.capture('/home/pi/Desktop/image.jpg')
camera.stop_preview() | [
"[email protected]"
]
| |
df1725401ad1ed3098e58f43cf648d10b867d034 | d45b4db35e5e8baef1aa71bb8ae55236e8e8de67 | /rm2bed.py | e36135ab8501ab8e641e864df18e86f177377c97 | []
| no_license | davek44/utility | a5af6bfff2cf576671dcdfa7bdfdac97a417b26a | 5a2581078bf9dab78cc182f2917ecb671d04570c | refs/heads/master | 2023-04-30T21:19:40.683342 | 2023-04-20T22:30:48 | 2023-04-20T22:30:48 | 7,212,829 | 18 | 11 | null | null | null | null | UTF-8 | Python | false | false | 1,352 | py | #!/usr/bin/env python
from optparse import OptionParser
import gzip
'''
rm2bed.py
Convert RepeatMasker .out format to BED.
'''
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <rm out>'
parser = OptionParser(usage)
#parser.add_option()
(options,args) = parser.parse_args()
if len(args) != 1:
parser.error('Must provide RepeatMasker .out file')
else:
if args[0][-2:] == 'gz':
rm_in = gzip.open(args[0], 'rt')
else:
rm_in = open(args[0])
for i in range(4):
line = rm_in.readline()
while line:
a = line.split()
chrm = a[4]
start = str(int(a[5])-1)
end = a[6]
if a[8] == '+':
strand = '+'
else:
strand = '-'
repeat = a[9]
family = a[10]
cols = (chrm, start, end, '%s;%s' % (family,repeat), '.', strand)
print('\t'.join(cols))
line = rm_in.readline()
rm_in.close()
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
c1881a3f7167b40d0502938ef8175b49df657c8c | e86364b36b82c24596dd71f9fa2221d036e8defc | /collections/ansible_collections/arista/eos/plugins/modules/eos_l2_interfaces.py | bb55e26753e4377bd7f101bd2b60c9ca4dbcffa8 | []
| no_license | ganeshrn/network_collections_migration | b3f11be5ecb9557787bcd12ca01b227379c7c102 | 8f56b60bfde606b291627665a1218bf7ce15f3a1 | refs/heads/master | 2020-09-12T12:10:58.189645 | 2019-11-18T11:44:48 | 2019-11-18T11:44:48 | 222,419,125 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,267 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
##############################################
# WARNING #
##############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
##############################################
"""
The module file for eos_l2_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'
}
DOCUMENTATION = '''module: eos_l2_interfaces
short_description: Manages Layer-2 interface attributes of Arista EOS devices
description: This module provides declarative management of Layer-2 interface on Arista
EOS devices.
author: Nathaniel Case (@qalthos)
notes:
- Tested against Arista EOS 4.20.10M
- This module works with connection C(network_cli). See the L(EOS Platform Options,../network/user_guide/platform_eos.html).
options:
config:
description: A dictionary of Layer-2 interface options
type: list
elements: dict
suboptions:
name:
description:
- Full name of interface, e.g. Ethernet1.
type: str
required: true
access:
description:
- Switchport mode access command to configure the interface as a layer 2 access.
type: dict
suboptions:
vlan:
description:
- Configure given VLAN in access port. It's used as the access VLAN ID.
type: int
trunk:
description:
- Switchport mode trunk command to configure the interface as a Layer 2 trunk.
type: dict
suboptions:
native_vlan:
description:
- Native VLAN to be configured in trunk port. It is used as the trunk
native VLAN ID.
type: int
trunk_allowed_vlans:
description:
- List of allowed VLANs in a given trunk port. These are the only VLANs
that will be configured on the trunk.
type: list
state:
choices:
- merged
- replaced
- overridden
- deleted
default: merged
description:
- The state of the configuration after module completion
type: str
'''
EXAMPLES = """
---
# Using merged
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# switchport access vlan 20
# !
# interface Ethernet2
# switchport trunk native vlan 20
# switchport mode trunk
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
- name: Merge provided configuration with device configuration.
eos_l2_interfaces:
config:
- name: Ethernet1
trunk:
native_vlan: 10
- name: Ethernet2
access:
vlan: 30
state: merged
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# switchport trunk native vlan 10
# switchport mode trunk
# !
# interface Ethernet2
# switchport access vlan 30
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
# Using replaced
# Before state:
# -------------
#
# veos2#show running-config | s int
# interface Ethernet1
# switchport access vlan 20
# !
# interface Ethernet2
# switchport trunk native vlan 20
# switchport mode trunk
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
- name: Replace device configuration of specified L2 interfaces with provided configuration.
eos_l2_interfaces:
config:
- name: Ethernet1
trunk:
native_vlan: 20
        trunk_allowed_vlans: 5-10, 15
state: replaced
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# switchport trunk native vlan 20
# switchport trunk allowed vlan 5-10,15
# switchport mode trunk
# !
# interface Ethernet2
# switchport trunk native vlan 20
# switchport mode trunk
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
# Using overridden
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# switchport access vlan 20
# !
# interface Ethernet2
# switchport trunk native vlan 20
# switchport mode trunk
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
- name: Override device configuration of all L2 interfaces on device with provided configuration.
eos_l2_interfaces:
config:
- name: Ethernet2
access:
vlan: 30
state: overridden
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# !
# interface Ethernet2
# switchport access vlan 30
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
# Using deleted
# Before state:
# -------------
#
# veos#show running-config | section interface
# interface Ethernet1
# switchport access vlan 20
# !
# interface Ethernet2
# switchport trunk native vlan 20
# switchport mode trunk
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
# !
- name: Delete EOS L2 interfaces as in given arguments.
eos_l2_interfaces:
config:
- name: Ethernet1
- name: Ethernet2
state: deleted
# After state:
# ------------
#
# veos#show running-config | section interface
# interface Ethernet1
# !
# interface Ethernet2
# !
# interface Management1
# ip address dhcp
# ipv6 address auto-config
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: The configuration returned will always be in the same format of the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: The configuration returned will always be in the same format of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface Ethernet2', 'switchport access vlan 20']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.arista.eos.plugins.module_utils.network.eos.argspec.l2_interfaces.l2_interfaces import L2_interfacesArgs
from ansible_collections.arista.eos.plugins.module_utils.network.eos.config.l2_interfaces.l2_interfaces import L2_interfaces
def main():
"""
Main entry point for module execution
:returns: the result form module invocation
"""
module = AnsibleModule(argument_spec=L2_interfacesArgs.argument_spec,
supports_check_mode=True)
result = L2_interfaces(module).execute_module()
module.exit_json(**result)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
38cfc7ab0516a8cc95e4612d23acabbe0368a327 | 2c13edf74323021a63697216bb39e1e2e9758342 | /listBackupFiles.py | 7efc89d660b08ec426460fdb5e011811f2a48e75 | []
| no_license | andreycizov/Ahsay-OBC-Restore-tool | 5126d0e9460b3a78ed51c41bacd7d1d3eb7372ea | b94450a8bd5de47ab1d909df93097950cd1af6c6 | refs/heads/master | 2021-01-22T19:13:47.283496 | 2011-08-26T17:53:23 | 2011-08-26T17:53:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | #!/usr/bin/python3
from AhsayAPI import GZStreamRead as GZ
from AhsayAPI import XMLFileList as FL
from AhsayAPI import XMLFileInfo
from AhsayAPI import to_size
from AhsayAPI import salt
from AhsayAPI.urls import ls
from AhsayAPI.urls import urlopen
import sys
t = ls({'dir':sys.argv[1],
'backupjob':'Current',
'start_page':sys.argv[2]})
r = urlopen(t)
print(r.getheader('Content-Length'))
gz = GZ(r)
def callback(self, event, e):
if e.tag == 'F':
f = XMLFileInfo(e)
type = 'F'
if f.type == "T":
type = 'D'
print("{type} {name} {size} {size_enc} {salt}".format(
type=type, name=f.path, size=to_size(f.size),
size_enc=to_size(f.size_enc),
salt=salt(f.salt)
))
print(e.attrib)
#self.stdcallback(event, e)
def endcallback(self):
pass
fl = FL(gz, callback=callback, endcallback=endcallback)
fl.start()
print('Transferred data:', to_size(gz.n_comp),'/',to_size(gz.n))
| [
"[email protected]"
]
| |
99ecd43278056b15bee3a05aef0b937342baa5b9 | 7a4da5ec2196bf975a9e6115846244788b36b952 | /3.7.0/lldb-3.7.0.src/test/lang/objc/objc-dynamic-value/TestObjCDynamicValue.py | 61a69cbc993695d10cacecdfbba90e4afb839332 | [
"NCSA",
"MIT"
]
| permissive | androm3da/clang_sles | ca4ada2ec85d625c65818ca9b60dcf1bc27f0756 | 2ba6d0711546ad681883c42dfb8661b842806695 | refs/heads/master | 2021-01-10T13:50:25.353394 | 2016-03-31T21:38:29 | 2016-03-31T21:38:29 | 44,787,977 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,460 | py | """
Use lldb Python API to test dynamic values in ObjC
"""
import os, time
import re
import unittest2
import lldb, lldbutil
from lldbtest import *
class ObjCDynamicValueTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipUnlessDarwin
@python_api_test
@dsym_test
@expectedFailureDarwin("llvm.org/pr20271 rdar://18684107")
def test_get_dynamic_objc_vals_with_dsym(self):
"""Test fetching ObjC dynamic values."""
if self.getArchitecture() == 'i386':
# rdar://problem/9946499
self.skipTest("Dynamic types for ObjC V1 runtime not implemented")
self.buildDsym()
self.do_get_dynamic_vals()
@skipUnlessDarwin
@python_api_test
@dwarf_test
@expectedFailureDarwin("llvm.org/pr20271 rdar://18684107")
def test_get_objc_dynamic_vals_with_dwarf(self):
"""Test fetching ObjC dynamic values."""
if self.getArchitecture() == 'i386':
# rdar://problem/9946499
self.skipTest("Dynamic types for ObjC V1 runtime not implemented")
self.buildDwarf()
self.do_get_dynamic_vals()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break for main.c.
self.source_name = 'dynamic-value.m'
self.set_property_line = line_number(self.source_name, '// This is the line in setProperty, make sure we step to here.')
self.handle_SourceBase = line_number(self.source_name,
'// Break here to check dynamic values.')
self.main_before_setProperty_line = line_number(self.source_name,
'// Break here to see if we can step into real method.')
def examine_SourceDerived_ptr (self, object):
self.assertTrue (object)
self.assertTrue (object.GetTypeName().find ('SourceDerived') != -1)
derivedValue = object.GetChildMemberWithName ('_derivedValue')
self.assertTrue (derivedValue)
self.assertTrue (int (derivedValue.GetValue(), 0) == 30)
def do_get_dynamic_vals(self):
"""Make sure we get dynamic values correctly both for compiled in classes and dynamic ones"""
exe = os.path.join(os.getcwd(), "a.out")
# Create a target from the debugger.
target = self.dbg.CreateTarget (exe)
self.assertTrue(target, VALID_TARGET)
# Set up our breakpoints:
handle_SourceBase_bkpt = target.BreakpointCreateByLocation(self.source_name, self.handle_SourceBase)
self.assertTrue(handle_SourceBase_bkpt and
handle_SourceBase_bkpt.GetNumLocations() == 1,
VALID_BREAKPOINT)
main_before_setProperty_bkpt = target.BreakpointCreateByLocation(self.source_name, self.main_before_setProperty_line)
self.assertTrue(main_before_setProperty_bkpt and
main_before_setProperty_bkpt.GetNumLocations() == 1,
VALID_BREAKPOINT)
# Now launch the process, and do not stop at the entry point.
process = target.LaunchSimple (None, None, self.get_process_working_directory())
self.assertTrue(process.GetState() == lldb.eStateStopped,
PROCESS_STOPPED)
threads = lldbutil.get_threads_stopped_at_breakpoint (process, main_before_setProperty_bkpt)
self.assertTrue (len(threads) == 1)
thread = threads[0]
#
# At this point, myObserver has a Source pointer that is actually a KVO swizzled SourceDerived
# make sure we can get that properly:
frame = thread.GetFrameAtIndex(0)
myObserver = frame.FindVariable('myObserver', lldb.eDynamicCanRunTarget)
self.assertTrue (myObserver)
myObserver_source = myObserver.GetChildMemberWithName ('_source', lldb.eDynamicCanRunTarget)
self.examine_SourceDerived_ptr (myObserver_source)
#
# Make sure a static value can be correctly turned into a dynamic value.
frame = thread.GetFrameAtIndex(0)
myObserver_static = frame.FindVariable('myObserver', lldb.eNoDynamicValues)
self.assertTrue (myObserver_static)
myObserver = myObserver_static.GetDynamicValue (lldb.eDynamicCanRunTarget)
myObserver_source = myObserver.GetChildMemberWithName ('_source', lldb.eDynamicCanRunTarget)
self.examine_SourceDerived_ptr (myObserver_source)
# The "frame var" code uses another path to get into children, so let's
# make sure that works as well:
result = lldb.SBCommandReturnObject()
self.expect('frame var -d run-target myObserver->_source', 'frame var finds its way into a child member',
patterns = ['\(SourceDerived \*\)'])
# check that our ObjC GetISA() does a good job at hiding KVO swizzled classes
self.expect('frame var -d run-target myObserver->_source -T', 'the KVO-ed class is hidden',
substrs = ['SourceDerived'])
self.expect('frame var -d run-target myObserver->_source -T', 'the KVO-ed class is hidden', matching = False,
substrs = ['NSKVONotify'])
# This test is not entirely related to the main thrust of this test case, but since we're here,
# try stepping into setProperty, and make sure we get into the version in Source:
thread.StepInto()
threads = lldbutil.get_stopped_threads (process, lldb.eStopReasonPlanComplete)
self.assertTrue (len(threads) == 1)
line_entry = threads[0].GetFrameAtIndex(0).GetLineEntry()
self.assertEqual (line_entry.GetLine(), self.set_property_line)
self.assertEqual (line_entry.GetFileSpec().GetFilename(), self.source_name)
# Okay, back to the main business. Continue to the handle_SourceBase and make sure we get the correct dynamic value.
threads = lldbutil.continue_to_breakpoint (process, handle_SourceBase_bkpt)
self.assertTrue (len(threads) == 1)
thread = threads[0]
frame = thread.GetFrameAtIndex(0)
# Get "object" using FindVariable:
noDynamic = lldb.eNoDynamicValues
useDynamic = lldb.eDynamicCanRunTarget
object_static = frame.FindVariable ('object', noDynamic)
object_dynamic = frame.FindVariable ('object', useDynamic)
# Delete this object to make sure that this doesn't cause havoc with the dynamic object that depends on it.
del (object_static)
self.examine_SourceDerived_ptr (object_dynamic)
# Get "this" using FindValue, make sure that works too:
object_static = frame.FindValue ('object', lldb.eValueTypeVariableArgument, noDynamic)
object_dynamic = frame.FindValue ('object', lldb.eValueTypeVariableArgument, useDynamic)
del (object_static)
self.examine_SourceDerived_ptr (object_dynamic)
# Get "this" using the EvaluateExpression:
object_static = frame.EvaluateExpression ('object', noDynamic)
object_dynamic = frame.EvaluateExpression ('object', useDynamic)
del (object_static)
self.examine_SourceDerived_ptr (object_dynamic)
# Continue again to the handle_SourceBase and make sure we get the correct dynamic value.
# This one looks exactly the same, but in fact this is an "un-KVO'ed" version of SourceBase, so
# its isa pointer points to SourceBase not NSKVOSourceBase or whatever...
threads = lldbutil.continue_to_breakpoint (process, handle_SourceBase_bkpt)
self.assertTrue (len(threads) == 1)
thread = threads[0]
frame = thread.GetFrameAtIndex(0)
# Get "object" using FindVariable:
object_static = frame.FindVariable ('object', noDynamic)
object_dynamic = frame.FindVariable ('object', useDynamic)
# Delete this object to make sure that this doesn't cause havoc with the dynamic object that depends on it.
del (object_static)
self.examine_SourceDerived_ptr (object_dynamic)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
| [
"[email protected]"
]
| |
a12ba1e7a58cbb5f71f5e88633c027104e7aa5a3 | 060b4486244008e40137b590397ed1264b4116de | /poetry/core/_vendor/configparser.py | 00c2335b787069a1566bc45f8b959f71f3307322 | [
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-python-cwi",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"Python-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | hoefling/core | d8bfa4b59e81a0a86d56c75e4b3d135c9f91ad2b | 7e9fbac94bbd2211b63421dc47fa91cb507c466d | refs/heads/master | 2022-05-08T06:29:41.070632 | 2020-04-22T18:59:34 | 2020-04-22T18:59:34 | 257,990,591 | 0 | 0 | MIT | 2020-04-22T18:46:57 | 2020-04-22T18:46:56 | null | UTF-8 | Python | false | false | 1,586 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Convenience module importing everything from backports.configparser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from poetry.core._vendor.backports.configparser import (
RawConfigParser,
ConfigParser,
SafeConfigParser,
SectionProxy,
Interpolation,
BasicInterpolation,
ExtendedInterpolation,
LegacyInterpolation,
NoSectionError,
DuplicateSectionError,
DuplicateOptionError,
NoOptionError,
InterpolationError,
InterpolationMissingOptionError,
InterpolationSyntaxError,
InterpolationDepthError,
ParsingError,
MissingSectionHeaderError,
ConverterMapping,
DEFAULTSECT,
MAX_INTERPOLATION_DEPTH,
)
from poetry.core._vendor.backports.configparser import Error, _UNSET, _default_dict, _ChainMap # noqa: F401
__all__ = [
"NoSectionError",
"DuplicateOptionError",
"DuplicateSectionError",
"NoOptionError",
"InterpolationError",
"InterpolationDepthError",
"InterpolationMissingOptionError",
"InterpolationSyntaxError",
"ParsingError",
"MissingSectionHeaderError",
"ConfigParser",
"SafeConfigParser",
"RawConfigParser",
"Interpolation",
"BasicInterpolation",
"ExtendedInterpolation",
"LegacyInterpolation",
"SectionProxy",
"ConverterMapping",
"DEFAULTSECT",
"MAX_INTERPOLATION_DEPTH",
]
# NOTE: names missing from __all__ imported anyway for backwards compatibility.
| [
"[email protected]"
]
| |
07a1b69874544a91d2d108bcfcdd3e27ba9f3de2 | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /402/402.remove-k-digits.282089710.Accepted.leetcode.python3.py | 5768f0de4828d4f67130aabc3ed918c82c4573aa | []
| no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | class Solution:
def removeKdigits(self, num: str, k: int) -> str:
        if k == len(num):
return "0"
stack = []
for i in range(len(num)):
while k and stack and stack[-1] > num[i]:
stack.pop()
k -= 1
stack.append(num[i])
while k:
stack.pop()
k -= 1
index = 0
while index < len(stack) and stack[index] == "0":
index += 1
return "0" if index == len(stack) else "".join(stack[index:])
| [
"[email protected]"
]
| |
2bc9de9da32d67746c3de41150491c969903db68 | 94d8bb0e323ee478b580f766b7700acd32b519fd | /augmented-reality/stitch.py | 7e6f169b0c70081fb1478e5f69c51dd4d3a04369 | []
| no_license | vanstorm9/SLAM-experiments | d5db1d7680193d664029230e135ddb0d5648d38d | b17c4f83ae2c7a9dfafebd8559953d7341699fc6 | refs/heads/master | 2021-01-17T20:26:38.135564 | 2016-08-20T05:59:06 | 2016-08-20T05:59:06 | 65,873,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,117 | py | from panorama.panorama import Stitcher
import imutils
import numpy as np
import os
import cv2
widthResize = 600
def cropFocus(img):
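    # Crop to the bounding box of the non-black region of the stitched result;
    # subtractThresh trims a strip from the right edge (assumed seam artifacts).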
subtractThresh = 20
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
_, thresh = cv2.threshold(gray,1,255,cv2.THRESH_BINARY)
    # findContours returns (contours, hierarchy) on OpenCV 2.4/4.x and
    # (image, contours, hierarchy) on 3.x, so [-2] picks the contour list either way
    contours = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
    # take the largest contour, i.e. the stitched image region
    cnt = max(contours, key=cv2.contourArea)
x,y,w,h = cv2.boundingRect(cnt)
crop = img[y:y+h,x:x+w-subtractThresh]
return crop
def listDirCreate(root):
    # build and return a sorted list of the directory entries
    # (no reliance on a module-level list)
    paths = []
    for imgPath in root:
        paths.append(imgPath)
    paths.sort()
    return paths
def drawHarris(img):
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = np.float32(gray)
dst = cv2.cornerHarris(gray,2,3,0.04)
#result is dilated for marking the corners, not important
dst = cv2.dilate(dst,None)
# Threshold for an optimal value, it may vary depending on the image.
img[dst>0.01*dst.max()]=[0,0,255]
return img
root_path = 'panorama-input/'
slash = '/'
root = os.listdir(root_path)
i = 0
result = None
# Creating list of paths
imgPathList = listDirCreate(root)
print imgPathList
for fn in imgPathList:
print fn
if i == 0:
# This is our first image
mainImage = cv2.imread(root_path + slash + fn)
mainImage = imutils.resize(mainImage, width=widthResize)
#cv2.imwrite("mainImage.jpg", mainImage)
i = i + 1
continue
else:
# We shall combine current image with main image
#mainImage = cv2.imread("mainImage.jpg")
imageB = cv2.imread(root_path + slash + fn)
imageB = imutils.resize(imageB, width=widthResize)
# stitch the images together to create a panorama
stitcher = Stitcher()
result, vis = stitcher.stitch([mainImage, imageB], showMatches=False)
mainImage = cropFocus(result)
# show the images
'''
cv2.imshow("Image A", mainImage)
cv2.imshow("Image B", imageB)
'''
'''
cv2.imwrite("result.jpg", result)
cv2.imshow("Result", result)
cv2.waitKey(0)
'''
i = i + 1
cv2.imwrite("result.jpg", result)
pointresult = result.copy()
pointresult = drawHarris(pointresult)
cv2.imshow("Result", result)
cv2.imshow("Points", pointresult)
cv2.waitKey(0)
| [
"[email protected]"
]
| |
43864d0cecc6965fd89fe49768f52b05fda4096d | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_4/hllbra005/ndom.py | e5b1bd8aa42e852320f2d1d656ace34011f9d230 | []
| no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,597 | py | # Functions for ndom calculations
# Brandon Hall (HLLBRA005)
# 4 April 2014
def ndom_to_decimal(ndom): # This method converts ndom to decimal numbers, i.e base 6 to base 10
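    # Note: as written this only handles 1-3 digit ndom (base-6) numbers;
    # longer inputs fall through and are returned unchanged.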
decimal = ndom
ndomS = str(ndom) # Converts the parsed number ndom to a string
length = len(ndomS) # Length of the string is taken
if(length == 2): # If its two numbers
decimal = ( ( int(ndomS[0]) * 6 ) + int(ndomS[1]) )
if (length == 3): # If its three numbers
nd = ( ( int(ndomS[0]) * 6 ) + int(ndomS[1]) )
decimal = (nd*6) + int(ndomS[2])
return decimal
def decimal_to_ndom(decimal):
    if decimal == 0:
        return 0
    multiple = decimal
    re = ""
    while (multiple >= 1):
        re += str(multiple % 6)  # next base-6 digit, least significant first
        multiple = multiple // 6  # move to the next power of 6
    return int(re[::-1])
def ndom_add(a,b): # converts to decimal, adds two numbers
# it then converts them into ndom numbers
decA = ndom_to_decimal(a)
decB = ndom_to_decimal(b)
dec = decA+decB
ndom_tot = decimal_to_ndom(dec)
return ndom_tot
def ndom_multiply(a,b): # This method multiplies two ndom numbers
# It does this by multipying the numbers and then
# converting them to ndom
A = ndom_to_decimal(a)
B = ndom_to_decimal(b)
dec = A*B
ndom_tot = decimal_to_ndom(dec)
return ndom_tot | [
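# Hand-checked examples of the base-6 ("ndom") round trip:
#   ndom_to_decimal(101) == 37   # 1*36 + 0*6 + 1
#   decimal_to_ndom(37) == 101
#   ndom_add(5, 1) == 10         # 5 + 1 = 6 decimal, written "10" in base 6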
"[email protected]"
]
| |
ff89354798fc6cbc57a7af70c715c3cdaeb26fd3 | 3fe1b6f36bfd02156f606cf90797d69b18dd19d2 | /creme/optim/newton.py | 4997a95c4ab37fe8b551142380fd0e79804c74ff | [
"BSD-3-Clause"
]
| permissive | mihir-thakkar-ai/creme | a19a1975bb462a1a93046b6ea55830e88846cb88 | 008b0c1beb26b36b448fc3d04537e02e66d402b3 | refs/heads/master | 2022-12-18T01:15:18.132117 | 2020-09-15T20:17:16 | 2020-09-15T20:17:16 | 296,288,773 | 0 | 0 | BSD-3-Clause | 2020-09-17T10:04:27 | 2020-09-17T10:04:26 | null | UTF-8 | Python | false | false | 1,182 | py | from .. import utils
from . import base
__all__ = ['Newton']
class Newton(base.Optimizer):
"""Online Newton Step (ONS) optimizer.
This optimizer uses second-order information (i.e. the Hessian of the cost function) in
addition to first-order information (i.e. the gradient of the cost function).
Parameters:
lr
eps
References:
1. [Hazan, E., Agarwal, A. and Kale, S., 2007. Logarithmic regret algorithms for online convex optimization. Machine Learning, 69(2-3), pp.169-192](https://www.cs.princeton.edu/~ehazan/papers/log-journal.pdf)
"""
def __init__(self, lr=.1, eps=1e-5):
super().__init__(lr)
self.eps = eps
self.H_inv = {}
def _update_after_pred(self, w, g):
for i in g:
if (i, i) not in self.H_inv:
self.H_inv[i, i] = self.eps
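        # Sherman-Morrison rank-1 inverse update assumed here:
        #   (A + u v^T)^-1 = A^-1 - (A^-1 u v^T A^-1) / (1 + v^T A^-1 u)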
        # Update the inverse Hessian; assigning to self.H_inv (rather than an
        # otherwise unused self.H) so the step computation below actually uses it
        self.H_inv = utils.math.sherman_morrison(A_inv=self.H_inv, u=g, v=g)
# Calculate the update step
step = utils.math.dotvecmat(x=g, A=self.H_inv)
# Update the weights
for i, s in step.items():
w[i] -= self.learning_rate * s
return w
| [
"[email protected]"
]
| |
78ed70cfb6eeef227fb3bfb09143da364540ab98 | e4ad15cb20c2701f33c60001841a66fc03cd45ff | /pre_process_3.py | 586671edb3888c4c121c715fe9643c56c4479c0a | [
"MIT"
]
| permissive | foamliu/Face-Attributes | 90fb70947155d0c773a4bf2888190a843a280db5 | fbf90cd55b01e4b84ec69d01132b4b77e0417952 | refs/heads/master | 2020-05-21T04:59:33.707892 | 2019-09-23T08:18:15 | 2019-09-23T08:18:15 | 185,909,551 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py | import pickle
import cv2 as cv
from tqdm import tqdm
from config import im_size, pickle_file_landmarks, pickle_file_aligned
from utils import crop_image
ONE_SECOND = 1
if __name__ == "__main__":
print('loading {}...'.format(pickle_file_landmarks))
with open(pickle_file_landmarks, 'rb') as file:
data = pickle.load(file)
items = data['samples']
print('num_items: ' + str(len(items)))
samples = []
for item in tqdm(items):
try:
full_path = item['full_path']
bbox = item['bboxes'][0]
img = cv.imread(full_path)
img = crop_image(img, bbox)
img = cv.resize(img, (im_size, im_size))
samples.append(item)
        except Exception:
            # skip samples whose bbox read / crop / resize fails
            pass
print('num_items: ' + str(len(samples)))
print('saving {}...'.format(pickle_file_aligned))
with open(pickle_file_aligned, 'wb') as file:
save = {
'samples': samples
}
pickle.dump(save, file, pickle.HIGHEST_PROTOCOL)
| [
"[email protected]"
]
| |
4813e58f082e8347dbf5a8f63497e847b4c8ac7f | f3f19eaa73f2adb4375dbe6fbfa89eaaa8796cbc | /code/preprocessing/feature_engineering/rel_feature_groups/dep.py | 630cbe1fce98362a26687ff7ec077c7681d5248d | []
| no_license | sanjukladher/WNUT_2020_RE | 2b4c172de236a7766d27a588aa09a2f2f5d5a402 | 3ea31038bdc4a3b39def354ebee69ab00805ab0d | refs/heads/master | 2023-01-06T04:38:11.726589 | 2020-10-30T03:19:14 | 2020-10-30T03:19:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,269 | py | from corpus.ProtoFile import Relation
from preprocessing.feature_engineering.datasets import RelationWindow
class DependencyFeatureGroup(object):
def __init__(self):
pass
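    # Feature-name shorthand used below (as read from the methods):
    #   et{1,2} = entity type of mention 1/2, dw = dependency of the mention,
    #   h = head (last) word, Same{NP,PP,VP} = mentions share that phrase type.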
def convert_window(self, window):
result = []
assert isinstance(window, RelationWindow)
if window.relations is not None:
for rel in window.relations:
assert isinstance(rel, Relation)
result.append([self.et1dw1(rel), # combination of mention entity types
self.et2dw2(rel),
self.h1dw1(rel),
self.h2dw2(rel),
self.et12SameNP(rel),
self.et12SamePP(rel),
self.et12SameVP(rel)
])
# print("done")
return result
@staticmethod
def get_words(tokens):
        words = [token.word for token in tokens]
        if len(words) == 0:
            words = [""]
        return words
def et1dw1(self, rel):
et = rel.arg1_tag.tag_name
dep = rel.arg1_deps()
return "et1dw1={0}{1}".format(et, dep)
def et2dw2(self, rel):
et = rel.arg2_tag.tag_name
dep = rel.arg2_deps()
return "et2dw2={0}{1}".format(et, dep)
def h1dw1(self, rel):
arg1_tokens = rel.get_arg1_tokens()
words = self.get_words(arg1_tokens)
h1 = words[-1]
dep = rel.arg1_deps()
return "h1dw1={0}{1}".format(h1, dep)
def h2dw2(self, rel):
arg2_tokens = rel.get_arg2_tokens()
words = self.get_words(arg2_tokens)
h2 = words[-1]
dep = rel.arg2_deps()
return "h1dw1={0}{1}".format(h2, dep)
@staticmethod
def et12(rel):
return "et12={0}".format("_".join([rel.arg1_tag.tag_name, rel.arg2_tag.tag_name]))
def et12SameNP(self, rel):
et12 = self.et12(rel)
return "et12SameNP={0}_{1}".format(et12, rel.sameNP())
def et12SamePP(self, rel):
et12 = self.et12(rel)
return "et12SamePP={0}_{1}".format(et12, rel.samePP())
def et12SameVP(self, rel):
et12 = self.et12(rel)
return "et12SameVB={0}_{1}".format(et12, rel.sameVP())
| [
"[email protected]"
]
| |
301c79bdf0ec8be3cc4f05ca54b72e601197f4c9 | bba7f5a363d57473f583747af259f5ff60a53631 | /webrobot/app/main/controller/test_controller.py | 4dcd305dc12cb238fc24ba8826df878acd5d3efb | [
"MIT"
]
| permissive | githubwangcc/Auto-Test-System | 63f7953c0dd90859a3cd0b9330e102330df064aa | 4462fde8c23fef625f459d51d6bb7560ba29d726 | refs/heads/master | 2020-09-21T13:58:29.185560 | 2019-10-22T01:23:48 | 2019-10-22T01:23:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,858 | py | import os
import time
from pathlib import Path
from flask import Flask, send_from_directory, request
from flask_restplus import Resource
from app.main.util.decorator import token_required, organization_team_required_by_args
from app.main.util.get_path import get_test_result_path, get_back_scripts_root
from ..config import get_config
from ..model.database import *
from ..util.dto import TestDto
from ..util.tarball import make_tarfile, pack_files
from ..util.errors import *
api = TestDto.api
_test_cases = TestDto.test_cases
_test_suite = TestDto.test_suite
@api.route('/script')
@api.response(404, 'Script not found.')
@api.response(200, 'Download the script successfully.')
class ScriptDownload(Resource):
# @token_required
@api.doc('get_test_script')
@api.param('id', description='The task id')
@api.param('test', description='The test suite name')
def get(self):
"""
Get the test script
Get the bundled file that contains all necessary test scripts that the test needs to run
"""
task_id = request.args.get('id', None)
if not task_id:
return error_message(EINVAL, 'Field id is required'), 400
test_suite = request.args.get('test', None)
if not test_suite:
return error_message(EINVAL, 'Field test is required'), 400
task = Task.objects(pk=task_id).first()
if not task:
return error_message(ENOENT, 'Task not found'), 404
if test_suite.endswith('.py'):
test_suite = test_suite[0:-3]
result_dir = get_test_result_path(task)
scripts_root = get_back_scripts_root(task)
results_tmp = result_dir / 'temp'
script_file = scripts_root / (test_suite + '.py')
if not os.path.exists(script_file):
return error_message(ENOENT, "file {} does not exist".format(script_file)), 404
for _ in range(3):
tarball = pack_files(test_suite, scripts_root, results_tmp)
if tarball is None:
print("retry packaging files")
time.sleep(1)
else:
tarball = os.path.basename(tarball)
return send_from_directory(Path(os.getcwd()) / results_tmp, tarball)
else:
return error_message(EIO, "packaging files failed"), 404
@api.route('/<test_suite>')
@api.param('test_suite', 'The test suite to query')
@api.response(404, 'Script not found.')
class TestSuiteGet(Resource):
@token_required
@organization_team_required_by_args
@api.doc('get_the_test_cases')
@api.marshal_with(_test_cases)
def get(self, test_suite, **kwargs):
"""Get the test cases of a test suite"""
organization = kwargs['organization']
team = kwargs['team']
test = Test.objects(test_suite=test_suite, organization=organization, team=team).first()
if not test:
return error_message(ENOENT, 'Test {} not found'.format(test_suite)), 404
return {
'test_cases': test.test_cases,
'test_suite': test.test_suite
}
@api.route('/')
class TestSuitesList(Resource):
@token_required
@organization_team_required_by_args
@api.doc('get_the_test_suite_list')
@api.marshal_list_with(_test_suite)
def get(self, **kwargs):
"""Get the test suite list which contains some necessary test details"""
organization = kwargs['organization']
team = kwargs['team']
tests = Test.objects(organization=organization, team=team)
ret = []
for t in tests:
ret.append({
'id': str(t.id),
'test_suite': t.test_suite,
'test_cases': t.test_cases,
'variables': t.variables,
'author': t.author.name
})
return ret
| [
"[email protected]"
]
| |
f21aa89b98bd1ddc09335867c23cbf53a8a6c2a7 | 06f238313235b279cad3ade94cb69f8c4f073215 | /model_verbq_working.py | 5638e6d98b9d212a37be48e2a72ecbf0b599877d | [
"MIT"
]
| permissive | thilinicooray/mac-network-pytorch | f26f9fac0e67c21abdff6862a696187c4eb3126e | 0e4bf3f7f301570b652490f697758361c866f3c1 | refs/heads/master | 2020-03-29T04:39:35.133060 | 2019-05-14T12:48:00 | 2019-05-14T12:48:00 | 149,541,433 | 0 | 0 | MIT | 2018-09-20T02:44:20 | 2018-09-20T02:44:20 | null | UTF-8 | Python | false | false | 5,018 | py | import torch
import torch.nn as nn
from attention import Attention, NewAttention
from language_model import WordEmbedding, QuestionEmbedding
from classifier import SimpleClassifier
from fc import FCNet
import torch.nn.functional as F
import torchvision as tv
import utils
import numpy as np
import model_verb_directcnn
import model_roles_independent
class vgg16_modified(nn.Module):
def __init__(self):
super(vgg16_modified, self).__init__()
vgg = tv.models.vgg16_bn(pretrained=True)
self.vgg_features = vgg.features
def rep_size(self):
return 1024
def base_size(self):
return 512
def forward(self,x):
#return self.dropout2(self.relu2(self.lin2(self.dropout1(self.relu1(self.lin1(self.vgg_features(x).view(-1, 512*7*7)))))))
features = self.vgg_features(x)
return features
class TopDown(nn.Module):
def __init__(self,
vocab_size,
embed_hidden=300,
mlp_hidden=512):
super(TopDown, self).__init__()
self.vocab_size = vocab_size
self.q_emb = nn.LSTM(embed_hidden, mlp_hidden,
batch_first=True, bidirectional=True)
self.lstm_proj = nn.Linear(mlp_hidden * 2, mlp_hidden)
self.v_att = NewAttention(mlp_hidden, mlp_hidden, mlp_hidden)
self.classifier = nn.Sequential(
nn.Linear(mlp_hidden * 7 *7 + mlp_hidden, mlp_hidden*8),
nn.BatchNorm1d(mlp_hidden*8),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(mlp_hidden * 8, mlp_hidden*8),
nn.BatchNorm1d(mlp_hidden*8),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(mlp_hidden*8, self.vocab_size)
)
def forward(self, img, q):
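        # Shapes as wired up in BaseModel.forward (stated as an assumption, not
        # asserted here): img is (B, 49, 512) from the flattened 7x7 VGG grid,
        # q is (B, T, 300) embedded question words.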
batch_size = img.size(0)
w_emb = q
self.q_emb.flatten_parameters()
lstm_out, (h, _) = self.q_emb(w_emb)
q_emb = h.permute(1, 0, 2).contiguous().view(batch_size, -1)
q_emb = self.lstm_proj(q_emb)
att = self.v_att(img, q_emb)
v_emb = (att * img)
v_emb = v_emb.permute(0, 2, 1)
v_emb = v_emb.contiguous().view(-1, 512*7*7)
v_emb_with_q = torch.cat([v_emb, q_emb], -1)
logits = self.classifier(v_emb_with_q)
return logits
class BaseModel(nn.Module):
def __init__(self, encoder,
gpu_mode,
embed_hidden=300,
mlp_hidden = 512
):
super(BaseModel, self).__init__()
self.normalize = tv.transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
self.train_transform = tv.transforms.Compose([
tv.transforms.RandomRotation(10),
tv.transforms.RandomResizedCrop(224),
tv.transforms.RandomHorizontalFlip(),
tv.transforms.ToTensor(),
self.normalize,
])
self.dev_transform = tv.transforms.Compose([
tv.transforms.Resize(224),
tv.transforms.CenterCrop(224),
tv.transforms.ToTensor(),
self.normalize,
])
self.encoder = encoder
self.gpu_mode = gpu_mode
self.mlp_hidden = mlp_hidden
self.verbq_word_count = len(self.encoder.verb_q_words)
self.n_verbs = self.encoder.get_num_verbs()
self.conv = vgg16_modified()
'''for param in self.verb_module.parameters():
param.require_grad = False
for param in self.role_module.parameters():
param.require_grad = False
for param in self.conv.parameters():
param.require_grad = False'''
self.verb_vqa = TopDown(self.n_verbs)
self.verb_q_emb = nn.Embedding(self.verbq_word_count + 1, embed_hidden, padding_idx=self.verbq_word_count)
def train_preprocess(self):
return self.train_transform
def dev_preprocess(self, ):
return self.dev_transform
def forward(self, img, verbs=None, labels=None):
verb_q_idx = self.encoder.get_common_verbq(img.size(0))
if self.gpu_mode >= 0:
verb_q_idx = verb_q_idx.to(torch.device('cuda'))
img_embd = self.conv(img)
batch_size, n_channel, conv_h, conv_w = img_embd.size()
img_embd = img_embd.view(batch_size, n_channel, -1)
img_embd = img_embd.permute(0, 2, 1)
q_emb = self.verb_q_emb(verb_q_idx)
verb_pred = self.verb_vqa(img_embd, q_emb)
loss = self.calculate_loss(verb_pred, verbs)
return verb_pred, loss
def calculate_loss(self, verb_pred, gt_verbs):
batch_size = verb_pred.size()[0]
loss = 0
#print('eval pred verbs :', pred_verbs)
for i in range(batch_size):
verb_loss = 0
verb_loss += utils.cross_entropy_loss(verb_pred[i], gt_verbs[i])
loss += verb_loss
final_loss = loss/batch_size
#print('loss :', final_loss)
return final_loss | [
"[email protected]"
]
| |
9ada941a10b5da9a1d14e5a9f5e8e2771b3c806c | 27440297f68994be89764ec1eb996df19c408749 | /processing/merge.py | f30b74502636511db234e2edb0563fa361bbf836 | []
| no_license | anoop-phoenix/Web_Presentation | ed084dfb56e8c9c6eb8d5e00b339cb7be989da1d | cb253cb6290a6c52183bae40330d8b79de69bbc1 | refs/heads/master | 2022-07-23T06:17:23.962036 | 2020-05-11T01:56:03 | 2020-05-11T01:56:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,209 | py | import json
import cv2
import numpy as np
from os.path import join as pjoin
import os
from lib_uied.ip_preprocessing import preprocess
from CONFIG import Config
C = Config()
compo_index = {'img':0, 'text':0, 'button':0, 'input':0, 'icon':0}
def draw_bounding_box_class(org, corners, compo_class, color_map=C.COLOR, line=3, show=False, name='img'):
board = org.copy()
for i in range(len(corners)):
if compo_class[i] == 'text':
continue
board = cv2.rectangle(board, (corners[i][0], corners[i][1]), (corners[i][2], corners[i][3]), color_map[compo_class[i]], line)
board = cv2.putText(board, compo_class[i], (corners[i][0]+5, corners[i][1]+20),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, color_map[compo_class[i]], 2)
if show:
cv2.imshow(name, board)
cv2.waitKey(0)
return board
def draw_bounding_box(org, corners, color=(0, 255, 0), line=3, show=False):
board = org.copy()
for i in range(len(corners)):
board = cv2.rectangle(board, (corners[i][0], corners[i][1]), (corners[i][2], corners[i][3]), color, line)
if show:
cv2.imshow('a', board)
cv2.waitKey(0)
return board
def save_clipping(org, corners, compo_classes, compo_index, output_root=C.ROOT_IMG_COMPONENT):
if output_root is None:
output_root = C.ROOT_IMG_COMPONENT
if not os.path.exists(output_root):
os.mkdir(output_root)
pad = 1
for i in range(len(corners)):
compo = compo_classes[i]
(col_min, row_min, col_max, row_max) = corners[i]
col_min = max(col_min - pad, 0)
col_max = min(col_max + pad, org.shape[1])
row_min = max(row_min - pad, 0)
row_max = min(row_max + pad, org.shape[0])
# if component type already exists, index increase by 1, otherwise add this type
compo_path = pjoin(output_root, compo)
if not os.path.exists(compo_path):
os.mkdir(compo_path)
if compo_classes[i] not in compo_index:
compo_index[compo_classes[i]] = 0
else:
compo_index[compo_classes[i]] += 1
clip = org[row_min:row_max, col_min:col_max]
cv2.imwrite(pjoin(compo_path, str(compo_index[compo_classes[i]]) + '.png'), clip)
def save_label_txt(img_path, compo_corners, compo_class, label_txt_path):
f = open(label_txt_path, 'a')
label_txt = img_path + ' '
for i in range(len(compo_corners)):
if compo_class[i] == 'text':
continue
label_txt += ','.join([str(c) for c in compo_corners[i]]) + ',' + str(C.class_index[compo_class[i]]) + ' '
label_txt += '\n'
f.write(label_txt)
def nms(org, corners_compo_old, compos_class_old, corner_text):
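    # Drop detected components that mostly overlap OCR text boxes (ioa = overlap
    # fraction of the component): loose 0.38 threshold for 'img', tight 0.8 for
    # other classes; an 'img' whose area is >80% text is treated as a paragraph.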
corners_compo_refine = []
compos_class_refine = []
corner_text = np.array(corner_text)
for i in range(len(corners_compo_old)):
# if compos_class_old[i] != 'img':
# corners_compo_refine.append(corners_compo_old[i])
# compos_class_refine.append(compos_class_old[i])
# continue
a = corners_compo_old[i]
noise = False
area_a = (a[2] - a[0]) * (a[3] - a[1])
area_text = 0
for b in corner_text:
area_b = (b[2] - b[0]) * (b[3] - b[1])
# get the intersected area
col_min_s = max(a[0], b[0])
row_min_s = max(a[1], b[1])
col_max_s = min(a[2], b[2])
row_max_s = min(a[3], b[3])
w = np.maximum(0, col_max_s - col_min_s + 1)
h = np.maximum(0, row_max_s - row_min_s + 1)
inter = w * h
if inter == 0:
continue
# calculate IoU
ioa = inter / area_a
iob = inter / area_b
if compos_class_old[i] == 'img':
# sum up all text area in a img
# if iob > 0.8:
area_text += inter
# loose threshold for img
if ioa > 0.38:
noise = True
break
else:
# tight threshold for other components
if ioa > 0.8:
noise = True
break
# check if img is text paragraph
if compos_class_old[i] == 'img' and area_text / area_a > 0.8:
noise = True
if not noise:
corners_compo_refine.append(corners_compo_old[i])
compos_class_refine.append(compos_class_old[i])
return corners_compo_refine, compos_class_refine
def refine_text(org, corners_text, max_line_gap, min_word_length):
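    # Split each detected text box into words by scanning binary columns:
    # consecutive non-empty columns form a word; a gap wider than max_line_gap
    # (or the end of the box) closes the word if it is long enough.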
def refine(bin):
head = 0
rear = 0
gap = 0
get_word = False
for i in range(bin.shape[1]):
# find head
if not get_word and np.sum(bin[:, i]) != 0:
head = i
rear = i
get_word = True
continue
if get_word and np.sum(bin[:, i]) != 0:
rear = i
continue
if get_word and np.sum(bin[:, i]) == 0:
gap += 1
if gap > max_line_gap:
if (rear - head) > min_word_length:
corners_text_refine.append((head + col_min, row_min, rear + col_min, row_max))
gap = 0
get_word = False
if get_word and (rear - head) > min_word_length:
corners_text_refine.append((head + col_min, row_min, rear + col_min, row_max))
corners_text_refine = []
pad = 1
for corner in corners_text:
(col_min, row_min, col_max, row_max) = corner
col_min = max(col_min - pad, 0)
col_max = min(col_max + pad, org.shape[1])
row_min = max(row_min - pad, 0)
row_max = min(row_max + pad, org.shape[0])
clip = org[row_min:row_max, col_min:col_max]
clip_bin = preprocess(clip)
refine(clip_bin)
return corners_text_refine
def incorporate(img_path, compo_path, text_path, output_path_img, output_path_label_txt, img_section, is_clip=False, clip_path=None):
img = cv2.imread(img_path)
img = img[:img_section[0], :img_section[1]]
compo_f = open(compo_path, 'r')
text_f = open(text_path, 'r')
compos = json.load(compo_f)
corners_compo = []
compos_class = []
corners_text = []
for compo in compos['compos']:
corners_compo.append([compo['column_min'], compo['row_min'], compo['column_max'], compo['row_max']])
compos_class.append(compo['class'])
for line in text_f.readlines():
if len(line) > 1:
corners_text.append([int(c) for c in line[:-1].split(',')])
corners_text = refine_text(img, corners_text, 20, 10)
corners_compo_new, compos_class_new = nms(img, corners_compo, compos_class, corners_text)
board = draw_bounding_box_class(img, corners_compo_new, compos_class_new)
save_label_txt(img_path, corners_compo_new, compos_class_new, output_path_label_txt)
cv2.imwrite(output_path_img, board)
print('*** Merge Complete and Save to', output_path_img, '***')
if is_clip:
save_clipping(img, corners_compo_new, compos_class_new, compo_index, clip_path)
| [
"[email protected]"
]
| |
82910b40db0a306227fb84d75d64878ce5263901 | ac415850ca1926439a5f882c4a3b6c6105247149 | /setup.py | db2d3d2e0e3fbbb8faf3f159cb6b0aae0d70b750 | []
| no_license | RedTurtle/collective.itvalidators | 2a4e816628e48e218f82d626f3d48adf191977c3 | 86f59adf51ea185796d7c8e5007e3fd9ebf66f92 | refs/heads/master | 2020-05-20T16:46:43.456230 | 2012-11-12T13:52:01 | 2012-11-12T13:52:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,631 | py | from setuptools import setup, find_packages
import os
version = '1.0.1.dev0'
tests_require=['zope.testing', 'Products.CMFTestCase']
setup(name='collective.itvalidators',
version=version,
description="A set of Archetypes validators for Plone, some for Italian specific needs, others useful for all",
long_description=open("README.rst").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
# Get more strings from
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Framework :: Plone",
"Framework :: Plone :: 3.3",
"Framework :: Plone :: 4.0",
"Framework :: Plone :: 4.1",
"Framework :: Plone :: 4.2",
"Programming Language :: Python",
"Intended Audience :: Developers",
"Development Status :: 5 - Production/Stable",
],
keywords='plone archetypes validator plonegov',
author='RedTurtle Technology',
author_email='[email protected]',
url='http://plone.org/products/collective.itvalidators',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['collective'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Products.validation',
],
tests_require=tests_require,
extras_require=dict(test=tests_require),
test_suite='collective.itvalidators.tests.test_validation.test_suite',
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)
| [
"[email protected]"
]
| |
3bae7b1543b54747cd569854abb8d5a2f2c55705 | 1ee2087e1879b3d40661940f630f94576b38a75b | /migrations/versions/275b0e49dff7_.py | 8b691a445a1f3fb66ce336331ce9cbe0af46bfb4 | [
"Apache-2.0"
]
| permissive | dpdi-unifor/limonero | be332baf0c3596c2195c8aaaefd67def64a57c8a | 3b46f780f82e1d291ebe0120d95c71e82cd46ed9 | refs/heads/master | 2023-01-03T22:00:06.474233 | 2020-08-06T18:04:21 | 2020-08-06T18:04:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,213 | py | """empty messagepark-2.3.0/
Revision ID: 275b0e49dff7
Revises: 66d4be40bced
Create Date: 2018-07-11 16:15:33.196417
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from sqlalchemy.sql import text
# revision identifiers, used by Alembic.
revision = '275b0e49dff7'
down_revision = '66d4be40bced'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('data_source',
sa.Column('command', mysql.LONGTEXT(), nullable=True))
op.get_bind().execute(text(
"ALTER TABLE data_source CHANGE `format` `format` "
"ENUM('XML_FILE','NETCDF4','HDF5','SHAPEFILE','TEXT','CUSTOM','JSON',"
"'CSV','PICKLE','GEO_JSON','JDBC') CHARSET utf8 "
"COLLATE utf8_unicode_ci NOT NULL;"
))
op.get_bind().execute(text("""
ALTER TABLE storage CHANGE `type` `type`
ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
'POSTGIS','HBASE','CASSANDRA','JDBC') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
))
op.add_column('storage', sa.Column('enabled', sa.Boolean(), nullable=False,
server_default=sa.schema.DefaultClause(
"1"), default=1))
op.add_column('data_source',
sa.Column('updated', sa.DateTime(), nullable=False,
server_default='2018-01-01'))
def downgrade():
op.drop_column('data_source', 'command')
try:
op.get_bind().execute(text(
"ALTER TABLE data_source CHANGE `format` `format` "
"ENUM('XML_FILE','NETCDF4','HDF5','SHAPEFILE','TEXT','CUSTOM','JSON',"
"'CSV','PICKLE','GEO_JSON') CHARSET utf8 "
"COLLATE utf8_unicode_ci NOT NULL;"
))
op.get_bind().execute(text("""
ALTER TABLE storage CHANGE `type` `type`
ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
'POSTGIS','HBASE','CASSANDRA') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
))
except:
pass
op.drop_column('storage', 'enabled')
op.drop_column('data_source', 'updated')
| [
"[email protected]"
]
| |
5aee70ed6493dc5c7fe1fd68084783b335802ad2 | 4bcc9806152542ab43fc2cf47c499424f200896c | /tensorflow/lite/testing/op_tests/space_to_depth.py | 27f5dbde160fea3d10d81554543841f1152f45b4 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
]
| permissive | tensorflow/tensorflow | 906276dbafcc70a941026aa5dc50425ef71ee282 | a7f3934a67900720af3d3b15389551483bee50b8 | refs/heads/master | 2023-08-25T04:24:41.611870 | 2023-08-25T04:06:24 | 2023-08-25T04:14:08 | 45,717,250 | 208,740 | 109,943 | Apache-2.0 | 2023-09-14T20:55:50 | 2015-11-07T01:19:20 | C++ | UTF-8 | Python | false | false | 2,125 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for space_to_depth."""
import tensorflow as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_space_to_depth_tests(options):
"""Make a set of tests to do space_to_depth."""
test_parameters = [{
"dtype": [tf.float32, tf.int32, tf.uint8, tf.int64],
"input_shape": [[2, 12, 24, 1]],
"block_size": [2, 3, 4],
"fully_quantize": [False],
}, {
"dtype": [tf.float32],
"input_shape": [[2, 12, 24, 1], [1, 12, 24, 1]],
"block_size": [2, 3, 4],
"fully_quantize": [True],
}]
def build_graph(parameters):
input_tensor = tf.compat.v1.placeholder(
dtype=parameters["dtype"],
name="input",
shape=parameters["input_shape"])
out = tf.compat.v1.space_to_depth(
input=input_tensor, block_size=parameters["block_size"])
return [input_tensor], [out]
def build_inputs(parameters, sess, inputs, outputs):
input_values = create_tensor_data(
parameters["dtype"],
parameters["input_shape"],
min_value=-1,
max_value=1)
return [input_values], sess.run(
outputs, feed_dict=dict(zip(inputs, [input_values])))
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| [
"[email protected]"
]
| |
64c18dd4c90df51f1a5970e4cc74abafd124de81 | 18619af2eb81d74d9e76d61971da1f0fe57b4bbb | /stochastic_optimizer/framework/__init__.py | 70d7241d62b8ae87c23c2fac6491ab6c84e3725d | []
| no_license | asahi417/StochasticOptimizers | 4b9245c7fc99e660d9298077823972cf86e21205 | d98c91136835206dc36d9409e425e1caf4fbb275 | refs/heads/master | 2020-12-01T04:37:20.798101 | 2020-01-01T07:50:58 | 2020-01-01T07:50:58 | 230,559,078 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 162 | py | from .learning_curve_classifier import LearningCurveClassifier
from .grid_search import GridSearch
__all__ = (
"LearningCurveClassifier",
"GridSearch"
)
| [
"[email protected]"
]
| |
8e8baac73edc574040ae13f34cab1d3b1185e8ce | d75371f629cf881de3c49b53533879a5b862da2e | /python/flatten-nested-list-iterator.py | 35a303de0ca7014e654d63308aa377c0f04c99be | []
| no_license | michaelrbock/leet-code | 7352a1e56429bb03842b588ba6bda2a90315a2f4 | 070db59d4e0ded3fb168c89c3d73cb09b3c4fe86 | refs/heads/master | 2020-04-01T05:40:49.262575 | 2019-10-10T22:03:10 | 2019-10-10T22:03:10 | 152,914,631 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,838 | py | # """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger(object):
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class NestedIterator(object):
    def __init__(self, nestedList):
        """
        Initialize your data structure here.
        :type nestedList: List[NestedInteger]
        """
        # Stack of pending items; the top of the stack is the next candidate.
        # Plain ints/lists are duck-typed as well, so the inline tests below
        # (which pass ordinary Python lists) also run.
        self.stack = list(reversed(nestedList))
    def next(self):
        """
        :rtype: int
        """
        # hasNext() guarantees the top of the stack is an integer.
        top = self.stack.pop()
        return top if isinstance(top, int) else top.getInteger()
    def hasNext(self):
        """
        :rtype: bool
        """
        # Lazily flatten nested lists until an integer surfaces on top.
        while self.stack:
            top = self.stack[-1]
            if isinstance(top, int) or (not isinstance(top, list) and top.isInteger()):
                return True
            self.stack.pop()
            inner = top if isinstance(top, list) else top.getList()
            self.stack.extend(reversed(inner))
        return False
# Your NestedIterator object will be instantiated and called as such:
# i, v = NestedIterator(nestedList), []
# while i.hasNext(): v.append(i.next())
i, v = NestedIterator([[1,1],2,[1,1]]), []
while i.hasNext(): v.append(i.next())
assert v == [1,1,2,1,1]
# Explanation: By calling next repeatedly until hasNext returns false,
# the order of elements returned by next should be: [1,1,2,1,1].
# Example 2:
i, v = NestedIterator([1,[4,[6]]]), []
while i.hasNext(): v.append(i.next())
assert v == [1,4,6]
# Explanation: By calling next repeatedly until hasNext returns false,
# the order of elements returned by next should be: [1,4,6]. | [
"[email protected]"
]
| |
3eb8e8be307b73db69c95441580767c265d74e73 | 03e3138f99f275d15d41a5c5bfb212f85d64d02e | /source/res/scripts/client/gui/Scaleform/daapi/view/lobby/profile/ProfileHof.py | 84be578dc3dfaf43f6f2f9fbcf9766701417e1b0 | []
| no_license | TrenSeP/WorldOfTanks-Decompiled | e428728e7901146d0b599d02c930d70532232a97 | 1faa748acec1b7e435b657fd054ecba23dd72778 | refs/heads/1.4.1 | 2020-04-27T08:07:49.813023 | 2019-03-05T17:37:06 | 2019-03-05T17:37:06 | 174,159,837 | 1 | 0 | null | 2019-03-06T14:33:33 | 2019-03-06T14:24:36 | Python | UTF-8 | Python | false | false | 7,566 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/profile/ProfileHof.py
from functools import partial
import BigWorld
from adisp import process
from debug_utils import LOG_WARNING, LOG_ERROR
from helpers import dependency, i18n
from gui.Scaleform import MENU
from gui.Scaleform.locale.WAITING import WAITING
from gui.shared.formatters import icons
from skeletons.gui.web import IWebController
from skeletons.gui.server_events import IEventsCache
from gui import DialogsInterface
from gui.Scaleform.daapi.settings.views import VIEW_ALIAS
from gui.Scaleform.daapi.view.lobby.hof.hof_helpers import getHofAchievementsRatingUrl, getHofVehiclesRatingUrl, isHofButtonNew, setHofButtonOld, getHofDisabledKeys, onServerSettingsChange
from gui.Scaleform.daapi.view.lobby.hof.web_handlers import createHofWebHandlers
from gui.Scaleform.daapi.view.meta.ProfileHofMeta import ProfileHofMeta
from gui.Scaleform.genConsts.PROFILE_CONSTANTS import PROFILE_CONSTANTS
from gui.shared import g_eventBus, events, EVENT_BUS_SCOPE
class ProfileHof(ProfileHofMeta):
_eventsCache = dependency.descriptor(IEventsCache)
_clansController = dependency.descriptor(IWebController)
_errorsStatusMap = {'1004': PROFILE_CONSTANTS.HOF_SPECIAL_CASES,
'1005': PROFILE_CONSTANTS.HOF_SPECIAL_CASES,
'1015': PROFILE_CONSTANTS.HOF_SPECIAL_CASES,
'1016': PROFILE_CONSTANTS.HOF_SPECIAL_CASES,
'1003': PROFILE_CONSTANTS.HOF_RESULTS_HIDE,
'1006': PROFILE_CONSTANTS.HOF_RESULTS_EXCLUSION,
'1007': PROFILE_CONSTANTS.HOF_RESULTS_INCLUSION}
_requestRetriesCount = 3
_retryDelay = 0.5
_bgPath = '../maps/icons/hof/hof_back_landing.png'
_buttonsWithCounter = (PROFILE_CONSTANTS.HOF_ACHIEVEMENTS_BUTTON, PROFILE_CONSTANTS.HOF_VEHICLES_BUTTON)
def __init__(self, *args):
super(ProfileHof, self).__init__(*args)
self.__status = PROFILE_CONSTANTS.HOF_RESULTS_SHOW
self.__retriesCount = 0
self.__isMaintenance = False
self.__viewDisposed = False
self.__requestProcessing = False
self.__retryCallback = None
return
def showAchievementsRating(self):
setHofButtonOld(PROFILE_CONSTANTS.HOF_ACHIEVEMENTS_BUTTON)
self.__openHofBrowserView(getHofAchievementsRatingUrl())
def showVehiclesRating(self):
setHofButtonOld(PROFILE_CONSTANTS.HOF_VEHICLES_BUTTON)
self.__openHofBrowserView(getHofVehiclesRatingUrl())
@process
def changeStatus(self):
if self.__status == PROFILE_CONSTANTS.HOF_RESULTS_SHOW:
success = yield DialogsInterface.showI18nConfirmDialog('hof/excludeRating')
if success:
self.__makeRequest(self._clansController.getClanDossier().requestHofUserExclude, PROFILE_CONSTANTS.HOF_RESULTS_EXCLUSION, lambda errorCode: self.__getRatingStatus())
elif self.__status == PROFILE_CONSTANTS.HOF_RESULTS_HIDE:
self.__makeRequest(self._clansController.getClanDossier().requestHofUserRestore, PROFILE_CONSTANTS.HOF_RESULTS_INCLUSION, lambda errorCode: self.__getRatingStatus())
else:
LOG_WARNING('Something went wrong! Getting actual status.')
self.__getRatingStatus()
def onSectionActivated(self):
if self.lobbyContext.getServerSettings().bwHallOfFame.isStatusEnabled:
if self.__requestProcessing:
LOG_WARNING('ProfileHof request canceled: another request is processing')
else:
self.__getRatingStatus()
else:
self.as_setStatusS(PROFILE_CONSTANTS.HOF_SPECIAL_CASES)
def _populate(self):
super(ProfileHof, self)._populate()
self.lobbyContext.getServerSettings().onServerSettingsChange += self.__onServerSettingChanged
self.as_setBackgroundS(self._bgPath)
self.as_setBtnCountersS(self.__getCountersList())
def _dispose(self):
if self.__retryCallback:
LOG_WARNING('ProfileHof request canceled: ProfileHof view was disposed')
BigWorld.cancelCallback(self.__retryCallback)
self.__viewDisposed = True
self.lobbyContext.getServerSettings().onServerSettingsChange -= self.__onServerSettingChanged
super(ProfileHof, self)._dispose()
def __getCountersList(self):
counters = []
for buttonName in self._buttonsWithCounter:
if isHofButtonNew(buttonName):
counters.append({'componentId': buttonName,
'count': '1'})
return counters
def __getRatingStatus(self):
def handleError(errorCode):
status = self._errorsStatusMap.get(errorCode)
if status:
self.__status = status
self.as_setStatusS(status)
else:
LOG_ERROR('Unknown error code: ' + str(errorCode))
self.__makeRequest(self._clansController.getClanDossier().requestHofUserInfo, PROFILE_CONSTANTS.HOF_RESULTS_SHOW, handleError)
@process
def __makeRequest(self, requestFunc, successStatus, errorCallback):
if self.__retriesCount == 0:
if not self.__isMaintenance:
self.as_showWaitingS(WAITING.HOF_LOADING)
self.__requestProcessing = True
else:
self.__retryCallback = None
response = yield requestFunc()
if self.__viewDisposed:
LOG_WARNING('ProfileHof request canceled: ProfileHof view was disposed')
return
else:
if response:
self.__refreshRequest()
if self.__isMaintenance:
self.as_hideServiceViewS()
self.as_setBtnCountersS(self.__getCountersList())
self.__isMaintenance = False
errors = response.getErrors()
if not errors:
self.__status = successStatus
self.as_setStatusS(successStatus)
else:
errorCallback(errors[0])
elif self.__retriesCount < self._requestRetriesCount:
self.__retriesCount += 1
self.__retryCallback = BigWorld.callback(self._retryDelay, partial(self.__makeRequest, requestFunc, successStatus, errorCallback))
else:
self.__refreshRequest()
if not self.__isMaintenance:
self.__isMaintenance = True
header = icons.alert() + i18n.makeString(MENU.BROWSER_DATAUNAVAILABLE_HEADER)
description = i18n.makeString(MENU.BROWSER_DATAUNAVAILABLE_DESCRIPTION)
self.as_showServiceViewS(header, description)
self.as_setBtnCountersS([])
return
def __refreshRequest(self):
self.__retriesCount = 0
if not self.__isMaintenance:
self.as_hideWaitingS()
self.__requestProcessing = False
def __openHofBrowserView(self, url):
self._eventsCache.onProfileVisited()
g_eventBus.handleEvent(events.LoadViewEvent(VIEW_ALIAS.BROWSER_VIEW, ctx={'url': url,
'returnAlias': VIEW_ALIAS.LOBBY_PROFILE,
'allowRightClick': True,
'webHandlers': createHofWebHandlers(),
'selectedAlias': VIEW_ALIAS.PROFILE_HOF,
'disabledKeys': getHofDisabledKeys(),
'onServerSettingsChange': onServerSettingsChange}), EVENT_BUS_SCOPE.LOBBY)
def __onServerSettingChanged(self, diff):
if 'hallOfFame' in diff:
self.onSectionActivated()
| [
"[email protected]"
]
| |
38af75495681eb2f6ff8f41b12bb003b2b7641d6 | 3daf036e3911c00e50fb76e083ada2134ff1758f | /firefox/export_firefox_cookies.py | 861a3b9d04167e0ea5495f881f256632f0561819 | [
"MIT"
]
| permissive | jabbalaci/Bash-Utils | 954b234148745a9d73747392b137884ee7817246 | c880ff48eafb0f8f5f60f62d9cc3ddbbc0dd88b7 | refs/heads/master | 2023-05-01T22:40:46.713341 | 2023-04-24T14:30:25 | 2023-04-24T14:30:25 | 1,561,380 | 91 | 32 | null | null | null | null | UTF-8 | Python | false | false | 3,941 | py | #!/usr/bin/env python3
"""
Extract Firefox cookies
=======================
This script extracts cookies from Firefox's cookies.sqlite file
that are specific to a given host. The exported cookies are saved
in the file cookies.txt .
New! It also exports session cookies from Firefox's recovery.js file.
The exported cookies are saved to session_cookies.txt .
Then, you can use this exported file with wget to download content
that require authentication via cookies:
wget --cookies=on --load-cookies=cookies.txt --keep-session-cookies "http://..."
The original script was written by Dirk Sohler:
https://old.0x7be.de/2008/06/19/firefox-3-und-cookiestxt/
This version is a bit refactored and extended with session cookies.
Website: https://ubuntuincident.wordpress.com/2011/09/05/download-pages-with-wget-that-are-protected-by-cookies/
GitHub: https://github.com/jabbalaci/Bash-Utils (see the firefox/ folder)
Last update: 2017-05-17 (yyyy-mm-dd)
"""
import json
import os
import pprint
import sqlite3 as db
import sys
from pathlib import Path
FIREFOX_DIR = Path(os.path.expanduser('~'), '.mozilla', 'firefox')
COOKIES_TXT = 'cookies.txt'
SESSION_COOKIES_TXT = 'session_cookies.txt'
CONTENTS = "host, path, isSecure, expiry, name, value"
def get_cookie_db_path(firefox_dir):
for e in os.listdir(firefox_dir):
if e.endswith('.default'):
p = Path(firefox_dir, e, 'cookies.sqlite')
if not p.is_file():
print("Error: the file '{0}' doesn't exist".format(str(p)), file=sys.stderr)
sys.exit(1)
else:
return str(p)
# else
print("Error: the user dir. was not found in '{0}'".format(firefox_dir), file=sys.stderr)
sys.exit(1)
def get_recovery_js_path(firefox_dir):
for e in os.listdir(firefox_dir):
if e.endswith('.default'):
p = Path(firefox_dir, e, 'sessionstore-backups', 'recovery.js')
if not p.is_file():
print("Error: the file '{0}' doesn't exist".format(str(p)), file=sys.stderr)
sys.exit(1)
else:
return str(p)
# else
print("Error: the user dir. was not found in '{0}'".format(firefox_dir), file=sys.stderr)
sys.exit(1)
def extract_cookies(host):
"""
Extract cookies from cookies.sqlite.
"""
cookie_db = get_cookie_db_path(str(FIREFOX_DIR))
print("# working with", cookie_db)
conn = db.connect(cookie_db)
cursor = conn.cursor()
sql = "SELECT {c} FROM moz_cookies WHERE host LIKE '%{h}%'".format(c=CONTENTS, h=host)
cursor.execute(sql)
out = open(COOKIES_TXT, 'w')
cnt = 0
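    # Netscape cookies.txt line layout (tab-separated), as written below:
    #   domain  flag(TRUE)  path  secure  expiry  name  value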
for row in cursor.fetchall():
s = "{0}\tTRUE\t{1}\t{2}\t{3}\t{4}\t{5}\n".format(row[0], row[1],
str(bool(row[2])).upper(), row[3], str(row[4]), str(row[5]))
out.write(s)
cnt += 1
print("Search term: {0}".format(host))
print("Exported: {0}".format(cnt))
out.close()
conn.close()
def extract_session_cookies(host):
"""
Extract session cookies from recovery.js.
"""
fname = get_recovery_js_path(str(FIREFOX_DIR))
print("# working with", fname)
with open(fname) as f:
d = json.load(f)
cookies = d['windows'][0]['cookies']
cnt = 0
with open(SESSION_COOKIES_TXT, "w") as f:
for c in cookies:
if host in c['host']:
res = {
c['name']: c['value'],
}
print(json.dumps(res, indent=2), file=f)
cnt += 1
#
#
#
print("Exported: {0}".format(cnt))
#############################################################################
if __name__ == "__main__":
if len(sys.argv) == 1:
print("{0}: specify the host".format(Path(sys.argv[0]).name))
sys.exit(1)
# else
host = sys.argv[1]
extract_cookies(host)
extract_session_cookies(host)
| [
"[email protected]"
]
| |
fddabc87a3bd3b32b19da51b8a145cb38e9f1ca1 | 7bcec8a9c6a240ec0888bec4179f536046464005 | /moviesys/moviesys/.history/library/admin_20210318134349.py | 76ed2874fbcd5d1186ba17a3a66fef09063aae9c | []
| no_license | yifanzhang13/MovieManagementSystem_group5 | c64e5810914c3d33ae6cd94e8eed5dc5a3962181 | 4cca1a4299311681d69b2347ca8d7b02e0846ebc | refs/heads/main | 2023-03-29T08:30:26.655108 | 2021-04-01T15:42:52 | 2021-04-01T15:42:52 | 344,417,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | from django.contrib import admin
# Register your models here.
from .models import Movies, Users, Ratings, Links, Tags
# register models
# admin.site.register(Movies)
admin.site.register(Users)
admin.site.register(Ratings)
admin.site.register(Links)
admin.site.register(Tags)
@admin.register(Movies)
class MoviesAdmin(admin.ModelAdmin):
list_display = ('MovieID', 'MovieTitle', 'MovieGenres')
| [
"[email protected]"
]
| |
ade285186c539e4e1bfac84b045c7a325362486d | 51a37b7108f2f69a1377d98f714711af3c32d0df | /src/leetcode/P5667.py | b91f9dbfe363084ec778918ed185877ab603d56c | []
| no_license | stupidchen/leetcode | 1dd2683ba4b1c0382e9263547d6c623e4979a806 | 72d172ea25777980a49439042dbc39448fcad73d | refs/heads/master | 2022-03-14T21:15:47.263954 | 2022-02-27T15:33:15 | 2022-02-27T15:33:15 | 55,680,865 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,218 | py | import bisect
class Solution:
def canEat(self, candiesCount, queries):
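        # Prefix-sum idea: s[i] = candies of the first i types; by day d
        # (0-based) you have eaten between d+1 (one per day) and c*(d+1)
        # candies, and bisecting s for those totals brackets the candy
        # types reachable on that day.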
n = len(candiesCount)
s = [0] * (n + 1)
for i, v in enumerate(candiesCount):
s[i + 1] = s[i] + v
ret = []
for q in queries:
t, d, c = q
l = 1 * (d + 1)
r = c * (d + 1)
cl = bisect.bisect_left(s, l)
cr = bisect.bisect_left(s, r)
if cl - 1 <= t <= cr - 1:
ret.append(True)
else:
ret.append(False)
return ret
if __name__ == '__main__':
    candiesCount = [
        46, 5, 47, 48, 43, 34, 15, 26, 11, 25, 41, 47, 15, 25, 16, 50, 32, 42, 32, 21,
        36, 34, 50, 45, 46, 15, 46, 38, 50, 12, 3, 26, 26, 16, 23, 1, 4, 48, 47, 32,
        47, 16, 33, 23, 38, 2, 19, 50, 6, 19, 29, 3, 27, 12, 6, 22, 33, 28, 7, 10,
        12, 8, 13, 24, 21, 38, 43, 26, 35, 18, 34, 3, 14, 48, 50, 34, 38, 4, 50, 26,
        5, 35, 11, 2, 35, 9, 11, 31, 36, 20, 21, 37, 18, 34, 34, 10, 21, 8, 5,
    ]
    queries = [
        [80, 2329, 69], [14, 1485, 76], [33, 2057, 83], [13, 1972, 27], [11, 387, 25], [24, 1460, 47], [22, 1783, 35],
        [1, 513, 33], [66, 2124, 85], [19, 642, 26], [15, 1963, 79], [93, 722, 96], [15, 376, 88], [60, 1864, 89],
        [86, 608, 4], [98, 257, 35], [35, 651, 47], [96, 795, 73], [62, 2077, 18], [27, 1724, 57], [34, 1984, 75],
        [49, 2413, 95], [76, 1664, 5], [28, 38, 13], [85, 54, 42], [12, 301, 3], [62, 2016, 29], [45, 2316, 37],
        [43, 2360, 28], [87, 192, 98], [27, 2082, 21], [74, 762, 37], [51, 35, 17], [73, 2193, 4], [60, 425, 65],
        [11, 1522, 58], [21, 1699, 66], [42, 1473, 5], [30, 2010, 48], [91, 796, 74], [82, 2162, 31], [23, 2569, 65],
        [24, 684, 23], [70, 1219, 51], [5, 1817, 15], [81, 2446, 34], [96, 771, 60], [49, 1171, 60], [41, 567, 67],
        [39, 799, 59], [90, 957, 81], [84, 2122, 27], [82, 1707, 44], [11, 1889, 20], [80, 1697, 83], [24, 1786, 60],
        [90, 1847, 99], [51, 114, 21], [44, 466, 85], [56, 469, 20], [44, 350, 96], [66, 1946, 10], [14, 2470, 12],
        [69, 1175, 18], [98, 1804, 25], [77, 2187, 40], [89, 2265, 45], [19, 2246, 45], [40, 2373, 79], [60, 2222, 17],
        [37, 385, 5], [97, 1759, 97], [10, 903, 5], [87, 842, 45], [74, 2398, 66], [62, 49, 94], [48, 156, 77],
        [76, 2310, 80], [64, 2360, 95], [70, 1699, 83], [39, 1241, 66], [92, 2312, 21], [63, 2148, 29], [95, 594, 74],
        [89, 90, 51], [82, 137, 70], [54, 301, 97], [15, 819, 43], [47, 1402, 60], [17, 2377, 43], [50, 1937, 95],
        [62, 1174, 74], [67, 1411, 87], [39, 1151, 48],
    ]
    print(Solution().canEat(candiesCount, queries))
| [
"[email protected]"
]
| |
e1488fdab650b9fd0136aa331c3c6462879aeb1a | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.0_rd=0.8_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=37/params.py | 9cc08e99d9f5bb4bdff5449574abf1e1a429bfd7 | []
| no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | {'cpus': 4,
'duration': 30,
'final_util': '2.027810',
'max_util': '2.0',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.8',
'res_nmb': '4',
'res_weight': '0.06',
'scheduler': 'GSN-EDF',
'trial': 37,
'utils': 'uni-medium-3'}
| [
"[email protected]"
]
| |
e3386f799b3627d0602e35e0985396f0d83ec41e | 8be3c3df6cfb9795d91dac3aef1ca770cb13bd50 | /tools/compute_metric.py | 096b34e4ef3f079f9598e1efb58a174ce0286757 | []
| no_license | elda27/deepnet | ba1d7c50c84c31161e25bfdf15c9415f2ae7308a | 71db3441c789e5bc5a121c19c03b586663ec891a | refs/heads/master | 2021-07-08T04:14:50.167106 | 2019-02-01T04:48:01 | 2019-02-01T04:48:01 | 138,707,614 | 1 | 0 | null | 2018-07-20T03:02:23 | 2018-06-26T08:25:00 | Python | UTF-8 | Python | false | false | 9,552 | py | import auto_path
import sys
import argparse
from functools import reduce
from deepnet import utils
from deepnet.utils import mhd, visualizer, dataset
import process
import os
import os.path
xp = None
as_gpu_array = None
as_cpu_array = None
import numpy as np
try:
import cupy as cp
xp = cp
as_cpu_array = cp.asnumpy
as_gpu_array = cp.array
except ImportError:
xp = np
as_cpu_array = np.asarray
as_gpu_array = np.asarray
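# xp / as_cpu_array / as_gpu_array dispatch to CuPy when it is importable, so
# the same metric code runs on the GPU (--gpu >= 0) or falls back to NumPy.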
from chainer.iterators import MultiprocessIterator, SerialIterator
import tqdm
import pandas as pd
import xarray as xr
import numba
try:
import line_profiler
AVAILABLE_PROFILER=True
except:
AVAILABLE_PROFILER=False
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, required=True)
parser.add_argument('--truth-dir', type=str, required=True, help='Groundtruth dataset directory')
    parser.add_argument('--test-dir', type=str, nargs='+', required=True, help='Test prediction result directories (Ex. <Fold 1, Case 1 dir> <Fold1, Case 2 dir> <Fold2, Case 1 dir> <Fold2, Case 2 dir>)')
    parser.add_argument('--test-names', type=str, nargs='+', required=True, help='Case names of the test predictions (must have the same length as --test-dir)')
parser.add_argument('--test-index', type=str, nargs='+', required=True, help='Test index')
parser.add_argument('--output', type=str, default='metrics.nc')
parser.add_argument('--num-parallel', type=int, default=5)
parser.add_argument('--metrics', type=str, nargs='*', default=None)
parser.add_argument('--use-profile', action='store_true')
parser.add_argument('--n-image', type=int, default=None)
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()
if args.gpu >= 0:
cp.cuda.Device(args.gpu).use()
if args.metrics is None:
args.metrics = list(RegistedMetrics.keys())
pr = None
if args.use_profile:
assert AVAILABLE_PROFILER
pr = line_profiler.LineProfiler()
# Profiling about dataset
for func in RegistedMetrics.values():
pr.add_function(func)
# Profiling about io and ip function
pr.add_module(utils.dataset)
# Profiling about trainer
pr.add_function(_main)
# Start profiling
pr.enable()
_main(args)
if args.use_profile:
pr.dump_stats('compute_metric.py.lprof')
        print('Finished profiling.\nTo show the profiling result, run:\n "python -m line_profiler compute_metric.py.lprof"')
def _main(args):
global xp
if args.gpu >= 0:
xp = cp
else:
xp = np
test_index_list = []
for test_index in args.test_index:
if test_index == '*':
test_index_list.append(test_index_list[-1])
else:
test_index_list.append(utils.parse_index_file(test_index))
    assert len(args.test_dir) % len(test_index_list) == 0, 'The number of test directories must be a multiple of the number of test index lists'
all_metrics = []
num_fold = len(args.test_dir) // len(args.test_names)
for fold_index in tqdm.trange(num_fold):
test_index = test_index_list[fold_index]
test_dirs = args.test_dir[fold_index * len(args.test_names):(fold_index+1) * len(args.test_names)]
all_metrics.append(compute_all_metrics(args, test_dirs, test_index))
#for test_name, metrics in all_metrics_in_case.items():
# all_metrics[(fold_index, test_name)] = metrics
# #all_metrics.update({ (i, test_name, key): value for key, value in metrics.items() })
metric_da = xr.concat(all_metrics, pd.Index(list(range(len(all_metrics))), name='fold'))
output_dir = os.path.dirname(args.output)
if output_dir is not None and output_dir != '':
os.makedirs(output_dir, exist_ok=True)
metric_da.to_netcdf(args.output)
def compute_all_metrics(args, test_dir, test_index):
truth_dataset = dataset.XpDataset(args.truth_dir, test_index, image=False, label=True)
pred_datasets = [dataset.XpDataset(td, test_index, image=False, label=True) for td in test_dir]
truth_iter = MultiprocessIterator(truth_dataset, args.num_parallel, shuffle=False, repeat=False)
    pred_iters = [ MultiprocessIterator(ds, args.num_parallel, shuffle=False, repeat=False) for ds in pred_datasets ]  # loop variable renamed so it no longer shadows pandas (imported as pd)
#truth_iter = SerialIterator(truth_dataset, args.num_parallel, shuffle=False, repeat=False)
#pred_iters = [ SerialIterator(pd, args.num_parallel, shuffle=False, repeat=False) for pd in pred_datasets ]
#all_metrics = { }
batch_results_dict = {}
for i, batches in tqdm.tqdm(enumerate(zip(truth_iter, *pred_iters)), total = len(truth_dataset) // args.num_parallel):
if args.n_image is not None and args.n_image <= i:
break
truth_vars = utils.batch_to_vars(batches[0])[0]
pred_vars_list = [ utils.batch_to_vars(batch)[0] for batch in batches[1:] ]
truth_vars['label'] = xp.concatenate( [ xp.expand_dims(xp.asarray(label), axis=0) for label in truth_vars['label'] ], axis=0)
# Compute metrics
for pred_vars, test_name in zip(pred_vars_list, args.test_names):
pred_vars['label'] = xp.concatenate( [ xp.expand_dims(xp.asarray(label), axis=0) for label in pred_vars['label'] ], axis=0)
values = computeMetrics(truth_vars, pred_vars, args.metrics)
batch_results_dict.setdefault(test_name,[]).append(values)
#all_metrics.setdefault(test_name, dict())
new_axis = list(batch_results_dict.keys())
new_data = []
for test_name, batch_results in batch_results_dict.items():
data_dict = {}
for batch_result in batch_results:
for key, values in batch_result.items():
data_dict.setdefault(key, []).append(values)
new_data.append(data_dict)
new_data = [ xr.Dataset({ key: xr.concat(value, dim='case_name') for key, value in dataset_.items() }) for dataset_ in new_data ]
return xr.concat(new_data, pd.Index(new_axis, name='test_name'))
RegistedMetrics = {}
RegisterKeys = {}
def register_metric(name, keys=None):
def _register_metric(f):
RegistedMetrics[name] = f
RegisterKeys[name] = [ name ] if keys is None else keys
return f
return _register_metric
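# A minimal sketch (assumption, not part of the original script) of how a
# further metric could be plugged into the registry above; 'Volume_ratio' and
# calcVolumeRatio are hypothetical names, and the signature mirrors the one
# computeMetrics uses when calling every registered metric:
#
# @register_metric('Volume_ratio')
# def calcVolumeRatio(bin_truth_label, bin_pred_label, axes):
#     truth_sum = bin_truth_label.sum(axis=axes)
#     pred_sum = bin_pred_label.sum(axis=axes)
#     return {'Volume_ratio': pred_sum / (truth_sum + 1e-8)}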
#@numba.jit
def computeMetrics(truth_vars, pred_vars, metrics, num_separate = 20):
cases = truth_vars['case_name']
thresholds = (xp.arange(num_separate + 1) / num_separate).astype(xp.float32)
truth_labels = truth_vars['label']
pred_labels = pred_vars['label']
# (num thresh, cases, num rib bones)
shape = (
len(thresholds),
truth_labels.shape[0],
truth_labels.shape[1],
)
result_metrics = {}
for i, thresh in enumerate(thresholds):
truth_bin_labels = xp.squeeze(truth_labels > 0)
pred_bin_labels = xp.squeeze(pred_labels > thresh)
for metric_name in metrics:
_metrics = RegistedMetrics[metric_name](truth_bin_labels, pred_bin_labels, axes=tuple(i for i in range(2, truth_bin_labels.ndim)))
for key, values in _metrics.items():
if key not in result_metrics:
result_metrics[key] = xp.zeros(shape)
result_metrics[key][i] = values
dims = ['threshold', 'case_name', 'rib_index']
coords = {
'threshold': [float('{:.3f}'.format(float(as_cpu_array(t)))) for t in thresholds],
'case_name': cases,
'rib_index': list(range(shape[2])),
}
return { key: xr.DataArray(as_cpu_array(values), dims=dims, coords=coords) for key, values in result_metrics.items() }
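# Note: each DataArray returned above is indexed by (threshold, case_name,
# rib_index); after the test_name concat in compute_all_metrics and the fold
# concat in _main, the netCDF written to args.output is five-dimensional.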
@register_metric('Dice')
def calcDice(im1, im2, axes):
if im1.shape != im2.shape:
raise ValueError("Shape mismatch: im1 and im2 must have the same shape. {} != {}".format(im1.shape, im2.shape))
im_sum = xp.sum(im1, axis=axes) + xp.sum(im2, axis=axes)
intersection = xp.logical_and(im1, im2)
dice = 2.0 * xp.sum(intersection, axis=axes) / (im_sum + 1e-8)
return { 'Dice':dice }
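# Sanity check: identical non-empty masks give Dice ~= 1.0 and disjoint masks
# give 0.0; the 1e-8 term only guards against division by zero for empty masks.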
@register_metric('Jaccard')
def calcJaccard(im1, im2, axes):
im_or_sum = xp.logical_or(im1, im2).sum(axis=axes)
im_and_sum = xp.logical_and(im1, im2).sum(axis=axes)
jaccard = im_and_sum.astype(xp.float32) / (im_or_sum + 1e-8)
return { 'Jaccard':jaccard }
@register_metric('F_measure', keys=[
'TPR',
'FPR',
'precesion',
'recall',
'F_measurement',
])
def calcStatisticalMetrics(bin_truth_label, bin_pred_label, axes):
truth_positive = (bin_truth_label == 1)
truth_negative = (bin_truth_label == 0)
pred_positive = (bin_pred_label == 1)
pred_negative = (bin_pred_label == 0)
tp_indices = xp.logical_and(truth_positive, pred_positive)
fn_indices = xp.logical_and(truth_positive, pred_negative)
fp_indices = xp.logical_and(truth_negative, pred_positive)
tn_indices = xp.logical_and(truth_negative, pred_negative)
TP = tp_indices.sum(axis=axes)
FN = fn_indices.sum(axis=axes)
FP = fp_indices.sum(axis=axes)
TN = tn_indices.sum(axis=axes)
precesion = TP / (TP + FP + 1e-8)
recall = TP / (TP + FN + 1e-8)
metrics = {
'TPR': TP / (TP + FN + 1e-8),
'FPR': FP / (FP + TN + 1e-8),
'precesion': precesion,
'recall': recall,
'F_measurement': 2* precesion * recall / (precesion + recall + 1e-8)
}
return metrics
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
58ac4d3b7103bda054d0570437db103f52fab1b4 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/P/pallih/eyjanis-kommentarar2.py | 59d477447c9e7f83b4a946f102e24c0b26201b2b | []
| no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,366 | py | ###############################################################################
# Eyjan comments - all registered commenters on eyjan.is
###############################################################################
import scraperwiki, urllib2, re, urllib, json, time
from BeautifulSoup import BeautifulSoup, SoupStrainer
kommentari = {}
def scrape_notandi(userid):
notandi_sed_adur = scraperwiki.metadata.get(userid)
if notandi_sed_adur is not None:
#print "Sleppum notanda (sed adur): " + userid
return
else:
#if userid is None or userid in seen:
# print "Sed adur - sleppum: " + userid
# return
#seen.add(userid)
#print seen
url= "http://i.eyjan.is/notandi/" + userid
print "Saekjum notanda: " + url
html = scraperwiki.scrape(url)
soup = BeautifulSoup(html)
soup.prettify()
fjoldi = soup.find('h2')
fjoldi = fjoldi.string
fjoldi = extract(fjoldi, "Ummæli (",")")
rest = soup.findAll('div','column2')
for n in rest:
notandi = n.findNext('h3')
nafn = notandi.string
mynd = n.findNext ('img')
mynd = mynd["src"]
mynd = re.sub("\./","http://i.eyjan.is/",mynd)
kommentari['nafn'] = nafn
kommentari['userid'] = userid
kommentari['url'] = url
kommentari['kommentafjoldi'] = fjoldi
kommentari['mynd'] = mynd
scraperwiki.datastore.save(["userid"], kommentari)
### SOFA
#time.sleep(0.2)
print "vistum notanda metatada: " + userid
scraperwiki.metadata.save(userid, '1')
def extract(text, sub1, sub2):
"""extract a substring between two substrings sub1 and sub2 in text"""
return text.split(sub1)[-1].split(sub2)[0]
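# Example: extract("window.cKey = 'abc';", "window.cKey = ", ";") -> "'abc'"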
def scrape_frett(url):
kommentari['from_url'] = url
print "Saekjum frett: " + url
html = scraperwiki.scrape(url)
time.sleep(0.2)
ckey = extract(html, "window.cKey = ",";")
ckey = re.sub("\'",'',ckey)
ckey = urllib.quote(ckey,'&')
kommentjsonurl = "http://i.eyjan.is/c/action.php?n=getcomments&key="+ckey+"&o=time-asc"
#time.sleep(0.2)
kommentjson = scraperwiki.scrape(kommentjsonurl)
kommentjson=kommentjson[1:-1]
userid = set(re.findall("userid\":([\d+]+)",kommentjson))
for n in userid:
scrape_notandi(n)
print "vistum frett metatada: " + url
scraperwiki.metadata.save(url,'1')
def scrape_manudur(url):
manudur_sed_adur = scraperwiki.metadata.get(url)
if manudur_sed_adur is not None:
print "Sleppum manudi (sed adur): " + url
return
else:
print "****** Saekjum manud: " + url
html = scraperwiki.scrape(url)
soup = BeautifulSoup(html)
soup.prettify()
tr = soup.findAll('tr')
for tr in tr:
tengill = tr.findNext('a', href = re.compile(r".*comments.*"))
kommentafjoldi = tengill.text
if kommentafjoldi != "0":
tengill = tengill['href']
tengill = re.sub("#comments","",tengill)
frett_sed_adur = scraperwiki.metadata.get(tengill)
if frett_sed_adur is not None:
print "Sleppum frett (sed adur): " + tengill
continue
else:
#frett_seen.add(tengill)
scrape_frett(tengill)
else:
print "Sleppum frett - engin komment: " + tengill['href']
        # When all the article links are done - save the month as metadata
print "Vistum manudinn sem metatadata og done: " + url
scraperwiki.metadata.save(url,'1')
def start(url):
#starting_url = 'http://eyjan.is/frettir/sarpur/'
html = scraperwiki.scrape(url)
print html
soup = BeautifulSoup(html)
soup.prettify()
manudir = soup.findAll('a', href = re.compile(r".*mnth.*"))
for a in manudir:
manadar_tengill = a['href']
if re.match(r"http://eyjan.is.*", manadar_tengill, re.VERBOSE):
manadar_tengill = manadar_tengill
else:
manadar_tengill = "http://eyjan.is/frettir/sarpur/" + manadar_tengill
print "Saekjum: " + manadar_tengill
scrape_manudur(manadar_tengill)
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=5&yr=2010') #- DONE
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=6&yr=2010') #- DONE
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=7&yr=2010') # DONE
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=8&yr=2010') # - Needs to be run again - stopped at 11.8
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=9&yr=2010')
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=10&yr=2010')
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=11&yr=2010')
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=12&yr=2010')
#scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=1&yr=2011')
scrape_manudur('http://eyjan.is/frettir/sarpur/?mnth=2&yr=2011')
#start('http://eyjan.is/frettir/sarpur/')
#start('http://eyjan.is/frettir/sarpur/') | [
"[email protected]"
]
| |
a71f07c2f02f4b0b318fd2cd140f634cbc394487 | 82a9077bcb5a90d88e0a8be7f8627af4f0844434 | /google-cloud-sdk/lib/tests/lib/surface/apigee/base.py | ea6208f2afffb0c8f08517c2c13a3704255c8215 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | piotradamczyk5/gcloud_cli | 1ae2553595e569fad6ce84af62b91a7ee5489017 | 384ece11040caadcd64d51da74e0b8491dd22ca3 | refs/heads/master | 2023-01-01T23:00:27.858583 | 2020-10-21T04:21:23 | 2020-10-21T04:21:23 | 290,238,061 | 0 | 0 | null | 2020-10-19T16:43:36 | 2020-08-25T14:31:00 | Python | UTF-8 | Python | false | false | 9,821 | py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes for all gcloud apigee tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import json
import os
from googlecloudsdk.core import config
from googlecloudsdk.core import yaml
from googlecloudsdk.core.util import files
from tests.lib import cli_test_base
from tests.lib import e2e_base
from tests.lib import sdk_test_base
def _JsonDataBogoType(json_data):
"""Returns whether a JSON data value is an object, list, or primitive."""
if isinstance(json_data, collections.Mapping):
return "object"
# Strings look like sequences even though they aren't.
# Get types from literals so that the code can be the same on py2 and py3.
if isinstance(json_data, type("")) or isinstance(json_data, type(b"")):
return "primitive"
if isinstance(json_data, collections.Sequence):
return "list"
return "primitive"
MismatchInfo = collections.namedtuple("MismatchInfo",
"path description expected actual")
def _JsonDataMismatches(expected, actual, parent_path=None):
"""Yields MismatchInfo for each mismatch between `expected` and `actual`."""
if expected == actual:
return
json_type = _JsonDataBogoType(actual)
expected_type = _JsonDataBogoType(expected)
if json_type != expected_type:
yield MismatchInfo(parent_path, "Unexpected type", expected_type, json_type)
return
if json_type == "primitive":
yield MismatchInfo(parent_path, "Mismatched values", expected, actual)
return
if json_type == "object":
for key in expected:
path = parent_path + "." + key if parent_path else key
if key in actual:
for mismatch in _JsonDataMismatches(expected[key], actual[key], path):
yield mismatch
else:
yield MismatchInfo(path, "Missing object member", expected[key], None)
for key in actual:
if key not in expected:
yield MismatchInfo(path, "Unexpected object member", None, actual[key])
return
# Values are lists. Attempt to determine how the actual list differs from the
# expected one.
expected_items = expected[:]
actual_items = actual[:]
while len(expected_items) and len(actual_items):
next_expected = expected_items[0]
next_actual = actual_items[0]
if next_expected == next_actual:
expected_items = expected_items[1:]
actual_items = actual_items[1:]
continue
# The rest of this code is optimized for readability, not performance.
actual_is_unexpected = (next_actual not in expected_items)
expected_is_missing = (next_expected not in actual_items)
if actual_is_unexpected and expected_is_missing:
# If neither value is recognized, assume one replaced the other.
index_in_expected = len(expected) - len(expected_items)
index_in_actual = len(actual) - len(actual_items)
if index_in_expected == index_in_actual:
index_string = "[%d]" % index_in_expected
else:
index_string = "[%d -> %d]" % (index_in_expected, index_in_actual)
for mismatch in _JsonDataMismatches(
next_expected, next_actual,
parent_path + index_string if parent_path else index_string):
yield mismatch
expected_items = expected_items[1:]
actual_items = actual_items[1:]
continue
# If one value is recognized but the other is not, assume the unrecognized
# value is the abnormal one.
if actual_is_unexpected:
yield MismatchInfo(parent_path, "Unexpected list item", None, next_actual)
actual_items = actual_items[1:]
continue
if expected_is_missing:
yield MismatchInfo(parent_path, "Missing list item", next_expected, None)
expected_items = expected_items[1:]
continue
# Both values are recognized; they just shouldn't be "here". Choose the
# least disruptive reordering.
actual_position_of_expected = actual_items.index(next_expected)
expected_position_of_actual = expected_items.index(next_actual)
if actual_position_of_expected < expected_position_of_actual:
expected_items = expected_items[1:]
del actual_items[actual_position_of_expected]
else:
del expected_items[expected_position_of_actual]
actual_items = actual_items[1:]
# At most one of the two lists remain. Process each item as a mismatch.
for next_actual in actual_items:
yield MismatchInfo(parent_path, "Unexpected list item", None, next_actual)
for next_expected in expected_items:
yield MismatchInfo(parent_path, "Missing list item", next_expected, None)
class ApigeeBaseTest(cli_test_base.CliTestBase, sdk_test_base.WithTempCWD):
"""Base class for tests of gcloud Apigee support."""
def SetUp(self):
self._already_seen_output_prefix = ""
def GetJsonOutput(self):
output_text = self.GetOutput()
# Trim off the part at the front that has already been checked by previous
# calls to GetJsonOutput().
if output_text.startswith(self._already_seen_output_prefix):
relevant_output_text = output_text[len(self._already_seen_output_prefix):]
else:
relevant_output_text = output_text
self._already_seen_output_prefix = output_text
try:
return json.loads(relevant_output_text)
except ValueError as e:
self.fail("Output is not valid JSON.\n%s" % (e))
def AssertJsonOutputMatches(self, expected_output, message=None):
if message is None:
message = ""
else:
message += ": "
output_data = self.GetJsonOutput()
for mismatch in _JsonDataMismatches(expected_output, output_data):
self.fail("%s%s for %s\nExpected: %s\nActual: %s" %
(message, mismatch.description, mismatch.path or "[root]",
yaml.dump(mismatch.expected), yaml.dump(mismatch.actual)))
class WithRunApigee(cli_test_base.CliTestBase):
"""Tests that invoke `gcloud apigee` commands."""
def SetUp(self):
super(WithRunApigee, self).SetUp()
# Wipe out any config or cache state that might be left over from previous
# tests.
config_dir = config.Paths().global_config_dir
for filename in os.listdir(config_dir):
if not filename.startswith(".apigee"):
continue
full_path = os.path.join(config_dir, filename)
if os.path.isdir(full_path):
files.RmTree(full_path)
else:
os.unlink(full_path)
self.Run("config unset project")
def RunApigee(self, command):
"""Runs `command` in the most current available apigee surface."""
# TODO(b/150206546): At GA launch, remove "alpha" here.
return self.Run("alpha apigee " + command)
class WithJSONBodyValidation(e2e_base.WithMockHttp):
"""Tests that can check the JSON contents of HTTP request bodies."""
def SetUp(self):
self._expected_json_bodies = {}
def AddHTTPResponse(self, url, *args, **kwargs):
if url not in self._expected_json_bodies:
self._expected_json_bodies[url] = []
if "expected_json_body" in kwargs:
self._expected_json_bodies[url].append(kwargs["expected_json_body"])
del kwargs["expected_json_body"]
else:
self._expected_json_bodies[url].append(None)
return super(WithJSONBodyValidation,
self).AddHTTPResponse(url, *args, **kwargs)
def _request(self, uri, *args, **kwargs):
if "?" in uri:
cut_uri = uri.split("?", 1)[0]
else:
cut_uri = uri
response = super(WithJSONBodyValidation,
self)._request(uri, *args, **kwargs)
self.assertIn(
cut_uri, self._expected_json_bodies,
"Unexpected request to %s. Only expected: %s" %
(uri, self._expected_json_bodies.keys()))
self.assertNotEqual(self._expected_json_bodies[cut_uri], [],
"Unexpected additional request to %s" % uri)
expected_json_body = self._expected_json_bodies[cut_uri].pop(0)
if expected_json_body is not None:
body = None
if "body" in kwargs:
body = kwargs["body"]
elif len(args) > 1:
body = args[1]
self.assertIsNotNone(body, "Expected a body for %s but saw none." % uri)
try:
actual_body = json.loads(body)
except (ValueError, TypeError) as e:
self.fail("Body is not valid JSON.\n%s" % e)
for mismatch in _JsonDataMismatches(expected_json_body, actual_body):
self.fail("Request body mismatch: %s for %s\nExpected: %s\nActual: %s" %
(mismatch.description, mismatch.path or "[root]",
yaml.dump(mismatch.expected), yaml.dump(mismatch.actual)))
return response
class ApigeeServiceAccountTest(ApigeeBaseTest, e2e_base.WithServiceAuth,
WithRunApigee):
"""End-to-end tests of `gcloud apigee` surface commands.
These tests run against the cloud-sdk-integration-testing Cloud Platform
project via a service account.
"""
class ApigeeIsolatedTest(e2e_base.WithMockHttp, ApigeeBaseTest,
sdk_test_base.WithFakeAuth):
"""Isolated tests of gcloud Apigee support."""
class ApigeeSurfaceTest(ApigeeIsolatedTest, WithRunApigee):
"""Isolated tests of `gcloud apigee` surface commands."""
| [
"[email protected]"
]
| |
f32820f7dc5afdb06d623413c51c3fa851319acd | ab5cdf8f2de94c327e4679da84f941b1f3c04db4 | /kubernetes/client/models/v1beta1_host_port_range.py | 580abcc558942c90a636d2190bec5623cb540642 | [
"Apache-2.0"
]
| permissive | diannaowa/client-python | a4a92a125178db26004eaef5062f9b1b581b49a8 | 5e268fb0b6f21a535a14a7f968b84ed4486f6774 | refs/heads/master | 2020-12-02T22:06:03.687696 | 2017-06-30T21:42:50 | 2017-06-30T21:42:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,825 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1HostPortRange(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, max=None, min=None):
"""
V1beta1HostPortRange - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'max': 'int',
'min': 'int'
}
self.attribute_map = {
'max': 'max',
'min': 'min'
}
self._max = max
self._min = min
@property
def max(self):
"""
Gets the max of this V1beta1HostPortRange.
max is the end of the range, inclusive.
:return: The max of this V1beta1HostPortRange.
:rtype: int
"""
return self._max
@max.setter
def max(self, max):
"""
Sets the max of this V1beta1HostPortRange.
max is the end of the range, inclusive.
:param max: The max of this V1beta1HostPortRange.
:type: int
"""
if max is None:
raise ValueError("Invalid value for `max`, must not be `None`")
self._max = max
@property
def min(self):
"""
Gets the min of this V1beta1HostPortRange.
min is the start of the range, inclusive.
:return: The min of this V1beta1HostPortRange.
:rtype: int
"""
return self._min
@min.setter
def min(self, min):
"""
Sets the min of this V1beta1HostPortRange.
min is the start of the range, inclusive.
:param min: The min of this V1beta1HostPortRange.
:type: int
"""
if min is None:
raise ValueError("Invalid value for `min`, must not be `None`")
self._min = min
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1HostPortRange):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
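# Illustrative usage (assumption, not part of the generated client):
# rng = V1beta1HostPortRange(min=8000, max=9000)
# rng.to_dict()  # -> {'max': 9000, 'min': 8000}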
| [
"[email protected]"
]
| |
f5f364ce6f2c28ed585f25e0559cd6f7cccfc170 | 4302fd10583ccff63ff5693bd2ae5903323cb769 | /curate/migrations/0005_remove_study_is_replication.py | b6ec819551ad0d855253b9dfe55fef304f4a0d25 | [
"MIT"
]
| permissive | ScienceCommons/curate_science | 1faf742c8de1e9c9180e4d8ec6a7457ad95bb705 | 4e4072e8c000df0d2e80637016f8f0e667f4df54 | refs/heads/master | 2022-02-12T19:56:51.730534 | 2022-01-25T16:44:54 | 2022-01-25T16:44:54 | 149,122,317 | 14 | 7 | MIT | 2021-03-23T17:27:05 | 2018-09-17T12:32:25 | HTML | UTF-8 | Python | false | false | 334 | py | # Generated by Django 2.1.1 on 2018-09-27 02:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('curate', '0004_auto_20180921_0646'),
]
operations = [
migrations.RemoveField(
model_name='study',
name='is_replication',
),
]
| [
"[email protected]"
]
| |
fd4b65d59d854cc6badda13d543369570401228a | f26937e8cd0b07589ba1cf6275596d97488cda7e | /scrapySpider/fenbu/fenbu/spiders/ff.py | aa080ce347819359cc930f96fdb95135e9cc23bc | []
| no_license | HezhouW/hive | 4aa46a045d22de121e2903075e74c3c9fd75ec1f | 3a7de0c18cbe0ec81e0b40c3217dd5b1a15cf464 | refs/heads/master | 2022-02-27T04:52:42.704501 | 2019-05-24T02:40:49 | 2019-05-24T02:40:49 | 123,524,369 | 1 | 0 | null | 2018-03-02T03:18:07 | 2018-03-02T03:18:07 | null | UTF-8 | Python | false | false | 308 | py | # -*- coding: utf-8 -*-
import scrapy
from scrapy_redis.spiders import RedisCrawlSpider
from fenbu.items import FenbuItem
import re
import redis
class MoviespiderSpider(RedisCrawlSpider):
name = 'ff'
redis_key = 'fenbuSpider:start_urls'
def parse(self , response):
print(response.text) | [
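# Note: a RedisCrawlSpider idles until a start URL is pushed to its redis_key,
# e.g. (placeholder URL): redis-cli lpush fenbuSpider:start_urls http://example.com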
"[email protected]"
]
| |
e2383962ea844f757294461227b086b8a268e840 | 9fce5f629873ef5c43274fdae1d49a270ec78f38 | /venv/lib/python3.6/site-packages/pony/orm/tests/test_diagram_attribute.py | dab290d4f0f4b52a598af72e6653aa4b0f13fb16 | []
| no_license | makkar-nishant123/Pythonselenium | 2c9273d81915bc1f7724de93d7b87c76b5f9066b | 173f49e6522b80f13e6e406756130c0b1376a139 | refs/heads/master | 2021-06-12T09:30:24.097297 | 2021-04-22T02:48:56 | 2021-04-22T02:48:56 | 182,635,591 | 0 | 0 | null | 2021-04-22T02:52:08 | 2019-04-22T06:26:41 | Python | UTF-8 | Python | false | false | 27,721 | py | from __future__ import absolute_import, print_function, division
from pony.py23compat import PY2
from datetime import date
import unittest
from pony.orm.core import *
from pony.orm.core import Attribute
from pony.orm.tests.testutils import *
class TestAttribute(unittest.TestCase):
@raises_exception(TypeError, "Attribute Entity1.id has unknown option 'another_option'")
def test_attribute1(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, another_option=3)
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, 'Cannot link attribute Entity1.b to abstract Entity class. Use specific Entity subclass instead')
def test_attribute2(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
b = Required(db.Entity)
db.generate_mapping()
@raises_exception(TypeError, 'Default value for required attribute Entity1.b cannot be None')
def test_attribute3(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
b = Required(int, default=None)
def test_attribute4(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required('Entity2', reverse='attr2')
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Optional(Entity1)
db.generate_mapping(create_tables=True)
self.assertEqual(Entity1.attr1.reverse, Entity2.attr2)
def test_attribute5(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Optional(Entity1, reverse=Entity1.attr1)
self.assertEqual(Entity2.attr2.reverse, Entity1.attr1)
@raises_exception(TypeError, "Value of 'reverse' option must be name of reverse attribute). Got: 123")
def test_attribute6(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required('Entity2', reverse=123)
@raises_exception(TypeError, "Reverse option cannot be set for this type: %r" % str)
def test_attribute7(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required(str, reverse='attr1')
@raises_exception(TypeError, "'Attribute' is abstract type")
def test_attribute8(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Attribute(str)
@raises_exception(ERDiagramError, "Attribute name cannot both start and end with underscore. Got: _attr1_")
def test_attribute9(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
_attr1_ = Required(str)
@raises_exception(ERDiagramError, "Duplicate use of attribute Entity1.attr1 in entity Entity2")
def test_attribute10(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required(str)
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Entity1.attr1
@raises_exception(ERDiagramError, "Invalid use of attribute Entity1.a in entity Entity2")
def test_attribute11(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(str)
class Entity2(db.Entity):
b = Required(str)
composite_key(Entity1.a, b)
@raises_exception(ERDiagramError, "Cannot create default primary key attribute for Entity1 because name 'id' is already in use."
" Please create a PrimaryKey attribute for entity Entity1 or rename the 'id' attribute")
def test_attribute12(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = Optional(str)
@raises_exception(ERDiagramError, "Reverse attribute for Entity1.attr1 not found")
def test_attribute13(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Required('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
db.generate_mapping()
@raises_exception(ERDiagramError, "Reverse attribute Entity1.attr1 not found")
def test_attribute14(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Required(Entity1, reverse='attr1')
db.generate_mapping()
@raises_exception(ERDiagramError, "Inconsistent reverse attributes Entity3.attr3 and Entity2.attr2")
def test_attribute15(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Optional('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Required(Entity1)
class Entity3(db.Entity):
id = PrimaryKey(int)
attr3 = Required(Entity2, reverse='attr2')
db.generate_mapping()
@raises_exception(ERDiagramError, "Inconsistent reverse attributes Entity3.attr3 and Entity2.attr2")
def test_attribute16(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Optional('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Required(Entity1)
class Entity3(db.Entity):
id = PrimaryKey(int)
attr3 = Required(Entity2, reverse=Entity2.attr2)
db.generate_mapping()
@raises_exception(ERDiagramError, 'Reverse attribute for Entity2.attr2 not found')
def test_attribute18(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Required('Entity1')
db.generate_mapping()
@raises_exception(ERDiagramError, "Ambiguous reverse attribute for Entity1.a. Use the 'reverse' parameter for pointing to right attribute")
def test_attribute19(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
a = Required('Entity2')
b = Optional('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
c = Set(Entity1)
d = Set(Entity1)
db.generate_mapping()
@raises_exception(ERDiagramError, "Ambiguous reverse attribute for Entity1.c. Use the 'reverse' parameter for pointing to right attribute")
def test_attribute20(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
c = Set('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
a = Required(Entity1, reverse='c')
b = Optional(Entity1, reverse='c')
db.generate_mapping()
def test_attribute21(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
a = Required('Entity2', reverse='c')
b = Optional('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
c = Set(Entity1)
d = Set(Entity1)
def test_attribute22(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
a = Required('Entity2', reverse='c')
b = Optional('Entity2')
class Entity2(db.Entity):
id = PrimaryKey(int)
c = Set(Entity1, reverse='a')
d = Set(Entity1)
@raises_exception(ERDiagramError, 'Inconsistent reverse attributes Entity1.a and Entity2.b')
def test_attribute23(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required('Entity2', reverse='b')
class Entity2(db.Entity):
b = Optional('Entity3')
class Entity3(db.Entity):
c = Required('Entity2')
db.generate_mapping()
@raises_exception(ERDiagramError, 'Inconsistent reverse attributes Entity1.a and Entity2.c')
    def test_attribute23a(self):  # renamed: this duplicated def shadowed the test_attribute23 above
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required('Entity2', reverse='c')
b = Required('Entity2', reverse='d')
class Entity2(db.Entity):
c = Optional('Entity1', reverse='b')
d = Optional('Entity1', reverse='a')
db.generate_mapping()
def test_attribute24(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = PrimaryKey(str, auto=True)
db.generate_mapping(create_tables=True)
self.assertTrue('AUTOINCREMENT' not in db.schema.tables['Entity1'].get_create_command())
@raises_exception(TypeError, "Parameters 'column' and 'columns' cannot be specified simultaneously")
def test_columns1(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int)
attr1 = Optional("Entity2", column='a', columns=['b', 'c'])
class Entity2(db.Entity):
id = PrimaryKey(int)
attr2 = Optional(Entity1)
db.generate_mapping(create_tables=True)
def test_columns2(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, column='a')
self.assertEqual(Entity1.id.columns, ['a'])
def test_columns3(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, columns=['a'])
self.assertEqual(Entity1.id.column, 'a')
@raises_exception(MappingError, "Too many columns were specified for Entity1.id")
def test_columns5(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, columns=['a', 'b'])
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'columns' must be a list. Got: %r'" % {'a'})
def test_columns6(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, columns={'a'})
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'column' must be a string. Got: 4")
def test_columns7(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
id = PrimaryKey(int, column=4)
db.generate_mapping(create_tables=True)
def test_columns8(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int)
b = Required(int)
attr1 = Optional('Entity2')
PrimaryKey(a, b)
class Entity2(db.Entity):
attr2 = Required(Entity1, columns=['x', 'y'])
self.assertEqual(Entity2.attr2.column, None)
self.assertEqual(Entity2.attr2.columns, ['x', 'y'])
@raises_exception(MappingError, 'Invalid number of columns specified for Entity2.attr2')
def test_columns9(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int)
b = Required(int)
attr1 = Optional('Entity2')
PrimaryKey(a, b)
class Entity2(db.Entity):
attr2 = Required(Entity1, columns=['x', 'y', 'z'])
db.generate_mapping(create_tables=True)
@raises_exception(MappingError, 'Invalid number of columns specified for Entity2.attr2')
def test_columns10(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int)
b = Required(int)
attr1 = Optional('Entity2')
PrimaryKey(a, b)
class Entity2(db.Entity):
attr2 = Required(Entity1, column='x')
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Items of parameter 'columns' must be strings. Got: [1, 2]")
def test_columns11(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int)
b = Required(int)
attr1 = Optional('Entity2')
PrimaryKey(a, b)
class Entity2(db.Entity):
attr2 = Required(Entity1, columns=[1, 2])
def test_columns12(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_column='column2', reverse_columns=['column2'])
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameters 'reverse_column' and 'reverse_columns' cannot be specified simultaneously")
def test_columns13(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_column='column2', reverse_columns=['column3'])
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'reverse_column' must be a string. Got: 5")
def test_columns14(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_column=5)
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'reverse_columns' must be a list. Got: 'column3'")
def test_columns15(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_columns='column3')
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'reverse_columns' must be a list of strings. Got: [5]")
def test_columns16(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_columns=[5])
db.generate_mapping(create_tables=True)
def test_columns17(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', column='column1', reverse_columns=['column2'])
db.generate_mapping(create_tables=True)
def test_columns18(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', table='T1')
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Parameter 'table' must be a string. Got: 5")
def test_columns19(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', table=5)
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "Each part of table name must be a string. Got: 1")
def test_columns20(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
attr1 = Set('Entity1', reverse='attr1', table=[1, 'T1'])
db.generate_mapping(create_tables=True)
def test_columns_21(self):
db = Database('sqlite', ':memory:')
class Stat(db.Entity):
webinarshow = Optional('WebinarShow')
class WebinarShow(db.Entity):
stats = Required('Stat')
db.generate_mapping(create_tables=True)
self.assertEqual(Stat.webinarshow.column, None)
self.assertEqual(WebinarShow.stats.column, 'stats')
def test_columns_22(self):
db = Database('sqlite', ':memory:')
class ZStat(db.Entity):
webinarshow = Optional('WebinarShow')
class WebinarShow(db.Entity):
stats = Required('ZStat')
db.generate_mapping(create_tables=True)
self.assertEqual(ZStat.webinarshow.column, None)
self.assertEqual(WebinarShow.stats.column, 'stats')
def test_nullable1(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(unicode, unique=True)
db.generate_mapping(create_tables=True)
self.assertEqual(Entity1.a.nullable, True)
def test_nullable2(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(unicode, unique=True)
db.generate_mapping(create_tables=True)
with db_session:
Entity1()
commit()
Entity1()
commit()
def test_lambda_1(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(lambda: db.Entity2)
class Entity2(db.Entity):
b = Set(lambda: db.Entity1)
db.generate_mapping(create_tables=True)
self.assertEqual(Entity1.a.py_type, Entity2)
self.assertEqual(Entity2.b.py_type, Entity1)
@raises_exception(TypeError, "Invalid type of attribute Entity1.a: expected entity class, got 'Entity2'")
def test_lambda_2(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(lambda: 'Entity2')
class Entity2(db.Entity):
b = Set(lambda: db.Entity1)
db.generate_mapping(create_tables=True)
@raises_exception(ERDiagramError, 'Interrelated entities must belong to same database. '
'Entities Entity1 and Entity2 belongs to different databases')
def test_lambda_3(self):
db1 = Database('sqlite', ':memory:')
class Entity1(db1.Entity):
a = Required(lambda: db2.Entity2)
db2 = Database('sqlite', ':memory:')
class Entity2(db2.Entity):
b = Set(lambda: db1.Entity1)
db1.generate_mapping(create_tables=True)
@raises_exception(ValueError, 'Check for attribute Entity1.a failed. Value: 1')
def test_py_check_1(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int, py_check=lambda val: val > 5 and val < 10)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=1)
def test_py_check_2(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(int, py_check=lambda val: val > 5 and val < 10)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=7)
def test_py_check_3(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(date, py_check=lambda val: val.year >= 2000)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=None)
@raises_exception(ValueError, 'Check for attribute Entity1.a failed. Value: datetime.date(1999, 1, 1)')
def test_py_check_4(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(date, py_check=lambda val: val.year >= 2000)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=date(1999, 1, 1))
def test_py_check_5(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(date, py_check=lambda val: val.year >= 2000)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=date(2010, 1, 1))
@raises_exception(ValueError, 'Should be positive number')
def test_py_check_6(self):
def positive_number(val):
if val <= 0: raise ValueError('Should be positive number')
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(int, py_check=positive_number)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=-1)
def test_py_check_7(self):
def positive_number(val):
if val <= 0: raise ValueError('Should be positive number')
return True
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Optional(int, py_check=positive_number)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a=1)
@raises_exception(NotImplementedError, "'py_check' parameter is not supported for collection attributes")
def test_py_check_8(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required('Entity2')
class Entity2(db.Entity):
a = Set('Entity1', py_check=lambda val: True)
db.generate_mapping(create_tables=True)
def test_py_check_truncate(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(str, py_check=lambda val: False)
db.generate_mapping(create_tables=True)
with db_session:
try:
obj = Entity1(a='1234567890' * 1000)
except ValueError as e:
error_message = "Check for attribute Entity1.a failed. Value: " + (
"u'12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345..." if PY2
else "'123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456...")
self.assertEqual(str(e), error_message)
else:
self.assert_(False)
@raises_exception(ValueError, 'Value for attribute Entity1.a is too long. Max length is 10, value length is 10000')
def test_str_max_len(self):
db = Database('sqlite', ':memory:')
class Entity1(db.Entity):
a = Required(str, 10)
db.generate_mapping(create_tables=True)
with db_session:
obj = Entity1(a='1234567890' * 1000)
def test_foreign_key_sql_type_1(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
id = PrimaryKey(unicode, sql_type='SOME_TYPE')
bars = Set('Bar')
class Bar(db.Entity):
foo = Required(Foo)
db.generate_mapping(create_tables=True)
table = db.schema.tables.get(Bar._table_)
sql_type = table.column_list[1].sql_type
self.assertEqual(sql_type, 'SOME_TYPE')
def test_foreign_key_sql_type_2(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
id = PrimaryKey(unicode, sql_type='SOME_TYPE')
bars = Set('Bar')
class Bar(db.Entity):
foo = Required(Foo, sql_type='ANOTHER_TYPE')
db.generate_mapping(create_tables=True)
table = db.schema.tables.get(Bar._table_)
sql_type = table.column_list[1].sql_type
self.assertEqual(sql_type, 'ANOTHER_TYPE')
def test_foreign_key_sql_type_3(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
id = PrimaryKey(unicode, sql_type='SERIAL')
bars = Set('Bar')
class Bar(db.Entity):
foo = Required(Foo, sql_type='ANOTHER_TYPE')
db.generate_mapping(create_tables=True)
table = db.schema.tables.get(Bar._table_)
sql_type = table.column_list[1].sql_type
self.assertEqual(sql_type, 'ANOTHER_TYPE')
def test_foreign_key_sql_type_4(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
id = PrimaryKey(unicode, sql_type='SERIAL')
bars = Set('Bar')
class Bar(db.Entity):
foo = Required(Foo)
db.generate_mapping(create_tables=True)
table = db.schema.tables.get(Bar._table_)
sql_type = table.column_list[1].sql_type
self.assertEqual(sql_type, 'INTEGER')
def test_foreign_key_sql_type_5(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
id = PrimaryKey(unicode, sql_type='serial')
bars = Set('Bar')
class Bar(db.Entity):
foo = Required(Foo)
db.generate_mapping(create_tables=True)
table = db.schema.tables.get(Bar._table_)
sql_type = table.column_list[1].sql_type
self.assertEqual(sql_type, 'integer')
def test_self_referenced_m2m_1(self):
db = Database('sqlite', ':memory:')
class Node(db.Entity):
id = PrimaryKey(int)
prev_nodes = Set("Node")
next_nodes = Set("Node")
db.generate_mapping(create_tables=True)
def test_implicit_1(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
name = Required(str)
bar = Required("Bar")
class Bar(db.Entity):
id = PrimaryKey(int)
name = Optional(str)
foos = Set("Foo")
db.generate_mapping(create_tables=True)
self.assertTrue(Foo.id.is_implicit)
self.assertFalse(Foo.name.is_implicit)
self.assertFalse(Foo.bar.is_implicit)
self.assertFalse(Bar.id.is_implicit)
self.assertFalse(Bar.name.is_implicit)
self.assertFalse(Bar.foos.is_implicit)
def test_implicit_2(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
x = Required(str)
class Bar(Foo):
y = Required(str)
db.generate_mapping(create_tables=True)
self.assertTrue(Foo.id.is_implicit)
self.assertTrue(Foo.classtype.is_implicit)
self.assertFalse(Foo.x.is_implicit)
self.assertTrue(Bar.id.is_implicit)
self.assertTrue(Bar.classtype.is_implicit)
self.assertFalse(Bar.x.is_implicit)
self.assertFalse(Bar.y.is_implicit)
@raises_exception(TypeError, 'Attribute Foo.x has invalid type NoneType')
def test_none_type(self):
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
x = Required(type(None))
db.generate_mapping(create_tables=True)
@raises_exception(TypeError, "'sql_default' option value cannot be empty string, "
"because it should be valid SQL literal or expression. "
"Try to use \"''\", or just specify default='' instead.")
    def test_empty_sql_default(self):  # renamed: the duplicate name shadowed test_none_type above
db = Database('sqlite', ':memory:')
class Foo(db.Entity):
x = Required(str, sql_default='')
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
feb0de67b1279c42a7f67cadd8c3b7566685ac4c | 2a28a94fc8eb08961e76c61ab73889135153502b | /asposecellscloud/requests/delete_horizontal_page_break_request.py | b8b9619e62e7eae4af87a1eeb338ddd0627b0f6f | [
"MIT"
]
| permissive | aspose-cells-cloud/aspose-cells-cloud-python | 45fc7e686b442302a29a8223e7dbddb71950438c | 270d70ce7f8f3f2ecd9370b1dacfc4789293097e | refs/heads/master | 2023-09-04T01:29:44.242037 | 2023-08-23T13:13:30 | 2023-08-23T13:13:30 | 123,092,364 | 6 | 5 | null | null | null | null | UTF-8 | Python | false | false | 4,339 | py | # coding: utf-8
"""
<copyright company="Aspose" file="DeleteHorizontalPageBreakRequest.cs">
Copyright (c) 2023 Aspose.Cells Cloud
</copyright>
<summary>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
</summary>
"""
import json
from six import iteritems
from asposecellscloud import *
from asposecellscloud.models import *
from asposecellscloud.requests import *
from six.moves.urllib.parse import quote
class DeleteHorizontalPageBreakRequest(object):
def __init__(self , name ,sheet_name ,index ,folder =None ,storage_name =None ):
self.name = name
self.sheet_name = sheet_name
self.index = index
self.folder = folder
self.storage_name = storage_name
def create_http_request(self, api_client):
# verify the required parameter 'name' is set
if self.name is None:
raise ValueError("Missing the required parameter `name` when calling `delete_horizontal_page_break`")
# verify the required parameter 'sheet_name' is set
if self.sheet_name is None:
raise ValueError("Missing the required parameter `sheet_name` when calling `delete_horizontal_page_break`")
# verify the required parameter 'index' is set
if self.index is None:
raise ValueError("Missing the required parameter `index` when calling `delete_horizontal_page_break`")
collection_formats = {}
path_params = {}
if self.name is not None:
path_params['name'] = self.name
if self.sheet_name is not None:
path_params['sheetName'] = self.sheet_name
if self.index is not None:
path_params['index'] = self.index
query_params = []
if self.folder is not None:
query_params.append(('folder',self.folder ))
if self.storage_name is not None:
query_params.append(('storageName',self.storage_name ))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
resource_path = "/cells/{name}/worksheets/{sheetName}/horizontalpagebreaks/{index}"
# path parameters
if path_params:
path_params = api_client.sanitize_for_serialization(path_params)
path_params = api_client.parameters_to_tuples(path_params, collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace('{%s}' % k, quote(str(v), safe='/'))
return {
"method": "DELETE",
"path":resource_path,
"query_params": query_params,
"header_params": header_params,
"form_params": form_params,
"files":local_var_files,
"auth_settings":auth_settings,
"body": body_params,
"collection_formats": collection_formats,
"response_type": 'CellsCloudResponse'
}
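# Usage sketch (illustrative only; assumes an authenticated ApiClient instance
# named api_client built elsewhere with this SDK, and hypothetical workbook values):
#   request = DeleteHorizontalPageBreakRequest(name='Book1.xlsx', sheet_name='Sheet1', index=0)
#   http_request = request.create_http_request(api_client)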
| [
"[email protected]"
]
| |
da0594e90c26aaae98ad9da7493093c03cf47b0b | 0a11a15cf64e25585d28f484bb2118e8f858cfeb | /알고리즘/알고리즘문제/4869_종이붙이기.py | 288c845e4383a2dd1780008c74d376f54243f413 | []
| no_license | seoul-ssafy-class-2-studyclub/GaYoung_SSAFY | 7d9a44afd0dff13fe2ba21f76d0d99c082972116 | 23e0b491d95ffd9c7a74b7f3f74436fe71ed987d | refs/heads/master | 2021-06-30T09:09:00.646827 | 2020-11-30T14:09:03 | 2020-11-30T14:09:03 | 197,476,649 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | def paper(N):
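    # The count below satisfies f(n) = f(n-1) + 2*f(n-2) for n = N/10, which
    # telescopes to f(n) = 2*f(n-1) + (-1)**n -- exactly the parity branch
    # implemented here (N is given in cm, in multiples of 10).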
if N == 10:
return 1
    if (N // 10) % 2 == 1: # odd
return paper(N - 10) * 2 - 1
else:
return paper(N - 10) * 2 + 1
for t in range(int(input())):
N = int(input())
print('#{} {}'.format(t+1, paper(N)))
| [
"[email protected]"
]
| |
11f3ef974d5abdbfdc9ca0f8a54234575470961f | fe2ce1b060ff4e9533385095ad13108f2b06d23e | /vnpy/trader/vtEngine.py | 8d827555716587e0126e1b86a850f41d3d2ded48 | []
| no_license | ukamoy/trader-monitor | 70c95eca465065c299337e5c6e29b3d9eb31e9a5 | 6c222db9a63661b864d504b54387ce7b77aaea0c | refs/heads/master | 2020-04-01T09:23:24.661434 | 2018-12-04T07:38:41 | 2018-12-04T07:38:41 | 153,072,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45,045 | py | # encoding: UTF-8
import os
import shelve
import logging
from logging import handlers
from collections import OrderedDict
from datetime import datetime
from copy import copy
# from pymongo import MongoClient, ASCENDING
# from pymongo.errors import ConnectionFailure
from vnpy.event import Event
from vnpy.trader.vtGlobal import globalSetting
from vnpy.trader.vtEvent import *
from vnpy.trader.vtGateway import *
from vnpy.trader.language import text
from vnpy.trader.vtFunction import getTempPath
########################################################################
class MainEngine(object):
"""主引擎"""
#----------------------------------------------------------------------
def __init__(self, eventEngine):
"""Constructor"""
        # Record today's date
        self.todayDate = datetime.now().strftime('%Y%m%d')
        # Bind the event engine
        self.eventEngine = eventEngine
        self.eventEngine.start()
        # Create the data engine
        self.dataEngine = DataEngine(self.eventEngine)
        # MongoDB related
        # self.dbClient = None # MongoDB client object
        # Gateway instances
        self.gatewayDict = OrderedDict()
        self.gatewayDetailList = []
        # App module instances
        self.appDict = OrderedDict()
        self.appDetailList = []
        # Risk manager engine instance (special standalone object)
        self.rmEngine = None
        # Log engine instance
self.logEngine = None
self.initLogEngine()
#----------------------------------------------------------------------
def addGateway(self, gatewayModule):
"""添加底层接口"""
gatewayName = gatewayModule.gatewayName
gatewayTypeMap = {}
        # Create gateway instances
if type(gatewayName) == list:
for i in range(len(gatewayName)):
self.gatewayDict[gatewayName[i]] = gatewayModule.gatewayClass(self.eventEngine,
gatewayName[i])
            # Set up gateway polling
if gatewayModule.gatewayQryEnabled:
self.gatewayDict[gatewayName[i]].setQryEnabled(gatewayModule.gatewayQryEnabled)
            # Save gateway details
d = {
'gatewayName': gatewayModule.gatewayName[i],
'gatewayDisplayName': gatewayModule.gatewayDisplayName[i],
'gatewayType': gatewayModule.gatewayType
}
self.gatewayDetailList.append(d)
else:
self.gatewayDict[gatewayName] = gatewayModule.gatewayClass(self.eventEngine,
gatewayName)
            # Set up gateway polling
if gatewayModule.gatewayQryEnabled:
self.gatewayDict[gatewayName].setQryEnabled(gatewayModule.gatewayQryEnabled)
            # Save gateway details
d = {
'gatewayName': gatewayModule.gatewayName,
'gatewayDisplayName': gatewayModule.gatewayDisplayName,
'gatewayType': gatewayModule.gatewayType
}
self.gatewayDetailList.append(d)
for i in range(len(self.gatewayDetailList)):
s = self.gatewayDetailList[i]['gatewayName'].split('_connect.json')[0]
gatewayTypeMap[s]=self.gatewayDetailList[i]['gatewayType']
path = os.getcwd()
        # Walk all files under the current directory
for root, subdirs, files in os.walk(path):
for name in files:
                # Only files whose names contain _connect.json are credential config files
if '_connect.json' in name:
gw = name.replace('_connect.json', '')
if not gw in gatewayTypeMap.keys():
for existnames in list(gatewayTypeMap.keys()):
if existnames in gw and existnames!=gw:
d= {
'gatewayName' : gw,
'gatewayDisplayName' : gw,
'gatewayType': gatewayTypeMap[existnames]
}
self.gatewayDetailList.append(d)
self.gatewayDict[gw] = gatewayModule.gatewayClass(self.eventEngine,
gw)
#----------------------------------------------------------------------
def addApp(self, appModule):
"""添加上层应用"""
appName = appModule.appName
# 创建应用实例
self.appDict[appName] = appModule.appEngine(self, self.eventEngine)
# 将应用引擎实例添加到主引擎的属性中
self.__dict__[appName] = self.appDict[appName]
# 保存应用信息
d = {
'appName': appModule.appName,
'appDisplayName': appModule.appDisplayName,
'appWidget': appModule.appWidget,
'appIco': appModule.appIco
}
self.appDetailList.append(d)
#----------------------------------------------------------------------
def getGateway(self, gatewayName):
"""获取接口"""
if gatewayName in self.gatewayDict:
return self.gatewayDict[gatewayName]
else:
self.writeLog(text.GATEWAY_NOT_EXIST.format(gateway=gatewayName))
self.writeLog(gatewayName)
return None
#----------------------------------------------------------------------
def connect(self, gatewayName):
"""连接特定名称的接口"""
gateway = self.getGateway(gatewayName)
if gateway:
gateway.connect()
            # Connect the database automatically after the gateway connects
# self.dbConnect()
#----------------------------------------------------------------------
def subscribe(self, subscribeReq, gatewayName):
"""订阅特定接口的行情"""
gateway = self.getGateway(gatewayName)
if gateway:
gateway.subscribe(subscribeReq)
#----------------------------------------------------------------------
def sendOrder(self, orderReq, gatewayName):
"""对特定接口发单"""
# 如果创建了风控引擎,且风控检查失败则不发单
if self.rmEngine and not self.rmEngine.checkRisk(orderReq, gatewayName):
return ''
gateway = self.getGateway(gatewayName)
if gateway:
vtOrderID = gateway.sendOrder(orderReq)
            self.dataEngine.updateOrderReq(orderReq, vtOrderID) # record the sent order request in the data engine
return vtOrderID
else:
return ''
#----------------------------------------------------------------------
def cancelOrder(self, cancelOrderReq, gatewayName):
"""对特定接口撤单"""
gateway = self.getGateway(gatewayName)
if gateway:
gateway.cancelOrder(cancelOrderReq)
def batchCancelOrder(self,cancelOrderReqList,gatewayName):
gateway = self.getGateway(gatewayName)
if gateway:
gateway.batchCancelOrder(cancelOrderReqList)
#----------------------------------------------------------------------
def qryAccount(self, gatewayName):
"""查询特定接口的账户"""
gateway = self.getGateway(gatewayName)
if gateway:
gateway.qryAccount()
#----------------------------------------------------------------------
def qryPosition(self, gatewayName):
"""查询特定接口的持仓"""
gateway = self.getGateway(gatewayName)
if gateway:
gateway.qryPosition()
#------------------------------------------------
def initPosition(self, vtSymbol):
"""策略初始化时查询特定接口的持仓"""
contract = self.getContract(vtSymbol)
gatewayName = contract.gatewayName
gateway = self.getGateway(gatewayName)
if gateway:
gateway.initPosition(vtSymbol)
def loadHistoryBar(self,vtSymbol,type_,size = None, since = None):
"""策略初始化时下载历史数据"""
contract = self.getContract(vtSymbol)
gatewayName = contract.gatewayName
gateway = self.getGateway(gatewayName)
if gateway:
data = gateway.loadHistoryBar(vtSymbol,type_,size,since)
return data
def qryAllOrders(self, vtSymbol,orderId,status=None):
contract = self.getContract(vtSymbol)
gatewayName = contract.gatewayName
gateway = self.getGateway(gatewayName)
if gateway:
gateway.qryAllOrders(vtSymbol,orderId,status)
#----------------------------------------------------------------------
def exit(self):
"""退出程序前调用,保证正常退出"""
# 安全关闭所有接口
for gateway in list(self.gatewayDict.values()):
gateway.close()
# 停止事件引擎
self.eventEngine.stop()
# 停止上层应用引擎
for appEngine in list(self.appDict.values()):
appEngine.stop()
# 保存数据引擎里的合约数据到硬盘
self.dataEngine.saveContracts()
#----------------------------------------------------------------------
def writeLog(self, content):
"""快速发出日志事件"""
log = VtLogData()
log.logContent = content
log.gatewayName = 'MAIN_ENGINE'
event = Event(type_=EVENT_LOG)
event.dict_['data'] = log
self.eventEngine.put(event)
#----------------------------------------------------------------------
# def dbConnect(self):
# """连接MongoDB数据库"""
# if not self.dbClient:
    #         # Read the MongoDB settings
# try:
    #             # Set the MongoDB operation timeout to 0.5 seconds
# self.dbClient = MongoClient(globalSetting['mongoHost'], globalSetting['mongoPort'], connectTimeoutMS=500)
    #             # Call server_info to check server status, so a faulty server doesn't look connected
# self.dbClient.server_info()
# self.writeLog(text.DATABASE_CONNECTING_COMPLETED)
    #             # If logging is enabled, register the log event listener
# if globalSetting['mongoLogging']:
# self.eventEngine.register(EVENT_LOG, self.dbLogging)
# except ConnectionFailure:
# self.writeLog(text.DATABASE_CONNECTING_FAILED)
# #----------------------------------------------------------------------
# def dbInsert(self, dbName, collectionName, d):
# """向MongoDB中插入数据,d是具体数据"""
# if self.dbClient:
# db = self.dbClient[dbName]
# collection = db[collectionName]
# collection.insert_one(d)
# else:
# self.writeLog(text.DATA_INSERT_FAILED)
# #----------------------------------------------------------------------
# def dbQuery(self, dbName, collectionName, d, sortKey='', sortDirection=ASCENDING):
# """从MongoDB中读取数据,d是查询要求,返回的是数据库查询的指针"""
# if self.dbClient:
# db = self.dbClient[dbName]
# collection = db[collectionName]
# if sortKey:
    #             cursor = collection.find(d).sort(sortKey, sortDirection)    # sort the queried data
# else:
# cursor = collection.find(d)
# if cursor:
# return list(cursor)
# else:
# return []
# else:
# self.writeLog(text.DATA_QUERY_FAILED)
# return []
# #----------------------------------------------------------------------
# def dbUpdate(self, dbName, collectionName, d, flt, upsert=False):
# """向MongoDB中更新数据,d是具体数据,flt是过滤条件,upsert代表若无是否要插入"""
# if self.dbClient:
# db = self.dbClient[dbName]
# collection = db[collectionName]
# collection.replace_one(flt, d, upsert)
# else:
# self.writeLog(text.DATA_UPDATE_FAILED)
# #----------------------------------------------------------------------
# def dbDelete(self, dbName, collectionName, flt):
# """从数据库中删除数据,flt是过滤条件"""
# if self.dbClient:
# db = self.dbClient[dbName]
# collection = db[collectionName]
# collection.delete_one(flt)
# print("c=============shanchu================")
# else:
# self.writeLog(text.DATA_DELETE_FAILED)
# #----------------------------------------------------------------------
# def dbLogging(self, event):
# """向MongoDB中插入日志"""
# log = event.dict_['data']
# d = {
# 'content': log.logContent,
# 'time': log.logTime,
# 'gateway': log.gatewayName
# }
# self.dbInsert(LOG_DB_NAME, self.todayDate, d)
#----------------------------------------------------------------------
def getContract(self, vtSymbol):
"""查询合约"""
return self.dataEngine.getContract(vtSymbol)
#----------------------------------------------------------------------
def getAllContracts(self):
"""查询所有合约(返回列表)"""
return self.dataEngine.getAllContracts()
#----------------------------------------------------------------------
def getOrder(self, vtOrderID):
"""查询委托"""
return self.dataEngine.getOrder(vtOrderID)
#----------------------------------------------------------------------
def getAllWorkingOrders(self):
"""查询所有的活跃的委托(返回列表)"""
return self.dataEngine.getAllWorkingOrders()
#----------------------------------------------------------------------
def getAllOrders(self):
"""查询所有委托"""
return self.dataEngine.getAllOrders()
#----------------------------------------------------------------------
def getAllTrades(self):
"""查询所有成交"""
return self.dataEngine.getAllTrades()
#----------------------------------------------------------------------
def getAllAccounts(self):
"""查询所有账户"""
return self.dataEngine.getAllAccounts()
def getAllPositions(self):
"""查询所有持仓"""
return self.dataEngine.getAllPositions()
#----------------------------------------------------------------------
def getAllPositionDetails(self):
"""查询本地持仓缓存细节"""
return self.dataEngine.getAllPositionDetails()
#----------------------------------------------------------------------
def getAllGatewayDetails(self):
"""查询引擎中所有底层接口的信息"""
return self.gatewayDetailList
#----------------------------------------------------------------------
def getAllAppDetails(self):
"""查询引擎中所有上层应用的信息"""
return self.appDetailList
#----------------------------------------------------------------------
def getApp(self, appName):
"""获取APP引擎对象"""
return self.appDict[appName]
#----------------------------------------------------------------------
def initLogEngine(self):
"""初始化日志引擎"""
if not globalSetting["logActive"]:
return
        # Create the engine
self.logEngine = LogEngine()
        # Set the log level
levelDict = {
"debug": LogEngine.LEVEL_DEBUG,
"info": LogEngine.LEVEL_INFO,
"warn": LogEngine.LEVEL_WARN,
"error": LogEngine.LEVEL_ERROR,
"critical": LogEngine.LEVEL_CRITICAL,
}
level = levelDict.get(globalSetting["logLevel"], LogEngine.LEVEL_CRITICAL)
self.logEngine.setLogLevel(level)
        # Configure outputs
if globalSetting['logConsole']:
self.logEngine.addConsoleHandler()
if globalSetting['logFile']:
self.logEngine.addFileHandler()
        # Register the event listener
self.registerLogEvent(EVENT_LOG)
#----------------------------------------------------------------------
def registerLogEvent(self, eventType):
"""注册日志事件监听"""
if self.logEngine:
self.eventEngine.register(eventType, self.logEngine.processLogEvent)
#----------------------------------------------------------------------
def convertOrderReq(self, req):
"""转换委托请求"""
return self.dataEngine.convertOrderReq(req)
#----------------------------------------------------------------------
def getLog(self):
"""查询日志"""
return self.dataEngine.getLog()
#----------------------------------------------------------------------
def getError(self):
"""查询错误"""
return self.dataEngine.getError()
########################################################################
class DataEngine(object):
"""数据引擎"""
contractFileName = 'ContractData.vt'
contractFilePath = getTempPath(contractFileName)
FINISHED_STATUS = [STATUS_ALLTRADED, STATUS_REJECTED, STATUS_CANCELLED]
#----------------------------------------------------------------------
def __init__(self, eventEngine):
"""Constructor"""
self.eventEngine = eventEngine
        # Dicts and lists that hold the data
self.tickDict = {}
self.contractDict = {}
self.orderDict = {}
        self.workingOrderDict = {} # cancellable working orders
self.tradeDict = {}
self.accountDict = {}
self.positionDict= {}
self.logList = []
self.errorList = []
        # Position detail tracking
self.detailDict = {} # vtSymbol:PositionDetail
        self.tdPenaltyList = globalSetting['tdPenalty'] # product codes penalized for closing today's position
        # Load contract data saved on disk
self.loadContracts()
        # Register event listeners
self.registerEvent()
#----------------------------------------------------------------------
def registerEvent(self):
"""注册事件监听"""
self.eventEngine.register(EVENT_TICK, self.processTickEvent)
self.eventEngine.register(EVENT_CONTRACT, self.processContractEvent)
self.eventEngine.register(EVENT_ORDER, self.processOrderEvent)
self.eventEngine.register(EVENT_TRADE, self.processTradeEvent)
self.eventEngine.register(EVENT_POSITION, self.processPositionEvent)
self.eventEngine.register(EVENT_ACCOUNT, self.processAccountEvent)
self.eventEngine.register(EVENT_LOG, self.processLogEvent)
self.eventEngine.register(EVENT_ERROR, self.processErrorEvent)
#----------------------------------------------------------------------
def processTickEvent(self, event):
"""处理成交事件"""
tick = event.dict_['data']
self.tickDict[tick.vtSymbol] = tick
#----------------------------------------------------------------------
def processContractEvent(self, event):
"""处理合约事件"""
contract = event.dict_['data']
self.contractDict[contract.vtSymbol] = contract
        self.contractDict[contract.symbol] = contract # keying by the plain symbol (without exchange) may cause collisions
#----------------------------------------------------------------------
def processOrderEvent(self, event):
"""处理委托事件"""
order = event.dict_['data']
self.orderDict[order.vtOrderID] = order
# 如果订单的状态是全部成交或者撤销,则需要从workingOrderDict中移除
if order.status in self.FINISHED_STATUS:
if order.vtOrderID in self.workingOrderDict:
del self.workingOrderDict[order.vtOrderID]
# 否则则更新字典中的数据
else:
self.workingOrderDict[order.vtOrderID] = order
# 更新到持仓细节中
detail = self.getPositionDetail(order.vtSymbol)
detail.updateOrder(order)
#----------------------------------------------------------------------
def processTradeEvent(self, event):
"""处理成交事件"""
trade = event.dict_['data']
self.tradeDict[trade.vtTradeID] = trade
        # Update the position detail
detail = self.getPositionDetail(trade.vtSymbol)
detail.updateTrade(trade)
#----------------------------------------------------------------------
def processPositionEvent(self, event):
"""处理持仓事件"""
pos = event.dict_['data']
self.positionDict[pos.vtPositionName] = pos
        # Update the position detail
detail = self.getPositionDetail(pos.vtSymbol)
detail.updatePosition(pos)
#----------------------------------------------------------------------
def processAccountEvent(self, event):
"""处理账户事件"""
account = event.dict_['data']
self.accountDict[account.vtAccountID] = account
#----------------------------------------------------------------------
def processLogEvent(self, event):
"""处理日志事件"""
log = event.dict_['data']
self.logList.append(log)
#----------------------------------------------------------------------
def processErrorEvent(self, event):
"""处理错误事件"""
error = event.dict_['data']
self.errorList.append(error)
#----------------------------------------------------------------------
def getTick(self, vtSymbol):
"""查询行情对象"""
try:
return self.tickDict[vtSymbol]
except KeyError:
return None
#----------------------------------------------------------------------
def getContract(self, vtSymbol):
"""查询合约对象"""
try:
return self.contractDict[vtSymbol]
except KeyError:
return None
#----------------------------------------------------------------------
def getAllContracts(self):
"""查询所有合约对象(返回列表)"""
return self.contractDict.values()
#----------------------------------------------------------------------
def saveContracts(self):
"""保存所有合约对象到硬盘"""
f = shelve.open(self.contractFilePath)
f['data'] = self.contractDict
f.close()
#----------------------------------------------------------------------
def loadContracts(self):
"""从硬盘读取合约对象"""
f = shelve.open(self.contractFilePath)
if 'data' in f:
d = f['data']
for key, value in d.items():
self.contractDict[key] = value
f.close()
#----------------------------------------------------------------------
def getOrder(self, vtOrderID):
"""查询委托"""
try:
return self.orderDict[vtOrderID]
except KeyError:
return None
#----------------------------------------------------------------------
def getAllWorkingOrders(self):
"""查询所有活动委托(返回列表)"""
return self.workingOrderDict.values()
#----------------------------------------------------------------------
def getAllOrders(self):
"""获取所有委托"""
return self.orderDict.values()
#----------------------------------------------------------------------
def getAllTrades(self):
"""获取所有成交"""
return self.tradeDict.values()
#----------------------------------------------------------------------
def getAllPositions(self):
"""获取所有持仓"""
return self.positionDict.values()
#----------------------------------------------------------------------
def getAllAccounts(self):
"""获取所有资金"""
return self.accountDict.values()
#----------------------------------------------------------------------
def getPositionDetail(self, vtSymbol):
"""查询持仓细节"""
if vtSymbol in self.detailDict:
detail = self.detailDict[vtSymbol]
else:
contract = self.getContract(vtSymbol)
detail = PositionDetail(vtSymbol, contract)
self.detailDict[vtSymbol] = detail
        # Set the order-conversion mode for the position detail
contract = self.getContract(vtSymbol)
if contract:
detail.exchange = contract.exchange
            # SHFE contracts
if contract.exchange == EXCHANGE_SHFE:
detail.mode = detail.MODE_SHFE
            # Check for a close-today penalty
for productID in self.tdPenaltyList:
if str(productID) in contract.symbol:
detail.mode = detail.MODE_TDPENALTY
return detail
#----------------------------------------------------------------------
def getAllPositionDetails(self):
"""查询所有本地持仓缓存细节"""
return self.detailDict.values()
#----------------------------------------------------------------------
def updateOrderReq(self, req, vtOrderID):
"""委托请求更新"""
vtSymbol = req.vtSymbol
detail = self.getPositionDetail(vtSymbol)
detail.updateOrderReq(req, vtOrderID)
#----------------------------------------------------------------------
def convertOrderReq(self, req):
"""根据规则转换委托请求"""
detail = self.detailDict.get(req.vtSymbol, None)
if not detail:
return [req]
else:
return detail.convertOrderReq(req)
#----------------------------------------------------------------------
def getLog(self):
"""获取日志"""
return self.logList
#----------------------------------------------------------------------
def getError(self):
"""获取错误"""
return self.errorList
########################################################################
class LogEngine(object):
"""日志引擎"""
    # Log levels
LEVEL_DEBUG = logging.DEBUG
LEVEL_INFO = logging.INFO
LEVEL_WARN = logging.WARN
LEVEL_ERROR = logging.ERROR
LEVEL_CRITICAL = logging.CRITICAL
    # Singleton object
instance = None
#----------------------------------------------------------------------
def __new__(cls, *args, **kwargs):
"""创建对象,保证单例"""
if not cls.instance:
cls.instance = super(LogEngine, cls).__new__(cls, *args, **kwargs)
return cls.instance
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
self.logger = logging.getLogger()
self.formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
self.level = self.LEVEL_CRITICAL
self.consoleHandler = None
self.fileHandler = None
        # Add a NullHandler to avoid the no-handler error output
nullHandler = logging.NullHandler()
self.logger.addHandler(nullHandler)
        # Map each log level to its output function
self.levelFunctionDict = {
self.LEVEL_DEBUG: self.debug,
self.LEVEL_INFO: self.info,
self.LEVEL_WARN: self.warn,
self.LEVEL_ERROR: self.error,
self.LEVEL_CRITICAL: self.critical,
}
#----------------------------------------------------------------------
def setLogLevel(self, level):
"""设置日志级别"""
self.logger.setLevel(level)
self.level = level
#----------------------------------------------------------------------
def addConsoleHandler(self):
"""添加终端输出"""
if not self.consoleHandler:
self.consoleHandler = logging.StreamHandler()
self.consoleHandler.setLevel(self.level)
self.consoleHandler.setFormatter(self.formatter)
self.logger.addHandler(self.consoleHandler)
#----------------------------------------------------------------------
def addFileHandler(self):
"""添加文件输出"""
if not self.fileHandler:
filename = 'vt_' + datetime.now().strftime('%Y%m%d') + '.log'
filepath = getTempPath(filename)
            # self.fileHandler = logging.FileHandler(filepath) # the engine's original handler
            # Cap each log file at 20 MB, at most about 400 MB per day
self.fileHandler = logging.handlers.RotatingFileHandler(
filepath,maxBytes = 20971520,backupCount = 20)
self.fileHandler.setLevel(self.level)
self.fileHandler.setFormatter(self.formatter)
self.logger.addHandler(self.fileHandler)
#----------------------------------------------------------------------
def debug(self, msg):
"""开发时用"""
self.logger.debug(msg)
#----------------------------------------------------------------------
def info(self, msg):
"""正常输出"""
self.logger.info(msg)
#----------------------------------------------------------------------
def warn(self, msg):
"""警告信息"""
self.logger.warn(msg)
#----------------------------------------------------------------------
def error(self, msg):
"""报错输出"""
self.logger.error(msg)
#----------------------------------------------------------------------
def exception(self, msg):
"""报错输出+记录异常信息"""
self.logger.exception(msg)
#----------------------------------------------------------------------
def critical(self, msg):
"""影响程序运行的严重错误"""
self.logger.critical(msg)
#----------------------------------------------------------------------
def processLogEvent(self, event):
"""处理日志事件"""
log = event.dict_['data']
        function = self.levelFunctionDict[log.logLevel] # get the output function for this log level
msg = '\t'.join([log.gatewayName, log.logContent])
function(msg)
########################################################################
class PositionDetail(object):
"""本地维护的持仓信息"""
WORKING_STATUS = [STATUS_UNKNOWN, STATUS_NOTTRADED, STATUS_PARTTRADED]
    MODE_NORMAL = 'normal' # normal mode
    MODE_SHFE = 'shfe' # SHFE: close today/yesterday positions separately
    MODE_TDPENALTY = 'tdpenalty' # close-today penalty
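    # Illustrative SHFE split (hypothetical numbers): with shortTd = 3 and
    # shortYd = 5 all unfrozen, a long close request for 6 lots is converted by
    # convertOrderReq below into [close-today x 3, close-yesterday x 3].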
#----------------------------------------------------------------------
def __init__(self, vtSymbol, contract=None):
"""Constructor"""
self.vtSymbol = vtSymbol
self.symbol = EMPTY_STRING
self.exchange = EMPTY_STRING
self.name = EMPTY_UNICODE
self.size = 1
if contract:
self.symbol = contract.symbol
self.exchange = contract.exchange
self.name = contract.name
self.size = contract.size
self.longPos = EMPTY_INT
self.longYd = EMPTY_INT
self.longTd = EMPTY_INT
self.longPosFrozen = EMPTY_INT
self.longYdFrozen = EMPTY_INT
self.longTdFrozen = EMPTY_INT
self.longPnl = EMPTY_FLOAT
self.longPrice = EMPTY_FLOAT
self.shortPos = EMPTY_INT
self.shortYd = EMPTY_INT
self.shortTd = EMPTY_INT
self.shortPosFrozen = EMPTY_INT
self.shortYdFrozen = EMPTY_INT
self.shortTdFrozen = EMPTY_INT
self.shortPnl = EMPTY_FLOAT
self.shortPrice = EMPTY_FLOAT
self.lastPrice = EMPTY_FLOAT
self.mode = self.MODE_NORMAL
self.exchange = EMPTY_STRING
self.workingOrderDict = {}
#----------------------------------------------------------------------
def updateTrade(self, trade):
"""成交更新"""
# 多头
if trade.direction is DIRECTION_LONG:
# 开仓
if trade.offset is OFFSET_OPEN:
self.longTd += trade.volume
# 平今
elif trade.offset is OFFSET_CLOSETODAY:
self.shortTd -= trade.volume
# 平昨
elif trade.offset is OFFSET_CLOSEYESTERDAY:
self.shortYd -= trade.volume
# 平仓
elif trade.offset is OFFSET_CLOSE:
# 上期所等同于平昨
if self.exchange is EXCHANGE_SHFE:
self.shortYd -= trade.volume
# 非上期所,优先平今
else:
self.shortTd -= trade.volume
if self.shortTd < 0:
self.shortYd += self.shortTd
self.shortTd = 0
# 空头
elif trade.direction is DIRECTION_SHORT:
# 开仓
if trade.offset is OFFSET_OPEN:
self.shortTd += trade.volume
# 平今
elif trade.offset is OFFSET_CLOSETODAY:
self.longTd -= trade.volume
# 平昨
elif trade.offset is OFFSET_CLOSEYESTERDAY:
self.longYd -= trade.volume
# 平仓
elif trade.offset is OFFSET_CLOSE:
# 上期所等同于平昨
if self.exchange is EXCHANGE_SHFE:
self.longYd -= trade.volume
# 非上期所,优先平今
else:
self.longTd -= trade.volume
if self.longTd < 0:
self.longYd += self.longTd
self.longTd = 0
# 汇总
self.calculatePrice(trade)
self.calculatePosition()
self.calculatePnl()
#----------------------------------------------------------------------
def updateOrder(self, order):
"""委托更新"""
# 将活动委托缓存下来
if order.status in self.WORKING_STATUS:
self.workingOrderDict[order.vtOrderID] = order
# 移除缓存中已经完成的委托
else:
if order.vtOrderID in self.workingOrderDict:
del self.workingOrderDict[order.vtOrderID]
# 计算冻结
self.calculateFrozen()
#----------------------------------------------------------------------
def updatePosition(self, pos):
"""持仓更新"""
if pos.direction is DIRECTION_LONG:
self.longPos = pos.position
self.longYd = pos.ydPosition
self.longTd = self.longPos - self.longYd
self.longPnl = pos.positionProfit
self.longPrice = pos.price
elif pos.direction is DIRECTION_SHORT:
self.shortPos = pos.position
self.shortYd = pos.ydPosition
self.shortTd = self.shortPos - self.shortYd
self.shortPnl = pos.positionProfit
self.shortPrice = pos.price
#----------------------------------------------------------------------
def updateOrderReq(self, req, vtOrderID):
"""发单更新"""
vtSymbol = req.vtSymbol
        # Build an order object from the request
order = VtOrderData()
order.vtSymbol = vtSymbol
order.symbol = req.symbol
order.exchange = req.exchange
order.offset = req.offset
order.direction = req.direction
order.totalVolume = req.volume
order.status = STATUS_UNKNOWN
        # Cache it in the dict
self.workingOrderDict[vtOrderID] = order
        # Recalculate frozen volume
self.calculateFrozen()
#----------------------------------------------------------------------
def updateTick(self, tick):
"""行情更新"""
self.lastPrice = tick.lastPrice
self.calculatePnl()
#----------------------------------------------------------------------
def calculatePnl(self):
"""计算持仓盈亏"""
self.longPnl = self.longPos * (self.lastPrice - self.longPrice) * self.size
self.shortPnl = self.shortPos * (self.shortPrice - self.lastPrice) * self.size
#----------------------------------------------------------------------
def calculatePrice(self, trade):
"""计算持仓均价(基于成交数据)"""
# 只有开仓会影响持仓均价
if trade.offset == OFFSET_OPEN:
if trade.direction == DIRECTION_LONG:
cost = self.longPrice * self.longPos
cost += trade.volume * trade.price
newPos = self.longPos + trade.volume
if newPos:
self.longPrice = cost / newPos
else:
self.longPrice = 0
else:
cost = self.shortPrice * self.shortPos
cost += trade.volume * trade.price
newPos = self.shortPos + trade.volume
if newPos:
self.shortPrice = cost / newPos
else:
self.shortPrice = 0
#----------------------------------------------------------------------
def calculatePosition(self):
"""计算持仓情况"""
self.longPos = self.longTd + self.longYd
self.shortPos = self.shortTd + self.shortYd
#----------------------------------------------------------------------
def calculateFrozen(self):
"""计算冻结情况"""
# 清空冻结数据
self.longPosFrozen = EMPTY_INT
self.longYdFrozen = EMPTY_INT
self.longTdFrozen = EMPTY_INT
self.shortPosFrozen = EMPTY_INT
self.shortYdFrozen = EMPTY_INT
self.shortTdFrozen = EMPTY_INT
# 遍历统计
for order in self.workingOrderDict.values():
# 计算剩余冻结量
frozenVolume = order.totalVolume - order.tradedVolume
# 多头委托
if order.direction is DIRECTION_LONG:
# 平今
if order.offset is OFFSET_CLOSETODAY:
self.shortTdFrozen += frozenVolume
# 平昨
elif order.offset is OFFSET_CLOSEYESTERDAY:
self.shortYdFrozen += frozenVolume
# 平仓
elif order.offset is OFFSET_CLOSE:
self.shortTdFrozen += frozenVolume
if self.shortTdFrozen > self.shortTd:
self.shortYdFrozen += (self.shortTdFrozen - self.shortTd)
self.shortTdFrozen = self.shortTd
# 空头委托
elif order.direction is DIRECTION_SHORT:
# 平今
if order.offset is OFFSET_CLOSETODAY:
self.longTdFrozen += frozenVolume
# 平昨
elif order.offset is OFFSET_CLOSEYESTERDAY:
self.longYdFrozen += frozenVolume
# 平仓
elif order.offset is OFFSET_CLOSE:
self.longTdFrozen += frozenVolume
if self.longTdFrozen > self.longTd:
self.longYdFrozen += (self.longTdFrozen - self.longTd)
self.longTdFrozen = self.longTd
# 汇总今昨冻结
self.longPosFrozen = self.longYdFrozen + self.longTdFrozen
self.shortPosFrozen = self.shortYdFrozen + self.shortTdFrozen
#----------------------------------------------------------------------
def convertOrderReq(self, req):
"""转换委托请求"""
# 普通模式无需转换
if self.mode is self.MODE_NORMAL:
return [req]
# 上期所模式拆分今昨,优先平今
elif self.mode is self.MODE_SHFE:
# 开仓无需转换
if req.offset is OFFSET_OPEN:
return [req]
# 多头
if req.direction is DIRECTION_LONG:
posAvailable = self.shortPos - self.shortPosFrozen
tdAvailable = self.shortTd- self.shortTdFrozen
ydAvailable = self.shortYd - self.shortYdFrozen
# 空头
else:
posAvailable = self.longPos - self.longPosFrozen
tdAvailable = self.longTd - self.longTdFrozen
ydAvailable = self.longYd - self.longYdFrozen
# 平仓量超过总可用,拒绝,返回空列表
if req.volume > posAvailable:
return []
# 平仓量小于今可用,全部平今
elif req.volume <= tdAvailable:
req.offset = OFFSET_CLOSETODAY
return [req]
# 平仓量大于今可用,平今再平昨
else:
l = []
if tdAvailable > 0:
reqTd = copy(req)
reqTd.offset = OFFSET_CLOSETODAY
reqTd.volume = tdAvailable
l.append(reqTd)
reqYd = copy(req)
reqYd.offset = OFFSET_CLOSEYESTERDAY
reqYd.volume = req.volume - tdAvailable
l.append(reqYd)
return l
# 平今惩罚模式,没有今仓则平昨,否则锁仓
elif self.mode is self.MODE_TDPENALTY:
# 多头
if req.direction is DIRECTION_LONG:
td = self.shortTd
ydAvailable = self.shortYd - self.shortYdFrozen
# 空头
else:
td = self.longTd
ydAvailable = self.longYd - self.longYdFrozen
# 这里针对开仓和平仓委托均使用一套逻辑
# 如果有今仓,则只能开仓(或锁仓)
if td:
req.offset = OFFSET_OPEN
return [req]
# 如果平仓量小于昨可用,全部平昨
elif req.volume <= ydAvailable:
if self.exchange is EXCHANGE_SHFE:
req.offset = OFFSET_CLOSEYESTERDAY
else:
req.offset = OFFSET_CLOSE
return [req]
# 平仓量大于昨可用,平仓再反向开仓
else:
l = []
if ydAvailable > 0:
reqClose = copy(req)
if self.exchange is EXCHANGE_SHFE:
reqClose.offset = OFFSET_CLOSEYESTERDAY
else:
reqClose.offset = OFFSET_CLOSE
reqClose.volume = ydAvailable
l.append(reqClose)
reqOpen = copy(req)
reqOpen.offset = OFFSET_OPEN
reqOpen.volume = req.volume - ydAvailable
l.append(reqOpen)
return l
# 其他情况则直接返回空
return [] | [
"[email protected]"
]
| |
d20b982fe3c60329974052d5ba1eeb74eab893e8 | 9c84f9d5dc15a7aa5d1caf05b6ae5ea83e39be3a | /python_stack/django/django_full_stack/BellReview/BellReview/settings.py | e601fdec278ca591be2c1b97ab65472513bdbd3b | []
| no_license | tomnguyen103/Coding_Dojo | 0fc4007296feb775b4bcd6ee98f66286b2786adb | ec46b866fc7e58a37d07b63b26b38d19eaeb96f6 | refs/heads/master | 2022-12-28T03:47:57.172540 | 2020-06-15T23:03:50 | 2020-06-15T23:03:50 | 212,214,976 | 1 | 0 | null | 2022-12-11T18:36:51 | 2019-10-01T22:59:37 | Python | UTF-8 | Python | false | false | 3,123 | py | """
Django settings for BellReview project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kffzdwq*+%@j2rnp%f7h!8447u%h-4cm7@$9(4%zm0k$roc&zl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.main',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'BellReview.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'BellReview.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
|