blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
236cf4532f3fdde162ba6752e286002ebdff0b32 | 039c2e60b859d88bb686c0e66bc6dab2ab723b8e | /环境控制系统/wsgi.py | 507ea5c297691da4776aee67c4084fe4aea07c47 | []
| no_license | ccc-0/ECS | 850613971e4c6fd9cbb6ddcbe2c51b5285d622ac | ef4d69cb4c6fd1b1bbd40ba9c754c8e50c56d8ee | refs/heads/master | 2020-09-13T21:50:42.033517 | 2020-02-13T03:47:10 | 2020-02-13T03:47:10 | 222,913,137 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | """
WSGI config for 环境控制系统 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '环境控制系统.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
9fd3a856cee9a4a7617e5f48b2ee83857fea54f9 | 34f3cfeac7fd5a7bbbc5e362bef8bc316f81c1d0 | /asn1tools/source/rust/utils.py | d0de494f331cace95b3a0f8f06fbabb9633d7889 | [
"MIT"
]
| permissive | eerimoq/asn1tools | 860b3623955c12dfb9763ff4e20a805beb7436ba | de25657f7c79100d1ba5312dd7474ff3e0d0ad2e | refs/heads/master | 2023-03-16T09:28:11.924274 | 2023-03-10T20:24:34 | 2023-03-10T20:24:34 | 99,156,277 | 272 | 98 | MIT | 2023-01-03T13:40:36 | 2017-08-02T20:05:05 | Python | UTF-8 | Python | false | false | 18,890 | py | import re
from ...errors import Error
TYPE_DECLARATION_FMT = '''\
/// Type {type_name} in module {module_name}.
{members}
'''
DEFINITION_FMT = '''
impl {module_name}{type_name} {{
pub fn encode(&mut self, mut dst: &mut [u8]) -> Result<usize, Error> {{
let mut encoder = Encoder::new(&mut dst);
self.encode_inner(&mut encoder);
encoder.get_result()
}}
pub fn decode(&mut self, src: &[u8]) -> Result<usize, Error> {{
let mut decoder = Decoder::new(&src);
self.decode_inner(&mut decoder);
decoder.get_result()
}}
fn encode_inner(&mut self, encoder: &mut Encoder) {{
{encode_body}\
}}
fn decode_inner(&mut self, decoder: &mut Decoder) {{
{decode_body}\
}}
}}
'''
ENCODER_AND_DECODER_STRUCTS = '''\
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum Error {
BadChoice,
BadEnum,
BadLength,
OutOfData,
OutOfMemory
}
struct Encoder<'a> {
buf: &'a mut [u8],
size: usize,
pos: usize,
error: Option<Error>
}
struct Decoder<'a> {
buf: &'a[u8],
size: usize,
pos: usize,
error: Option<Error>
}
'''
ENCODER_ABORT = '''
fn abort(&mut self, error: Error) {
if self.error.is_none() {
self.error = Some(error);
}
}\
'''
DECODER_ABORT = '''
fn abort(&mut self, error: Error) {
if self.error.is_none() {
self.error = Some(error);
}
}\
'''
class _MembersBacktracesContext(object):
def __init__(self, backtraces, member_name):
self.backtraces = backtraces
self.member_name = member_name
def __enter__(self):
for backtrace in self.backtraces:
backtrace.append(self.member_name)
def __exit__(self, *args):
for backtrace in self.backtraces:
backtrace.pop()
class _UserType(object):
def __init__(self,
type_name,
module_name,
type_code,
used_user_types):
self.type_name = type_name
self.module_name = module_name
self.type_code = type_code
self.used_user_types = used_user_types
class Generator(object):
    """Base class for Rust source code generators.

    Subclasses implement the ``format_*``/``generate_*`` hooks declared at
    the bottom of this class. This base class keeps the per-type
    bookkeeping: member name backtraces, unique variable names, helper
    lines and referenced user types.
    """

    def __init__(self):
        self.namespace = 'a'
        # Parallel backtraces of the member names currently being visited,
        # once with ASN.1 names and once with generated (C/Rust) names.
        self.asn1_members_backtrace = []
        self.c_members_backtrace = []
        self.module_name = None
        self.type_name = None
        self.helper_lines = []
        # State for unique variable name generation (add_unique_variable()).
        self.base_variables = set()
        self.used_suffixes_by_base_variables = {}
        self.encode_variable_lines = []
        self.decode_variable_lines = []
        # (type_name, module_name) tuples referenced by the current type.
        self.used_user_types = []
def reset_type(self):
self.helper_lines = []
self.base_variables = set()
self.used_suffixes_by_base_variables = {}
self.encode_variable_lines = []
self.decode_variable_lines = []
self.used_user_types = []
    @property
    def module_name_snake(self):
        """Current module name converted to snake_case."""
        return camel_to_snake_case(self.module_name)

    @property
    def type_name_snake(self):
        """Current type name converted to snake_case."""
        return camel_to_snake_case(self.type_name)
def type_length(self, minimum, maximum):
# Make sure it fits in 64 bits.
if minimum < -9223372036854775808:
raise self.error(
'{} does not fit in int64_t.'.format(minimum))
elif maximum > 18446744073709551615:
raise self.error(
'{} does not fit in uint64_t.'.format(maximum))
elif minimum < 0 and maximum > 9223372036854775807:
raise self.error(
'{} does not fit in int64_t.'.format(maximum))
# Calculate the number of bytes needed.
if minimum < -4294967296:
minimum_length = 64
elif minimum < -65536:
minimum_length = 32
elif minimum < -256:
minimum_length = 16
elif minimum < 0:
minimum_length = 8
else:
minimum_length = 0
if maximum > 4294967295:
maximum_length = 64
elif maximum > 65535:
maximum_length = 32
elif maximum > 255:
maximum_length = 16
elif maximum > 0:
maximum_length = 8
else:
maximum_length = 0
if minimum_length == maximum_length == 0:
length = 8
else:
length = max(minimum_length, maximum_length)
return length
def format_type_name(self, minimum, maximum):
length = self.type_length(minimum, maximum)
if minimum >= 0:
type_name = 'u{}'.format(length)
else:
type_name = 'i{}'.format(length)
return type_name
@property
def location(self):
location = '{}{}'.format(self.module_name,
self.type_name)
for member in self.asn1_members_backtrace:
location += make_camel_case(member)
return location
def location_inner(self, default='value', end=''):
if self.c_members_backtrace:
return '.'.join(self.c_members_backtrace) + end
else:
return default
def location_error(self):
location = '{}.{}'.format(self.module_name, self.type_name)
if self.asn1_members_backtrace:
location += '.{}'.format('.'.join(self.asn1_members_backtrace))
return location
    def members_backtrace_push(self, member_name):
        """Push ``member_name`` onto both the ASN.1 and the C backtraces
        for the duration of a ``with`` block."""
        backtraces = [
            self.asn1_members_backtrace,
            self.c_members_backtrace
        ]

        return _MembersBacktracesContext(backtraces, member_name)

    def asn1_members_backtrace_push(self, member_name):
        """Push ``member_name`` onto the ASN.1 backtrace only."""
        backtraces = [self.asn1_members_backtrace]

        return _MembersBacktracesContext(backtraces, member_name)

    def c_members_backtrace_push(self, member_name):
        """Push ``member_name`` onto the C backtrace only."""
        backtraces = [self.c_members_backtrace]

        return _MembersBacktracesContext(backtraces, member_name)
def get_member_checker(self, checker, name):
for member in checker.members:
if member.name == name:
return member
raise Error('No member checker found for {}.'.format(name))
def add_unique_variable(self, name):
if name in self.base_variables:
try:
suffix = self.used_suffixes_by_base_variables[name]
suffix += 1
except KeyError:
suffix = 2
self.used_suffixes_by_base_variables[name] = suffix
unique_name = '{}_{}'.format(name, suffix)
else:
self.base_variables.add(name)
unique_name = name
return unique_name
    def error(self, message):
        """Return an ``Error`` with the current location prepended to
        ``message``."""
        return Error('{}: {}'.format(self.location_error(), message))
def format_integer(self, checker):
if not checker.has_lower_bound():
raise self.error('INTEGER has no minimum value.')
if not checker.has_upper_bound():
raise self.error('INTEGER has no maximum value.')
type_name = self.format_type_name(checker.minimum, checker.maximum)
return [type_name]
    def format_boolean(self):
        """Return the Rust type line for a BOOLEAN."""
        return ['bool']
    def format_octet_string(self, checker):
        """Return the Rust struct declaration lines for an OCTET STRING.

        Raises:
            Error: if the OCTET STRING has no maximum length.
        """
        if not checker.has_upper_bound():
            raise self.error('OCTET STRING has no maximum length.')

        if checker.minimum == checker.maximum:
            # Fixed size: no length field is needed.
            lines = []
        elif checker.maximum < 256:
            lines = ['    let length: u8;']
        else:
            lines = ['    let length: u32;']

        return [
            '#[derive(Debug, Default, PartialEq, Copy, Clone)]',
            'pub struct {} {{'.format(self.location)
        ] + lines + [
            '    pub buf: [u8; {}]'.format(checker.maximum),
            '}'
        ]
    def format_sequence(self, type_, checker):
        """Return the Rust struct declaration lines for a SEQUENCE,
        prepending declarations of any non-inline member types."""
        helper_lines = []
        lines = []

        for member in type_.root_members:
            member_checker = self.get_member_checker(checker, member.name)

            if member.optional:
                # Optional members get an extra presence flag field.
                lines += ['pub is_{}_present: bool,'.format(member.name)]

            with self.members_backtrace_push(member.name):
                member_lines = self.format_type(member, member_checker)
                member_location = self.location

            if not member_lines:
                continue

            if is_inline_member_lines(member_lines):
                member_lines[-1] = 'pub {}: {},'.format(member.name,
                                                        member_lines[-1])
            else:
                # Hoist the member's own type declaration out of the struct
                # and reference it by its generated name.
                helper_lines += member_lines + ['']
                member_lines = ['pub {}: {},'.format(member.name,
                                                     member_location)]

            lines += member_lines

        if lines:
            # The last field must not end with a trailing comma.
            lines[-1] = lines[-1].strip(',')

        return helper_lines + [
            '#[derive(Debug, Default, PartialEq, Copy, Clone)]',
            'pub struct {} {{'.format(self.location)
        ] + indent_lines(lines) + [
            '}'
        ]
    def format_sequence_of(self, type_, checker):
        """Return the Rust struct declaration lines for a SEQUENCE OF.

        Raises:
            Error: if the SEQUENCE OF has no maximum length.
        """
        if not checker.is_bound():
            raise self.error('SEQUENCE OF has no maximum length.')

        with self.asn1_members_backtrace_push('elem'):
            lines = self.format_type(type_.element_type,
                                     checker.element_type)

        if lines:
            lines[-1] += ' elements[{}];'.format(checker.maximum)

        if checker.minimum == checker.maximum:
            # Fixed size: no length field is needed.
            length_lines = []
        elif checker.maximum < 256:
            length_lines = ['let length: u8;']
        else:
            length_lines = ['let length: u32;']

        return ['struct {'] + indent_lines(length_lines + lines) + ['}']
    def format_enumerated(self, type_):
        """Return the Rust enum declaration lines for an ENUMERATED,
        followed by a ``Default`` impl selecting the first value."""
        # NOTE(review): the variants below are camel cased via
        # make_camel_case() while the Default impl uses .upper() on the
        # first value - these presumably should agree; confirm against the
        # generated Rust output.
        lines = [
            '#[derive(Debug, PartialEq, Copy, Clone)]',
            'pub enum {} {{'.format(self.location)
        ] + [
            '    {},'.format(make_camel_case(value))
            for value in self.get_enumerated_values(type_)
        ] + [
            '}',
            '',
            'impl Default for {} {{'.format(self.location),
            '    fn default() -> Self {',
            '        {}::{}'.format(self.location,
                                    self.get_enumerated_values(type_)[0].upper()),
            '    }',
            '}'
        ]

        return lines
    def format_choice(self, type_, checker):
        """Return the Rust enum declaration lines for a CHOICE, prepending
        declarations of any non-inline member types."""
        helper_lines = []
        lines = []

        for member in self.get_choice_members(type_):
            member_checker = self.get_member_checker(checker,
                                                     member.name)

            with self.members_backtrace_push(member.name):
                member_lines = self.format_type(member, member_checker)
                member_location = self.location

            if not member_lines:
                continue

            if is_inline_member_lines(member_lines):
                member_lines[-1] = '{}({}),'.format(make_camel_case(member.name),
                                                    member_lines[-1])
            else:
                helper_lines += member_lines + ['']
                # NOTE(review): struct-style field syntax inside an enum
                # declaration - presumably this should use the variant form
                # of the inline branch above; confirm against generated code.
                member_lines = ['pub {}: {},'.format(member.name,
                                                     member_location)]

            lines += member_lines

        if lines:
            # The last variant must not end with a trailing comma.
            lines[-1] = lines[-1].strip(',')

        return helper_lines + [
            '#[derive(Debug, PartialEq, Copy, Clone)]',
            'pub enum {} {{'.format(self.location)
        ] + indent_lines(lines) + [
            '}'
        ]
def format_user_type(self, type_name, module_name):
self.used_user_types.append((type_name, module_name))
return ['{}{}'.format(module_name, type_name)]
    def format_sequence_inner_member(self,
                                     member,
                                     checker,
                                     default_condition_by_member_name):
        """Return ``(encode_lines, decode_lines)`` for one SEQUENCE member,
        wrapping them in presence/default condition checks as needed."""
        member_checker = self.get_member_checker(checker, member.name)

        with self.members_backtrace_push(member.name):
            encode_lines, decode_lines = self.format_type_inner(
                member,
                member_checker)

        location = self.location_inner('', '.')

        if member.optional:
            # Only encode/decode the member when its presence flag is set.
            is_present = '{}is_{}_present'.format(location, member.name)
            encode_lines = [
                '',
                'if src.{} {{'.format(is_present)
            ] + indent_lines(encode_lines) + [
                '}',
                ''
            ]
            decode_lines = [
                '',
                'if dst.{} {{'.format(is_present)
            ] + indent_lines(decode_lines) + [
                '}',
                ''
            ]
        elif member.default is not None:
            # Members equal to their DEFAULT value are not transmitted.
            name = '{}{}'.format(location, member.name)
            encode_lines = [
                '',
                'if src.{} != {} {{'.format(name, member.default)
            ] + indent_lines(encode_lines) + [
                '}',
                ''
            ]
            decode_lines = [
                '',
                'if {} {{'.format(default_condition_by_member_name[member.name])
            ] + indent_lines(decode_lines) + [
                '} else {',
                '    dst.{} = {};'.format(name, member.default),
                '}',
                ''
            ]

        return encode_lines, decode_lines
    def generate_type_declaration(self, compiled_type):
        """Return the full Rust type declaration source for one compiled
        type."""
        type_ = compiled_type.type
        checker = compiled_type.constraints_checker.type
        lines = self.generate_type_declaration_process(type_, checker)

        if not lines:
            # Empty types are not allowed; emit a placeholder field.
            lines = ['dummy: u8;']

        if self.helper_lines:
            self.helper_lines.append('')

        return TYPE_DECLARATION_FMT.format(module_name=self.module_name,
                                           type_name=self.type_name,
                                           members='\n'.join(lines))
    def generate_definition(self, compiled_type):
        """Return the Rust encode/decode ``impl`` source for one compiled
        type."""
        encode_lines, decode_lines = self.generate_definition_inner_process(
            compiled_type.type,
            compiled_type.constraints_checker.type)

        if self.encode_variable_lines:
            encode_lines = self.encode_variable_lines + [''] + encode_lines

        if self.decode_variable_lines:
            decode_lines = self.decode_variable_lines + [''] + decode_lines

        # Two indentation levels: the impl block plus the method body.
        encode_lines = indent_lines(indent_lines(encode_lines)) + ['']
        decode_lines = indent_lines(indent_lines(decode_lines)) + ['']

        return DEFINITION_FMT.format(module_name=self.module_name,
                                     type_name=self.type_name,
                                     encode_body='\n'.join(encode_lines),
                                     decode_body='\n'.join(decode_lines))
    def generate(self, compiled):
        """Generate Rust source for all modules in ``compiled``.

        Returns:
            A ``(helpers, types_code)`` tuple of source strings, with type
            declarations ordered so that every type appears before its
            first use.
        """
        user_types = []

        for module_name, module in sorted(compiled.modules.items()):
            self.module_name = module_name

            for type_name, compiled_type in sorted(module.items()):
                self.type_name = type_name
                self.reset_type()
                type_declaration = self.generate_type_declaration(compiled_type)
                definition = self.generate_definition(compiled_type)
                user_type = _UserType(type_name,
                                      module_name,
                                      type_declaration + definition,
                                      self.used_user_types)
                user_types.append(user_type)

        user_types = sort_user_types_by_used_user_types(user_types)
        types_code = []

        for user_type in user_types:
            types_code.append(user_type.type_code)

        types_code = '\n'.join(types_code)
        helpers = '\n'.join(self.generate_helpers(types_code))

        return helpers, types_code
    def format_type(self, type_, checker):
        """Hook: return the declaration lines of a type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def format_type_inner(self, type_, checker):
        """Hook: return ``(encode_lines, decode_lines)`` for a type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def get_enumerated_values(self, type_):
        """Hook: return the values of an ENUMERATED type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def get_choice_members(self, type_):
        """Hook: return the members of a CHOICE type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def generate_type_declaration_process(self, type_, checker):
        """Hook: return the declaration lines of a compiled type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def generate_definition_inner_process(self, type_, checker):
        """Hook: return ``(encode_lines, decode_lines)`` for a compiled
        type."""
        raise NotImplementedError('To be implemented by subclasses.')

    def generate_helpers(self, definitions):
        """Hook: return the shared helper source lines."""
        raise NotImplementedError('To be implemented by subclasses.')
def canonical(value):
    """Replace anything but 'a-z', 'A-Z' and '0-9' with '_'.
    """
    return re.sub(r'[^a-zA-Z0-9]', '_', value)
def camel_to_snake_case(value):
    """Convert a CamelCase name to snake_case; any character outside
    [a-zA-Z0-9] also becomes an underscore.

    The substitution order matters: acronym boundaries first, then
    underscore collapsing, then lowercase boundaries.
    """
    value = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', value)
    value = re.sub(r'(_+)', '_', value)
    value = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', value).lower()
    value = canonical(value)

    return value
def make_camel_case(value):
    """Return ``value`` with its first character upper cased.

    Unlike the previous implementation, an empty string is returned
    unchanged instead of raising ``IndexError``.
    """
    return value[:1].upper() + value[1:]
def join_lines(lines, suffix):
    """Append ``suffix`` to every line except the last one."""
    if not lines:
        return []

    return [line + suffix for line in lines[:-1]] + [lines[-1]]
def is_user_type(type_):
    """Return True when ``type_`` was defined in a module, i.e. it is a
    user defined type rather than a builtin one."""
    return type_.module_name is not None
def strip_blank_lines(lines):
    """Drop leading and trailing blank lines and collapse every inner run
    of blank lines into a single one. The input list may be modified."""
    try:
        while lines[0] == '':
            del lines[0]

        while lines[-1] == '':
            del lines[-1]
    except IndexError:
        # The list became (or already was) empty while trimming.
        pass

    stripped = []
    previous_blank = False

    for line in lines:
        if line == '':
            if previous_blank:
                continue

            previous_blank = True
        else:
            previous_blank = False

        stripped.append(line)

    return stripped
def indent_lines(lines, width=4):
    """Indent every non-blank line by ``width`` spaces and strip redundant
    blank lines from the result."""
    prefix = width * ' '
    indented_lines = [prefix + line if line else line for line in lines]

    return strip_blank_lines(indented_lines)
def dedent_lines(lines, width=4):
    """Remove ``width`` leading characters from every line."""
    return [line[width:] for line in lines]
def sort_user_types_by_used_user_types(user_types):
    """Order ``user_types`` so that every type is declared before any type
    that uses it.

    Builds the list in reverse (users before their dependencies), then
    returns a ``reversed`` iterator over it.
    """
    reversed_sorted_user_types = []

    for user_type in user_types:
        user_type_name_tuple = (user_type.type_name, user_type.module_name)

        # Insert first in the reversed list if there are no types
        # using this type.
        insert_index = 0

        # Otherwise insert just after the last already-placed type that
        # uses this one.
        for i, reversed_sorted_user_type in enumerate(reversed_sorted_user_types, 1):
            if user_type_name_tuple in reversed_sorted_user_type.used_user_types:
                if i > insert_index:
                    insert_index = i

        reversed_sorted_user_types.insert(insert_index, user_type)

    return reversed(reversed_sorted_user_types)
def is_inline_member_lines(member_lines):
    """Return True when the member declaration is a single line and can be
    inlined into its parent struct/enum."""
    return len(member_lines) == 1
| [
"[email protected]"
]
| |
8910012782c16a46416210e118ba3994642a3c27 | 480a05a61cc2708e0f6eacb7024333a076009201 | /identYwaf.py | 707a3647fb5a59b3be8f4175c36cac796b0da4f5 | [
"MIT"
]
| permissive | ver007/identYwaf | a9e494ff7a1735184c4926fdd7618852b4f9b3e5 | 52f47dfcd932329326e5d535d62e931e6b9b7d65 | refs/heads/master | 2020-06-04T16:31:41.268847 | 2019-01-15T08:34:17 | 2019-01-15T08:34:17 | 192,104,963 | 1 | 0 | null | 2019-06-15T17:17:29 | 2019-06-15T17:17:29 | null | UTF-8 | Python | false | false | 15,688 | py | #!/usr/bin/env python
"""
Copyright (c) 2019 Miroslav Stampar (@stamparm), MIT
See the file 'LICENSE' for copying permission
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
"""
import base64
import cookielib
import httplib
import json
import optparse
import os
import random
import re
import ssl
import socket
import string
import struct
import subprocess
import sys
import time
import urllib
import urllib2
import zlib
NAME = "identYwaf"
VERSION = "1.0.17"
BANNER = """
` __ __ `
____ ___ ___ ____ ______ `| T T` __ __ ____ _____
l j| \ / _]| \ | T`| | |`| T__T T / T| __|
| T | \ / [_ | _ Yl_j l_j`| ~ |`| | | |Y o || l_
| | | D YY _]| | | | | `|___ |`| | | || || _|
j l | || [_ | | | | | `| !` \ / | | || ]
|____jl_____jl_____jl__j__j l__j `l____/ ` \_/\_/ l__j__jl__j (%s)%s""".strip("\n") % (VERSION, "\n")
RAW, TEXT, HTTPCODE, TITLE, HTML, URL = xrange(6)
COOKIE, UA, REFERER = "Cookie", "User-Agent", "Referer"
GET, POST = "GET", "POST"
GENERIC_PROTECTION_KEYWORDS = ('rejected', 'forbidden', 'suspicious', 'malicious', 'captcha', 'invalid', 'your ip', 'please contact', 'terminated', 'protected', 'unauthorized', 'blocked', 'protection', 'incident', 'denied', 'detected', 'dangerous', 'firewall', 'fw_block', 'unusual activity', 'bad request', 'request id', 'injection', 'permission', 'not acceptable', 'security policy', 'security reasons')
GENERIC_PROTECTION_REGEX = r"(?i)\b(%s)\b"
GENERIC_ERROR_MESSAGE_REGEX = r"\b[A-Z][\w, '-]*(protected by|security|unauthorized|detected|attack|error|rejected|allowed|suspicious|automated|blocked|invalid|denied|permission)[\w, '!-]*"
HEURISTIC_PAYLOAD = "1 AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#"
PAYLOADS = []
SIGNATURES = {}
DATA_JSON = {}
DATA_JSON_FILE = "data.json"
MAX_HELP_OPTION_LENGTH = 18
IS_TTY = sys.stdout.isatty()
COLORIZE = not subprocess.mswindows and IS_TTY
LEVEL_COLORS = {"o": "\033[00;94m", "x": "\033[00;91m", "!": "\033[00;93m", "i": "\033[00;95m", "=": "\033[00;93m", "+": "\033[00;92m", "-": "\033[00;91m"}
VERIFY_OK_INTERVAL = 5
VERIFY_RETRY_TIMES = 3
DEFAULTS = {"timeout": 10}
MAX_MATCHES = 5
if COLORIZE:
for _ in re.findall(r"`.+?`", BANNER):
BANNER = BANNER.replace(_, "\033[01;92m%s\033[00;49m" % _.strip('`'))
for _ in re.findall(r" [Do] ", BANNER):
BANNER = BANNER.replace(_, "\033[01;93m%s\033[00;49m" % _.strip('`'))
BANNER = re.sub(VERSION, r"\033[01;91m%s\033[00;49m" % VERSION, BANNER)
else:
BANNER = BANNER.replace('`', "")
REVISION = random.randint(20, 64)
PLATFORM = random.sample(("X11; %s %s" % (random.sample(("Linux", "Ubuntu; Linux", "U; Linux", "U; OpenBSD", "U; FreeBSD"), 1)[0], random.sample(("amd64", "i586", "i686", "amd64"), 1)[0]), "Windows NT %s%s" % (random.sample(("5.0", "5.1", "5.2", "6.0", "6.1", "6.2", "6.3", "10.0"), 1)[0], random.sample(("", "; Win64", "; WOW64"), 1)[0]), "Macintosh; Intel Mac OS X 10.%s" % random.randint(1, 11)), 1)[0]
USER_AGENT = "Mozilla/5.0 (%s; rv:%d.0) Gecko/20100101 Firefox/%d.0" % (PLATFORM, REVISION, REVISION)
HEADERS = {"User-Agent": USER_AGENT, "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "identity", "Cache-Control": "max-age=0"}
original = None
options = None
intrusive = None
_exit = exit
def exit(message):
    """Print `message` (padded to overwrite leftover progress output) and
    terminate with exit code 1. Intentionally shadows the builtin, which
    was saved above as `_exit`."""
    print "%s%s" % (message, ' ' * 20)  # identYwaf requires usage of Python 2.x
    _exit(1)
def retrieve(url, data=None):
    """Fetch `url` (POST when `data` is given) and return a dict keyed by
    RAW/TEXT/HTTPCODE/TITLE/HTML/URL. Errors are folded into the same dict
    instead of being raised."""
    retval = {}

    try:
        # Percent-encode spaces in the query string part only.
        req = urllib2.Request("".join(url[_].replace(' ', "%20") if _ > url.find('?') else url[_] for _ in xrange(len(url))), data, HEADERS)
        resp = urllib2.urlopen(req, timeout=options.timeout)
        retval[URL] = resp.url
        retval[HTML] = resp.read()
        retval[HTTPCODE] = resp.code
        retval[RAW] = "%s %d %s\n%s\n%s" % (httplib.HTTPConnection._http_vsn_str, retval[HTTPCODE], resp.msg, "".join(resp.headers.headers), retval[HTML])
    except Exception, ex:
        # HTTPError and friends still carry usable response attributes.
        retval[URL] = getattr(ex, "url", url)
        retval[HTTPCODE] = getattr(ex, "code", None)
        try:
            retval[HTML] = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", "")
        except:
            retval[HTML] = ""
        retval[RAW] = "%s %s %s\n%s\n%s" % (httplib.HTTPConnection._http_vsn_str, retval[HTTPCODE] or "", getattr(ex, "msg", ""), "".join(ex.headers.headers) if hasattr(ex, "headers") else "", retval[HTML])

    match = re.search(r"<title>(?P<result>[^<]+)</title>", retval[HTML], re.I)
    retval[TITLE] = match.group("result") if match and "result" in match.groupdict() else None
    # Plain-text rendition of the page (tags, scripts and styles removed).
    retval[TEXT] = re.sub(r"(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>|<[^>]+>|\s+", " ", retval[HTML])

    return retval
def calc_hash(line, binary=True):
    """Return a 16 bit CRC32-based hash of `line`.

    When `binary` is true the hash is returned packed as a 2-byte
    big-endian string, otherwise as an integer.

    Note: the redundant Python 2 long literal suffix ('0xffffL') was
    dropped; '0xffff' behaves identically in Python 2 and keeps the
    expression valid Python 3 syntax as well.
    """
    result = zlib.crc32(line) & 0xffff

    if binary:
        result = struct.pack(">H", result)

    return result
def check_payload(payload, protection_regex=GENERIC_PROTECTION_REGEX % '|'.join(GENERIC_PROTECTION_KEYWORDS)):
    """Send `payload` in a randomly named GET parameter and return True
    when the response looks rejected by a protection mechanism.

    Stores the raw response in the global `intrusive`.
    """
    global intrusive

    time.sleep(options.delay or 0)

    # Random 3-letter parameter name reduces caching/filtering artifacts.
    _ = "%s%s%s=%s" % (options.url, '?' if '?' not in options.url else '&', "".join(random.sample(string.letters, 3)), urllib.quote(payload))
    intrusive = retrieve(_)

    # Rejected when: status code changed, or the title changed on a
    # non-200 response, or a protection keyword appears only in the
    # intrusive response (not in the benign one).
    result = intrusive[HTTPCODE] != original[HTTPCODE] or (intrusive[HTTPCODE] != 200 and intrusive[TITLE] != original[TITLE]) or (re.search(protection_regex, intrusive[HTML]) is not None and re.search(protection_regex, original[HTML]) is None)

    return result
def colorize(message):
    """Return `message` decorated with ANSI color escape sequences
    (returned unchanged when colorizing is disabled)."""
    if COLORIZE:
        # Color the "[x]"-style level markers.
        message = re.sub(r"\[(.)\]", lambda match: "[%s%s\033[00;49m]" % (LEVEL_COLORS[match.group(1)], match.group(1)), message)

        # Highlight quoted values (wider pattern for the rejected summary).
        if "rejected summary" in message:
            for match in re.finditer(r"[^\w]'([^)]+)'", message):
                message = message.replace("'%s'" % match.group(1), "'\033[37m%s\033[00;49m'" % match.group(1), 1)
        else:
            for match in re.finditer(r"[^\w]'([^']+)'", message):
                message = message.replace("'%s'" % match.group(1), "'\033[37m%s\033[00;49m'" % match.group(1), 1)

        if "blind match" in message:
            # Color match percentages by confidence (>=95 green, >80 yellow).
            for match in re.finditer(r"\(((\d+)%)\)", message):
                message = message.replace(match.group(1), "\033[%dm%s\033[00;49m" % (92 if int(match.group(2)) >= 95 else (93 if int(match.group(2)) > 80 else 90), match.group(1)))

        if "hardness" in message:
            # Color the hardness percentage by its severity word.
            for match in re.finditer(r"\(((\d+)%)\)", message):
                message = message.replace(match.group(1), "\033[%dm%s\033[00;49m" % (91 if " insane " in message else (95 if " hard " in message else (93 if " moderate " in message else 92)), match.group(1)))

    return message
def parse_args():
    """Populate the global `options` from command line arguments; the
    target host/url is expected as the last positional argument."""
    global options

    parser = optparse.OptionParser(version=VERSION)
    parser.add_option("--delay", dest="delay", type=int, help="Delay (sec) between tests (default: 0)")
    parser.add_option("--timeout", dest="timeout", type=int, help="Response timeout (sec) (default: 10)")
    parser.add_option("--proxy", dest="proxy", help="HTTP proxy address (e.g. \"http://127.0.0.1:8080\")")

    # Dirty hack(s) for help message
    def _(self, *args):
        # Truncate overly long option strings in the help output.
        retval = parser.formatter._format_option_strings(*args)
        if len(retval) > MAX_HELP_OPTION_LENGTH:
            retval = ("%%.%ds.." % (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % retval
        return retval

    parser.usage = "python %s <host|url>" % parser.usage
    parser.formatter._format_option_strings = parser.formatter.format_option_strings
    parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser, type(parser))

    for _ in ("-h", "--version"):
        option = parser.get_option(_)
        option.help = option.help.capitalize()

    try:
        options, _ = parser.parse_args()
    except SystemExit:
        raise

    if len(sys.argv) > 1:
        url = sys.argv[-1]
        if not url.startswith("http"):
            # Assume plain HTTP when no scheme was given.
            url = "http://%s" % url
        options.url = url
    else:
        parser.print_help()
        raise SystemExit

    # Fill in defaults for options the user did not set.
    for key in DEFAULTS:
        if getattr(options, key, None) is None:
            setattr(options, key, DEFAULTS[key])
def init():
    """Load the WAF signature database and set up the urllib2 handlers
    (cookie jar, unverified SSL context, optional proxy)."""
    os.chdir(os.path.abspath(os.path.dirname(__file__)))

    if os.path.isfile(DATA_JSON_FILE):
        print colorize("[o] loading data...")
        content = open(DATA_JSON_FILE, "rb").read()
        DATA_JSON.update(json.loads(content))

        # Build the reverse signature -> waf lookup table.
        for waf in DATA_JSON["wafs"]:
            for signature in DATA_JSON["wafs"][waf]["signatures"]:
                SIGNATURES[signature] = waf
    else:
        exit(colorize("[x] file '%s' is missing" % DATA_JSON_FILE))

    print colorize("[o] initializing handlers...")

    # Reference: https://stackoverflow.com/a/28052583
    if hasattr(ssl, "_create_unverified_context"):
        ssl._create_default_https_context = ssl._create_unverified_context

    cookie_jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
    urllib2.install_opener(opener)

    if options.proxy:
        # NOTE(review): this replaces the cookie-enabled opener installed
        # just above, so cookie handling is presumably lost when --proxy
        # is used - confirm whether both handlers should share one opener.
        opener = urllib2.build_opener(urllib2.ProxyHandler({"http": options.proxy, "https": options.proxy}))
        urllib2.install_opener(opener)
def run():
    """Main scan: basic heuristic test, non-blind (regex) identification,
    then blind identification via the per-payload rejection signature."""
    global original

    hostname = options.url.split("//")[-1].split('/')[0]

    # Resolve the hostname first unless it is a bare IP address.
    if not hostname.replace('.', "").isdigit():
        print colorize("[i] checking hostname '%s'..." % hostname)
        try:
            socket.getaddrinfo(hostname, None)
        except socket.gaierror:
            exit(colorize("[x] host '%s' does not exist" % hostname))

    results = ""
    signature = ""
    counter = 0
    original = retrieve(options.url)

    # Follow a single redirect to get the effective benign response.
    if 300 <= original[HTTPCODE] < 400 and original[URL]:
        original = retrieve(original[URL])
        options.url = original[URL]

    #if re.search(r"(?i)captcha", original[HTML]) is not None:
        #exit(colorize("[x] there seems to be an activated captcha"))

    if original[HTTPCODE] is None:
        exit(colorize("[x] missing valid response"))

    if original[HTTPCODE] >= 400:
        # Access restricted: still try a non-blind match before bailing out.
        for waf in DATA_JSON["wafs"]:
            if re.search(DATA_JSON["wafs"][waf]["regex"], original[RAW]):
                found = True
                print colorize("[+] non-blind match: '%s'" % DATA_JSON["wafs"][waf]["name"])
                break
        exit(colorize("[x] access to host '%s' seems to be restricted%s" % (hostname, (" (%d: '<title>%s</title>')" % (original[HTTPCODE], original[TITLE].strip())) if original[TITLE] else "")))

    # Only match on keywords that do not already occur in the benign page.
    protection_keywords = GENERIC_PROTECTION_KEYWORDS
    protection_regex = GENERIC_PROTECTION_REGEX % '|'.join(keyword for keyword in protection_keywords if keyword not in original[HTML].lower())

    print colorize("[i] running basic heuristic test...")

    if not check_payload(HEURISTIC_PAYLOAD):
        check = False
        if options.url.startswith("https://"):
            # Retry the heuristic over plain HTTP.
            options.url = options.url.replace("https://", "http://")
            check = check_payload(HEURISTIC_PAYLOAD)
        if not check:
            exit(colorize("[x] host '%s' does not seem to be protected" % hostname))

    if not intrusive[HTTPCODE]:
        # No HTTP response at all - connection was reset or dropped.
        print colorize("[i] rejected summary: RST|DROP")
    else:
        _ = "...".join(match.group(0) for match in re.finditer(GENERIC_ERROR_MESSAGE_REGEX, intrusive[HTML])).strip().replace("  ", " ")
        print colorize(("[i] rejected summary: %d ('%s%s')" % (intrusive[HTTPCODE], ("<title>%s</title>" % intrusive[TITLE]) if intrusive[TITLE] else "", "" if not _ or intrusive[HTTPCODE] < 400 else ("...%s" % _))).replace(" ('')", ""))

    found = False

    # Non-blind identification: known regexes against the rejected response.
    for waf in DATA_JSON["wafs"]:
        if re.search(DATA_JSON["wafs"][waf]["regex"], intrusive[RAW] if intrusive[HTTPCODE] is not None else original[RAW]):
            found = True
            print colorize("[+] non-blind match: '%s'" % DATA_JSON["wafs"][waf]["name"])
            break

    if not found:
        print colorize("[-] non-blind match: -")

    for payload in DATA_JSON["payloads"]:
        counter += 1

        if IS_TTY:
            sys.stdout.write(colorize("\r[i] running payload tests... (%d/%d)\r" % (counter, len(DATA_JSON["payloads"]))))
            sys.stdout.flush()

        if counter % VERIFY_OK_INTERVAL == 0:
            # Periodically verify that benign requests still get through.
            for i in xrange(VERIFY_RETRY_TIMES):
                if not check_payload(str(random.randint(1, 9)), protection_regex):
                    break
                elif i == VERIFY_RETRY_TIMES - 1:
                    exit(colorize("[x] host '%s' seems to be (also) rejecting benign requests%s" % (hostname, (" (%d: '<title>%s</title>')" % (intrusive[HTTPCODE], intrusive[TITLE].strip())) if intrusive[TITLE] else "")))
                else:
                    time.sleep(5)

        last = check_payload(payload, protection_regex)
        # Each payload contributes 15 hash bits plus 1 rejection bit.
        signature += struct.pack(">H", ((calc_hash(payload, binary=False) << 1) | last) & 0xffff)
        results += 'x' if last else '.'

    signature = "%s:%s" % (calc_hash(signature).encode("hex"), base64.b64encode(signature))

    print colorize("%s[=] results: '%s'" % ("\n" if IS_TTY else "", results))

    # Python 2 integer division is intended here.
    hardness = 100 * results.count('x') / len(results)
    print colorize("[=] hardness: %s (%d%%)" % ("insane" if hardness >= 80 else ("hard" if hardness >= 50 else ("moderate" if hardness >= 30 else "easy")), hardness))

    if not results.strip('.'):
        # Nothing was rejected - no blind identification possible.
        print colorize("[-] blind match: -")
    else:
        print colorize("[=] signature: '%s'" % signature)

        if signature in SIGNATURES:
            print colorize("[+] blind match: '%s' (100%%)" % DATA_JSON["wafs"][SIGNATURES[signature]]["name"])
        elif results.count('x') < 3:
            # Too few rejections for a meaningful fuzzy comparison.
            print colorize("[-] blind match: -")
        else:
            # Fuzzy matching: compare 16-bit markers against known signatures.
            matches = {}
            markers = set()
            decoded = signature.split(':')[-1].decode("base64")

            for i in xrange(0, len(decoded), 2):
                part = struct.unpack(">H", decoded[i: i + 2])[0]
                markers.add(part)

            for candidate in SIGNATURES:
                counter_y, counter_n = 0, 0
                decoded = candidate.split(':')[-1].decode("base64")

                for i in xrange(0, len(decoded), 2):
                    part = struct.unpack(">H", decoded[i: i + 2])[0]
                    if part in markers:
                        counter_y += 1
                    elif any(_ in markers for _ in (part & ~1, part | 1)):
                        # Same payload hash but different rejection bit.
                        counter_n += 1

                result = int(round(100 * counter_y / (counter_y + counter_n)))

                # Keep the best score per WAF.
                if SIGNATURES[candidate] in matches:
                    if result > matches[SIGNATURES[candidate]]:
                        matches[SIGNATURES[candidate]] = result
                else:
                    matches[SIGNATURES[candidate]] = result

            matches = [(_[1], _[0]) for _ in matches.items()]
            matches.sort(reverse=True)

            print colorize("[+] blind match: %s" % ", ".join("'%s' (%d%%)" % (DATA_JSON["wafs"][matches[i][1]]["name"], matches[i][0]) for i in xrange(MAX_MATCHES if matches[0][0] != 100 else 1)))

    print
def main():
    """Program entry point: banner, argument parsing, setup, scan."""
    if "--version" not in sys.argv:
        print BANNER

    parse_args()
    init()
    run()
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # The leading '\r' clears the current progress line before exiting.
        exit(colorize("\r[x] Ctrl-C pressed"))
| [
"[email protected]"
]
| |
a302291624c13fd9a1f6808e9c8885774baf1374 | 8b4ca76a9c1e9aba74ce9ca3008f78b0293a8df2 | /algorithms/policy.py | b391e7e85011ff0e3975adf670d34f866c3670ab | []
| no_license | sebastiengilbert73/ReinforcementLearning | 4d2eb94327ee56568216d673b1a90a928e79be55 | b45578ec7603be37968d95c216d4169c276c0ab4 | refs/heads/master | 2021-06-18T06:57:04.815045 | 2021-02-21T18:08:31 | 2021-02-21T18:08:31 | 164,341,263 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,676 | py | import abc
import random
import copy
class LegalActionsAuthority(abc.ABC):
    """
    Abstract class that filters the legal actions in a state, among the actions set
    """
    def __init__(self):
        super().__init__()

    @abc.abstractmethod
    def LegalActions(self, state):
        """Return the set of actions that are legal in ``state``."""
        pass  # return legal_actions_set
class AllActionsLegalAuthority(LegalActionsAuthority):
    """
    Utility class that always allows all actions
    """
    def __init__(self, actions_set):
        super().__init__()
        self.actions_set = actions_set

    def LegalActions(self, state):
        # Deep copy so callers cannot mutate the internal actions set.
        return copy.deepcopy(self.actions_set)
class Policy(abc.ABC):
    """
    Abstract class that selects an action from a state
    """
    def __init__(self, legal_actions_authority):
        super().__init__()
        self.legal_actions_authority = legal_actions_authority

    @abc.abstractmethod
    def ActionProbabilities(self, state):
        """Return a dict mapping each candidate action to its selection
        probability in ``state``."""
        pass  # return action_to_probability_dict

    def Probability(self, state, action):
        """Return the probability of ``action`` in ``state`` (0 when the
        action is not in the probability dict)."""
        action_to_probability_dict = self.ActionProbabilities(state)
        if action in action_to_probability_dict:
            return action_to_probability_dict[action]
        else:
            return 0

    def Select(self, state):
        """Sample an action according to ``ActionProbabilities(state)``.

        NOTE(review): if the probabilities sum to slightly less than 1 due
        to floating point error and the random draw lands above the final
        running sum, this raises ValueError - presumably the probabilities
        are assumed to sum to exactly 1; confirm.
        """
        action_to_probability_dict = self.ActionProbabilities(state)
        action_running_sum_list = []
        running_sum = 0
        for action, probability in action_to_probability_dict.items():
            running_sum += probability
            action_running_sum_list.append((action, running_sum))
        # Inverse transform sampling over the cumulative probabilities.
        random_0to1 = random.random()
        for action_running_sum in action_running_sum_list:
            if action_running_sum[1] >= random_0to1:
                return action_running_sum[0]
        raise ValueError("Policy.Select(): Reached the end of the loop without returning. state = {}; action_running_sum_list = {}; random_0to1 = {}".format(state, action_running_sum_list, random_0to1))
class Random(Policy):
    """Uniform policy: each legal action is selected with equal
    probability."""
    def __init__(self, legal_actions_authority):
        super().__init__(legal_actions_authority)

    def ActionProbabilities(self, state):
        legal_actions_set = self.legal_actions_authority.LegalActions(state)
        uniform_probability = 1 / len(legal_actions_set)
        return {action: uniform_probability for action in legal_actions_set}
class Greedy(Policy):
    """
    Always selects the most valuable action, as kept in a table.
    """
    def __init__(self, state_to_most_valuable_action, legal_actions_authority):
        super().__init__(legal_actions_authority)
        # Deep-copied so later mutations by the caller don't leak in.
        self.state_to_most_valuable_action = copy.deepcopy(state_to_most_valuable_action)

    def ActionProbabilities(self, state):
        """Return a deterministic distribution: the tabled best action
        for `state` with probability 1."""
        legal_actions_set = self.legal_actions_authority.LegalActions(state)
        # Unknown state (previously raised KeyError) or a tabled action
        # that is not legal here: fall back to an arbitrary legal action,
        # mirroring the original initialization-error handling.
        if self.state_to_most_valuable_action.get(state) not in legal_actions_set:
            self.state_to_most_valuable_action[state] = list(legal_actions_set)[0]
        return {self.state_to_most_valuable_action[state]: 1}
class EpsilonGreedy(Policy):
    """
    Epsilon-greedy policy: the probability mass epsilon is spread
    uniformly over all known actions for the state, and the remaining
    (1 - epsilon) goes to the highest-valued action.
    """
    def __init__(self, epsilon, stateAction_to_value, legal_actions_authority=None):
        # Call the base constructor (was missing), keeping the Policy
        # interface consistent with the sibling policies.  The new
        # parameter defaults to None so existing callers are unaffected.
        super().__init__(legal_actions_authority)
        self.epsilon = epsilon
        self.stateAction_to_value = stateAction_to_value
        # Pre-index the (state, action) pairs by state for O(1) lookup.
        self.state_to_stateActions = {}
        for (state, action) in self.stateAction_to_value:
            self.state_to_stateActions.setdefault(state, []).append((state, action))

    def ActionProbabilities(self, state):
        # Unknown states (previously a KeyError) yield an empty
        # distribution, like states with no recorded actions.
        stateActions_list = self.state_to_stateActions.get(state, [])
        if len(stateActions_list) == 0:
            return {}
        # Find the greedy (highest-valued) action.
        most_valuable_action = None
        highest_value = float('-inf')
        for (_state, action) in stateActions_list:
            value = self.stateAction_to_value[(_state, action)]
            if value > highest_value:
                highest_value = value
                most_valuable_action = action
        number_of_actions = len(stateActions_list)
        action_to_probability = {}
        for (_state, action) in stateActions_list:
            action_to_probability[action] = self.epsilon / number_of_actions
        action_to_probability[most_valuable_action] += (1.0 - self.epsilon)
        return action_to_probability
return action_to_probability | [
"[email protected]"
]
| |
efba4b2d600c69a51bb39a34812f080182f4990d | 8b301e17d5f42e1050bb15cde9b28a2db33d0662 | /mysite/myAPI/checkcode.py | f92f73c41b11dab9987fad65e488cce789056e4d | [
"Apache-2.0"
]
| permissive | wuchunlong0/blog_uk_vue_mylogin | 413bd482b649f2bf0e45cdfe5dc964ac0f75e72b | eece41870822a38c130318c10e6dc348a088a864 | refs/heads/master | 2020-05-09T18:04:45.718255 | 2019-04-14T15:13:01 | 2019-04-14T15:13:01 | 181,323,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,418 | py | # -*- coding: utf-8 -*-
import os,sys
from io import BytesIO as StringIO
from django.shortcuts import render
import random
from django.http.response import HttpResponseRedirect, HttpResponse
from PIL import Image, ImageDraw, ImageFont, ImageFilter
# Alphabet for CAPTCHA challenges.  Note several letters are absent,
# presumably to avoid visually ambiguous glyphs -- TODO confirm.
FONT_TYPE = "static_common/home/fonts/DroidSans.ttf"
_letter_cases = "abcdefghnpqrstuvxy".upper()
_upper_cases = _letter_cases
_numbers = ''.join(str(digit) for digit in range(3, 8))
init_chars = ''.join((_letter_cases, _upper_cases, _numbers))


def get_chars(chars=init_chars, length=4):
    """Return `length` characters sampled without replacement from `chars`."""
    return random.sample(chars, length)
def create_validate_code(request,size=(120, 30), mode="RGB",
                         bg_color=(255, 255, 255),
                         fg_color=(255, 0, 0),
                         font_size=22,
                         font_type=FONT_TYPE,
                         draw_lines=True,
                         n_line=(1, 3),
                         draw_points=True,
                         point_chance = 2):
    """Render the session's CAPTCHA characters into a distorted PIL
    image.

    Reads the challenge from request.session['checkcode'] (must already
    be set by the caller).  Returns (img, strs): the PIL Image and the
    challenge characters joined into a string.
    """
    width, height = size
    img = Image.new(mode, size, bg_color)
    draw = ImageDraw.Draw(img)
    def create_lines():
        # Draw a random number of black noise lines (bounds from n_line).
        line_num = random.randint(*n_line)
        for i in range(line_num):
            begin = (random.randint(0, size[0]), random.randint(0, size[1]))
            end = (random.randint(0, size[0]), random.randint(0, size[1]))
            draw.line([begin, end], fill=(0, 0, 0))
    def create_points():
        # Speckle noise: each pixel turns black with `chance` percent
        # probability (clamped to 0..100).
        chance = min(100, max(0, int(point_chance)))
        for w in range(width):
            for h in range(height):
                tmp = random.randint(0, 100)
                if tmp > 100 - chance:
                    draw.point((w, h), fill=(0, 0, 0))
    def create_strs():
        # Draw the session challenge, space-separated and padded.
        c_chars =request.session['checkcode']
        strs = ' %s ' % ' '.join(c_chars)
        font = ImageFont.truetype(font_type, font_size)
        # NOTE(review): ImageFont.getsize() was removed in Pillow 10;
        # newer Pillow needs getbbox()/getlength() -- confirm the pinned
        # Pillow version.
        font_width, font_height = font.getsize(strs)
        draw.text(((width - font_width) / 3, (height - font_height) / 3),
                  strs, font=font, fill=fg_color)
        return ''.join(c_chars)
    if draw_lines:
        create_lines()
    if draw_points:
        create_points()
    strs = create_strs()
    # Random mild perspective warp, then edge enhancement, to hinder OCR.
    params = [1 - float(random.randint(1, 12)) / 100,
              0,
              0,
              0,
              1 - float(random.randint(1, 10)) / 100,
              float(random.randint(1, 2)) / 500,
              0.001,
              float(random.randint(1, 2)) / 500
              ]
    img = img.transform(size, Image.PERSPECTIVE, params)
    img = img.filter(ImageFilter.EDGE_ENHANCE_MORE)
    return img, strs
def gcheckcode(request):
    """Generate a fresh CAPTCHA challenge, store its characters in the
    session and return them joined into a single string."""
    code_chars = get_chars()
    request.session['checkcode'] = code_chars
    return ''.join(code_chars)
# http://localhost:9000/home/checkcodeGIF/
def checkcodeGIF(request):
    """Render the current session's CAPTCHA as a GIF image response."""
    img_type="GIF"
    if not request.session.get('checkcode',''):
        # No challenge in the session yet: seed a default one.
        request.session['checkcode'] = '1234'
    image, _text = create_validate_code(request)
    mstream = StringIO()
    image.save(mstream, img_type)   # serialize the image into memory
    codeImg = mstream.getvalue()    # grab the encoded bytes
    mstream.close()
    return HttpResponse(codeImg, img_type)
# http://localhost:8000/home/getcheckcode/
def getcheckcode(request):
    """Regenerate the session CAPTCHA and render the template named by
    the optional ?path= query parameter (default '__base__.html')."""
    # NOTE(review): locals() is passed as the template context, so the
    # local variable names (g_checkcode, path, request) are part of the
    # template contract -- do not rename them.
    g_checkcode = gcheckcode(request)
    path = request.GET.get('path','__base__.html')
    return render(request, path, context=locals())
| [
"[email protected]"
]
| |
b786a8bf22bcc9fa6769a2bdd445c84df32550ce | c2c8915d745411a0268ee5ce18d8bf7532a09e1a | /cybox-2.1.0.5/cybox/bindings/domain_name_object.py | a65065255c04bcb38b3bf5ca7c47ea1efa9b991a | [
"BSD-3-Clause"
]
| permissive | asealey/crits_dependencies | 581d44e77f297af7edb78d08f0bf11ad6712b3ab | a8049c214c4570188f6101cedbacf669168f5e52 | refs/heads/master | 2021-01-17T11:50:10.020346 | 2014-12-28T06:53:01 | 2014-12-28T06:53:01 | 28,555,464 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,281 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Wed Jan 15 13:08:03 2014 by generateDS.py version 2.9a.
#
import sys
import getopt
import re as re_
import cybox_common
import base64
from datetime import datetime, tzinfo, timedelta
# Will hold whichever ElementTree implementation the cascade below imports.
etree_ = None
Verbose_import_ = False
# Enumeration of the possible XML parser backends.
( XMLParser_import_none, XMLParser_import_lxml,
    XMLParser_import_elementtree
) = range(3)
# Set by the import cascade below to the backend actually in use.
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError(
"Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document with the selected backend, forcing lxml's
    ElementTree-compatible parser (which ignores comments) when lxml is
    in use and no explicit parser was supplied."""
    needs_compat_parser = (
        XMLParser_import_library == XMLParser_import_lxml
        and 'parser' not in kwargs)
    if needs_compat_parser:
        kwargs['parser'] = etree_.ETCompatXMLParser()
    return etree_.parse(*args, **kwargs)
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError, exp:
    # Fallback: no user-supplied generatedssuper module is installed, so
    # define the default superclass for generated binding classes.  It
    # provides the per-XML-Schema-type format/validate helpers called by
    # the generated export code, plus small tree-navigation utilities.
    # (Generated by generateDS.py; Python 2 syntax.)
    class GeneratedsSuper(object):
        # Matches a trailing timezone offset such as "+05:30" or "-14:00".
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(tzinfo):
            # Minimal fixed-offset tzinfo (pre-dates datetime.timezone).
            def __init__(self, offset, name):
                self.__offset = timedelta(minutes = offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node, input_name=''):
            return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_integer_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of integers')
            return input_data
        def gds_format_float(self, input_data, input_name=''):
            return '%f' % input_data
        def gds_validate_float(self, input_data, node, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_float_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of floats')
            return input_data
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_double_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of doubles')
            return input_data
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_boolean_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return input_data
        def gds_validate_datetime(self, input_data, node, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            # Serialize to ISO 8601, appending "Z" or "+HH:MM"/-HH:MM when
            # the datetime is timezone-aware.
            if input_data.microsecond == 0:
                _svalue = input_data.strftime('%Y-%m-%dT%H:%M:%S')
            else:
                _svalue = input_data.strftime('%Y-%m-%dT%H:%M:%S.%f')
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_parse_datetime(self, input_data, node, input_name=''):
            # Strip a trailing "Z" or "+HH:MM" offset into a tzinfo, then
            # parse the remaining ISO 8601 text.
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'GMT')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            return dt.replace(tzinfo = tz)
        def gds_validate_date(self, input_data, node, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = input_data.strftime('%Y-%m-%d')
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_parse_date(self, input_data, node, input_name=''):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'GMT')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            return datetime.strptime(input_data,
                '%Y-%m-%d').replace(tzinfo = tz)
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            # Build an XPath-like "/a/b/c" path from the root to `node`.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # Recursively collect namespace-stripped tags up to the root
            # (relies on lxml's getparent()).
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            # Resolve an xsi:type attribute to a class in this module's
            # globals, falling back to `default_class`.
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Character encoding used when exporting text values.
ExternalEncoding = 'utf-8'
# Splits an element tag into an optional "{namespace}" part and the local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Matches runs of whitespace (newlines, carriage returns, spaces).
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Captures the namespace URI and local name from a "{uri}name" tag.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write `level` levels of 4-space indentation to `outfile`; a no-op
    when pretty-printing is disabled."""
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape '&', '<' and '>' in `inStr` for XML text content; falsy
    input yields ''.

    Restores the standard XML entity escaping: the replace() calls had
    been corrupted (by HTML-entity unescaping of the file) into no-ops
    like replace('&', '&'), leaving exported XML unescaped and invalid.
    """
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or
        '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
    """Return `inStr` rendered as a quoted Python string literal,
    picking single/double and triple quoting compatible with the
    content."""
    s1 = inStr
    if "'" not in s1:
        # No single quotes inside: single-quote (triple for multi-line).
        return ("'%s'" if '\n' not in s1 else "'''%s'''") % s1
    if '"' in s1:
        s1 = s1.replace('"', '\\"')
    return ('"%s"' if '\n' not in s1 else '"""%s"""') % s1
def get_all_text_(node):
    """Concatenate the node's leading text with the tails of all of its
    children, i.e. all character data directly inside `node`."""
    pieces = [node.text or '']
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up an attribute on `node`, resolving a "prefix:name"
    qualified attribute through the node's namespace map (lxml nsmap).

    Returns None when the attribute (or its namespace prefix) is absent.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, name = parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            return attrs.get('{%s}%s' % (namespace, name, ))
    return None
class GDSParseError(Exception):
    """Raised when the input XML cannot be mapped onto the generated
    bindings."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for `node`, including the source line number
    when the lxml backend (which exposes .sourceline) is active."""
    if XMLParser_import_library == XMLParser_import_lxml:
        detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    else:
        detail = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(detail)
class MixedContainer:
    """Wrapper for one piece of mixed XML content (text, a simple typed
    value, or a nested complex element), tagged with a category and a
    content type so the exporters know how to serialize it."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace, pretty_print=True):
        # Dispatch on category: bare text, simple typed element, or a
        # nested binding object with its own export().
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else: # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace, name, pretty_print)
    def exportSimple(self, outfile, level, name):
        # Write "<name>value</name>" with a format chosen by content_type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' %
                (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' %
                (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' %
                (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' %
                (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' %
                (self.name, base64.b64encode(self.value), self.name))
    def to_etree(self, element):
        # Mirror of export() for building an ElementTree instead of text.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                # Text after a child goes into that child's .tail;
                # otherwise it is the element's own .text.
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else: # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        # Emit Python-source form ("literal" export) of this container.
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n'
                % (self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n'
                % (self.category, self.content_type, self.name, self.value))
        else: # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s",\n' % \
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Describes one member of a generated binding class: its name, its
    schema data type (possibly a chain of types) and whether it is a
    list container."""

    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        # A list is a chain of types; the effective type is the last
        # one, defaulting to xs:string for an empty chain.
        if isinstance(self.data_type, list):
            return self.data_type[-1] if self.data_type else 'xs:string'
        return self.data_type

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class DomainNameObjectType(cybox_common.ObjectPropertiesType):
    """The DomainNameObjectType type is intended to characterize network
    domain names.The type field specifies the type of Domain name
    that is being defined."""
    # Schema metadata for this binding's members (used by generateDS
    # tooling): the 'type' attribute and the 'Value' child element.
    member_data_items_ = {
        'type': MemberSpec_('type', 'DomainNameObj:DomainNameTypeEnum', 0),
        'Value': MemberSpec_('Value', 'cybox_common.StringObjectPropertyType', 0),
        }
    subclass = None
    superclass = cybox_common.ObjectPropertiesType
    def __init__(self, object_reference=None, Custom_Properties=None, xsi_type=None, type_=None, Value=None):
        super(DomainNameObjectType, self).__init__(object_reference, Custom_Properties, xsi_type)
        self.type_ = _cast(None, type_)
        self.Value = Value
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        if DomainNameObjectType.subclass:
            return DomainNameObjectType.subclass(*args_, **kwargs_)
        else:
            return DomainNameObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Value(self): return self.Value
    def set_Value(self, Value): self.Value = Value
    def validate_StringObjectPropertyType(self, value):
        # Validate type cybox_common.StringObjectPropertyType, a restriction on None.
        pass
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def hasContent_(self):
        # True when this element has children worth serializing.
        if (
            self.Value is not None or
            super(DomainNameObjectType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='DomainNameObj:', name_='DomainNameObjectType', namespacedef_='', pretty_print=True):
        # Serialize this object as XML text to `outfile`.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DomainNameObjectType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='DomainNameObj:', name_='DomainNameObjectType'):
        super(DomainNameObjectType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='DomainNameObjectType')
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (quote_attrib(self.type_), ))
    def exportChildren(self, outfile, level, namespace_='DomainNameObj:', name_='DomainNameObjectType', fromsubclass_=False, pretty_print=True):
        super(DomainNameObjectType, self).exportChildren(outfile, level, 'DomainNameObj:', name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Value is not None:
            self.Value.export(outfile, level, 'DomainNameObj:', name_='Value', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from a parsed XML element tree node.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        super(DomainNameObjectType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Value':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Value(obj_)
        super(DomainNameObjectType, self).buildChildren(child_, node, nodeName_, True)
# end class DomainNameObjectType
# Maps XML element local names to the binding class used to build them
# (consulted by get_root_tag() before falling back to module globals).
GDSClassesMapping = {
    'Build_Utility': cybox_common.BuildUtilityType,
    'Byte_Order': cybox_common.EndiannessType,
    'Errors': cybox_common.ErrorsType,
    'Time': cybox_common.TimeType,
    'Certificate_Issuer': cybox_common.StringObjectPropertyType,
    'Metadata': cybox_common.MetadataType,
    'Hash': cybox_common.HashType,
    'Information_Source_Type': cybox_common.ControlledVocabularyStringType,
    'Internal_Strings': cybox_common.InternalStringsType,
    'Fuzzy_Hash_Structure': cybox_common.FuzzyHashStructureType,
    'SubDatum': cybox_common.MetadataType,
    'Segment_Hash': cybox_common.HashValueType,
    'Digital_Signature': cybox_common.DigitalSignatureInfoType,
    'Code_Snippets': cybox_common.CodeSnippetsType,
    'Value': cybox_common.StringObjectPropertyType,
    'Length': cybox_common.IntegerObjectPropertyType,
    'Produced_Time': cybox_common.DateTimeWithPrecisionType,
    'Reference': cybox_common.ToolReferenceType,
    'Encoding': cybox_common.ControlledVocabularyStringType,
    'Internationalization_Settings': cybox_common.InternationalizationSettingsType,
    'Tool_Configuration': cybox_common.ToolConfigurationType,
    'English_Translation': cybox_common.StringObjectPropertyType,
    'Start_Date': cybox_common.DateWithPrecisionType,
    'Functions': cybox_common.FunctionsType,
    'String_Value': cybox_common.StringObjectPropertyType,
    'Build_Utility_Platform_Specification': cybox_common.PlatformSpecificationType,
    'Compiler_Informal_Description': cybox_common.CompilerInformalDescriptionType,
    'Start_Time': cybox_common.DateTimeWithPrecisionType,
    'System': cybox_common.ObjectPropertiesType,
    'Platform': cybox_common.PlatformSpecificationType,
    'Usage_Context_Assumptions': cybox_common.UsageContextAssumptionsType,
    'Type': cybox_common.ControlledVocabularyStringType,
    'Compilers': cybox_common.CompilersType,
    'Tool_Type': cybox_common.ControlledVocabularyStringType,
    'String': cybox_common.ExtractedStringType,
    'Custom_Properties': cybox_common.CustomPropertiesType,
    'Build_Information': cybox_common.BuildInformationType,
    'Tool_Hashes': cybox_common.HashListType,
    'Observable_Location': cybox_common.LocationType,
    'Error_Instances': cybox_common.ErrorInstancesType,
    'End_Date': cybox_common.DateWithPrecisionType,
    'Data_Segment': cybox_common.StringObjectPropertyType,
    'Certificate_Subject': cybox_common.StringObjectPropertyType,
    'Language': cybox_common.StringObjectPropertyType,
    'Compensation_Model': cybox_common.CompensationModelType,
    'Property': cybox_common.PropertyType,
    'Strings': cybox_common.ExtractedStringsType,
    'Contributors': cybox_common.PersonnelType,
    'Code_Snippet': cybox_common.ObjectPropertiesType,
    'Configuration_Settings': cybox_common.ConfigurationSettingsType,
    'Simple_Hash_Value': cybox_common.SimpleHashValueType,
    'Byte_String_Value': cybox_common.HexBinaryObjectPropertyType,
    'Received_Time': cybox_common.DateTimeWithPrecisionType,
    'Instance': cybox_common.ObjectPropertiesType,
    'Import': cybox_common.StringObjectPropertyType,
    'Identifier': cybox_common.PlatformIdentifierType,
    'Tool_Specific_Data': cybox_common.ToolSpecificDataType,
    'Execution_Environment': cybox_common.ExecutionEnvironmentType,
    'Search_Distance': cybox_common.IntegerObjectPropertyType,
    'Dependencies': cybox_common.DependenciesType,
    'Offset': cybox_common.IntegerObjectPropertyType,
    'Date': cybox_common.DateRangeType,
    'Hashes': cybox_common.HashListType,
    'Segments': cybox_common.HashSegmentsType,
    'Segment_Count': cybox_common.IntegerObjectPropertyType,
    'Usage_Context_Assumption': cybox_common.StructuredTextType,
    'Block_Hash': cybox_common.FuzzyHashBlockType,
    'Dependency': cybox_common.DependencyType,
    'Error': cybox_common.ErrorType,
    'Trigger_Point': cybox_common.HexBinaryObjectPropertyType,
    'Environment_Variable': cybox_common.EnvironmentVariableType,
    'Byte_Run': cybox_common.ByteRunType,
    'File_System_Offset': cybox_common.IntegerObjectPropertyType,
    'Image_Offset': cybox_common.IntegerObjectPropertyType,
    'Imports': cybox_common.ImportsType,
    'Library': cybox_common.LibraryType,
    'References': cybox_common.ToolReferencesType,
    'Compilation_Date': cybox_common.DateTimeWithPrecisionType,
    'Block_Hash_Value': cybox_common.HashValueType,
    'Configuration_Setting': cybox_common.ConfigurationSettingType,
    'Observation_Location': cybox_common.LocationType,
    'Libraries': cybox_common.LibrariesType,
    'Function': cybox_common.StringObjectPropertyType,
    'Description': cybox_common.StructuredTextType,
    'User_Account_Info': cybox_common.ObjectPropertiesType,
    'Build_Configuration': cybox_common.BuildConfigurationType,
    'Address': cybox_common.HexBinaryObjectPropertyType,
    'Search_Within': cybox_common.IntegerObjectPropertyType,
    'Segment': cybox_common.HashSegmentType,
    'Compiler': cybox_common.CompilerType,
    'Name': cybox_common.StringObjectPropertyType,
    'Signature_Description': cybox_common.StringObjectPropertyType,
    'Block_Size': cybox_common.IntegerObjectPropertyType,
    'Compiler_Platform_Specification': cybox_common.PlatformSpecificationType,
    'Fuzzy_Hash_Value': cybox_common.FuzzyHashValueType,
    'Data_Size': cybox_common.DataSizeType,
    'Dependency_Description': cybox_common.StructuredTextType,
    'End_Time': cybox_common.DateTimeWithPrecisionType,
    'Contributor': cybox_common.ContributorType,
    'Tools': cybox_common.ToolsInformationType,
    'Tool': cybox_common.ToolInformationType,
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    # Print CLI help and terminate with a non-zero status (Python 2).
    print USAGE_TEXT
    sys.exit(1)
def get_root_tag(node):
    """Map an element's tag to (local_tag, binding_class), consulting
    the generated class mapping first and module globals second; the
    class is None when neither knows the tag."""
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    rootClass = GDSClassesMapping.get(tag) or globals().get(tag)
    return tag, rootClass
def parse(inFileName):
    """Parse `inFileName` into a binding object, echo it to stdout as
    XML, and return it.  Unknown roots default to Domain_Name."""
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'Domain_Name', DomainNameObjectType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    doc = None  # release the DOM so it can be garbage-collected
    sys.stdout.write('<?xml version="1.0" ?>\n')
    rootObj.export(
        sys.stdout, 0, name_=rootTag, namespacedef_='', pretty_print=True)
    return rootObj
def parseEtree(inFileName):
    """Parse `inFileName` into a binding object, convert it back to an
    etree element, print it, and return (object, element)."""
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'Domain_Name', DomainNameObjectType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    doc = None  # release the DOM so it can be garbage-collected
    rootElement = rootObj.to_etree(None, name_=rootTag)
    content = etree_.tostring(
        rootElement, pretty_print=True,
        xml_declaration=True, encoding="utf-8")
    sys.stdout.write(content)
    sys.stdout.write('\n')
    return rootObj, rootElement
def parseString(inString):
    """Parse XML from the string *inString*, echo it to stdout, and return the root object."""
    try:
        from StringIO import StringIO  # Python 2
    except ImportError:
        from io import StringIO  # Python 3
    doc = parsexml_(StringIO(inString))
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'Domain_Name'
        rootClass = DomainNameObjectType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    # Export under the detected root tag, consistent with parse() and
    # parseEtree(); previously the tag was hard-coded to 'Domain_Name'
    # even when a different root element had been recognized.
    rootObj.export(sys.stdout, 0, name_=rootTag,
                   namespacedef_='')
    return rootObj
def main():
    """Command-line entry point: parse the single input-file argument."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()  # prints usage and exits
    parse(args[0])
# Script entry point.
if __name__ == '__main__':
    # Uncomment the next line to drop into the debugger on startup.
    #import pdb; pdb.set_trace()
    main()
__all__ = [
"DomainNameObjectType"
] | [
"[email protected]"
]
| |
ca405a4f780ebbfd858f71371a55f11a09f0d47c | 0022918e83e63026846e7bd508098e21752bb924 | /test.py | 40b8c2ecc438ca89d099be2fa88201c521df6ab9 | []
| no_license | sebbacon/formexperiment | 5089c144d9159e889283d15ae200827507bf3e63 | 98450091ff72e4f08748076bc905fdd0f97c04ad | refs/heads/master | 2020-06-05T07:13:21.661566 | 2011-01-26T18:55:25 | 2011-01-26T18:55:25 | 1,282,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | import doctest
# Pull in the unittest cases so unittest.main() below can discover them.
from test_with_peppercorn import *
import unittest
# Run the doctest file first, then hand control to unittest, which runs
# the imported test cases and exits with the overall result.
doctest.testfile("test_simple_data.txt")
unittest.main()
| [
"[email protected]"
]
| |
86c4579e69639f21cd77bf45cfc84b101d9ccfff | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /xlsxwriter/test/drawing/test_drawing_chart01.py | 5c7893a9c2c67067496a261dfe31cb992bb3ae86 | [
"BSD-2-Clause"
]
| permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,125 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
import unittest
from io import StringIO
from ..helperfunctions import _xml_to_list
from ...drawing import Drawing
class TestAssembleDrawing(unittest.TestCase):
    """
    Test assembling a complete Drawing file.

    """
    def test_assemble_xml_file(self):
        """Test writing a drawing with no cell data."""
        self.maxDiff = None
        fh = StringIO()
        drawing = Drawing()
        drawing._set_filehandle(fh)
        # Anchor dimensions: from-cell (col 4, row 8, offsets 457200/104775)
        # and to-cell (col 12, row 22, offsets 152400/180975) -- mirrored in
        # the expected <xdr:from>/<xdr:to> elements below.
        dimensions = [4, 8, 457200, 104775, 12, 22, 152400, 180975, 0, 0]
        # Configure the drawing object; type 1 is rendered as a chart
        # graphicFrame referencing relationship rId1 in the expected XML.
        drawing_object = drawing._add_drawing_object()
        drawing_object['type'] = 1
        drawing_object['dimensions'] = dimensions
        drawing_object['width'] = 0
        drawing_object['height'] = 0
        drawing_object['description'] = None
        drawing_object['shape'] = None
        drawing_object['anchor'] = 1
        drawing_object['rel_index'] = 1
        drawing_object['url_rel_index'] = 0
        drawing_object['tip'] = None
        drawing.embedded = 1
        drawing._assemble_xml_file()
        exp = _xml_to_list("""
                <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
                <xdr:wsDr xmlns:xdr="http://schemas.openxmlformats.org/drawingml/2006/spreadsheetDrawing" xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main">
                  <xdr:twoCellAnchor>
                    <xdr:from>
                      <xdr:col>4</xdr:col>
                      <xdr:colOff>457200</xdr:colOff>
                      <xdr:row>8</xdr:row>
                      <xdr:rowOff>104775</xdr:rowOff>
                    </xdr:from>
                    <xdr:to>
                      <xdr:col>12</xdr:col>
                      <xdr:colOff>152400</xdr:colOff>
                      <xdr:row>22</xdr:row>
                      <xdr:rowOff>180975</xdr:rowOff>
                    </xdr:to>
                    <xdr:graphicFrame macro="">
                      <xdr:nvGraphicFramePr>
                        <xdr:cNvPr id="2" name="Chart 1"/>
                        <xdr:cNvGraphicFramePr/>
                      </xdr:nvGraphicFramePr>
                      <xdr:xfrm>
                        <a:off x="0" y="0"/>
                        <a:ext cx="0" cy="0"/>
                      </xdr:xfrm>
                      <a:graphic>
                        <a:graphicData uri="http://schemas.openxmlformats.org/drawingml/2006/chart">
                          <c:chart xmlns:c="http://schemas.openxmlformats.org/drawingml/2006/chart" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" r:id="rId1"/>
                        </a:graphicData>
                      </a:graphic>
                    </xdr:graphicFrame>
                    <xdr:clientData/>
                  </xdr:twoCellAnchor>
                </xdr:wsDr>
                """)
        got = _xml_to_list(fh.getvalue())
        self.assertEqual(got, exp)
| [
"[email protected]"
]
| |
7cd58dcae29db07ef376b8e7374e440ee9d0f5cf | a5597d74049fcbe1e1e3afca1f4196243f2e7c90 | /glyce/utils/crazy_finetune.py | 1e496882105744af27eb0a6cb408eb0daa8357e7 | [
"Apache-2.0"
]
| permissive | YuChen17Heaven/glyce | 72759d8699bbe37ecd2221e90b8ec06a8844fd29 | 62369e3cc37442ed191862b77d87d0c17c8454f8 | refs/heads/master | 2020-06-14T01:52:41.111642 | 2019-06-30T10:52:10 | 2019-06-30T10:52:10 | 194,857,610 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,840 | py | # encoding: utf-8
"""
@author: wuwei
@contact: [email protected]
@version: 1.0
@license: Apache Licence
@file: crazy_finetune.py
@time: 19-1-2 下午9:50
写for循环疯狂调参
python main.py --highway --nfeat 128 --use_wubi --gpu_id 3
"""
import os
import sys
root_path = "/".join(os.path.realpath(__file__).split("/")[:-3])
if root_path not in sys.path:
sys.path.insert(0, root_path)
import logging
from itertools import product
root_path = "/".join(os.path.realpath(__file__).split("/")[:-3])
print(root_path)
if root_path not in sys.path:
sys.path.insert(0, root_path)
# font_name = '/data/nfsdata/nlp/fonts/useful'
font_name = os.path.join(root_path, "fonts")
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler('run.log')
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(ch)
# The first element of each option list is the default setting.
finetune_options = {
    'word_embsize': [2048],
    'num_fonts_concat': [0],
    'output_size': [2048],
    'gpu_id': [2],
}
def construct_command(setting):
    """Build the training command line for one hyper-parameter setting.

    A value of exactly True becomes a bare ``--flag``, exactly False is
    omitted entirely, and anything else is rendered as ``--flag value``.
    """
    parts = ['python -m glyph_embedding.experiments.run_lm']
    for feature, option in setting.items():
        if option is True:
            parts.append(F' --{feature}')
        elif option is False:
            parts.append('')
        else:
            parts.append(F' --{feature} {option}')
    return ''.join(parts)
def traverse():
    """Vary one parameter at a time from the default configuration.

    With m parameters of n options each, this launches m*(n-1) runs, each
    differing from the defaults in exactly one option.
    """
    default_setting = {k: v[0] for k, v in finetune_options.items()}
    for feature in finetune_options:
        for i, option in enumerate(finetune_options[feature]):
            if i and default_setting[feature] != option:  # skip the default
                # Copy the defaults before overriding one entry. The
                # original code aliased ``default_setting`` here, so every
                # override mutated the defaults and leaked into all later
                # iterations.
                setting = dict(default_setting)
                setting[feature] = option
                command = construct_command(setting)
                logger.info(command)
                try:
                    message = os.popen(command).read()
                except Exception:
                    # Narrowed from a bare ``except`` so Ctrl-C still works.
                    message = '进程启动失败!!'
                logger.info(message)
def grid_search():
    """Launch one training run for every combination in the option grid."""
    for combo in product(*finetune_options.values()):
        setting = dict(zip(finetune_options.keys(), combo))
        command = construct_command(setting)
        logger.info(command)
        try:
            message = os.popen(command).read()
        except:
            message = '进程启动失败!!'
        logger.info(message)
if __name__ == '__main__':
grid_search()
| [
"[email protected]"
]
| |
6777ff2e763c0748a5200c9729d79c3fecf1cc50 | 503d2f8f5f5f547acb82f7299d86886691966ca5 | /atcoder/hhkb2020_b.py | c8dd560750da5c973e42132f7c0e4108860b8814 | []
| no_license | Hironobu-Kawaguchi/atcoder | 3fcb649cb920dd837a1ced6713bbb939ecc090a9 | df4b55cc7d557bf61607ffde8bda8655cf129017 | refs/heads/master | 2023-08-21T14:13:13.856604 | 2023-08-12T14:53:03 | 2023-08-12T14:53:03 | 197,216,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 972 | py | # https://atcoder.jp/contests/hhkb2020/tasks/hhkb2020_b
# import sys
# # def input(): return sys.stdin.readline().rstrip()
# # input = sys.stdin.readline
# input = sys.stdin.buffer.readline
# from numba import njit
# from functools import lru_cache
# sys.setrecursionlimit(10 ** 7)
# @njit('(i8,i8[::1],i4[::1])', cache=True)
# def main():
# @lru_cache(None)
# def dfs():
# return
# return
# main()
# Read the H x W grid of '.' (empty) and '#' cells from stdin.
H, W = map(int, input().split())
S = [input() for _ in range(H)]
# Count adjacent pairs of empty cells: vertical neighbours first, then
# horizontal neighbours.
vertical = sum(
    S[i][j] == '.' and S[i + 1][j] == '.'
    for i in range(H - 1) for j in range(W)
)
horizontal = sum(
    S[i][j] == '.' and S[i][j + 1] == '.'
    for i in range(H) for j in range(W - 1)
)
print(vertical + horizontal)
# S = input()
# n = int(input())
# N, K = map(int, input().split())
# l = list(map(int, (input().split())))
# A = [[int(i) for i in input().split()] for _ in range(N)]
| [
"[email protected]"
]
| |
c17f88aad274adc6efb8f07f2e1f91def04c6283 | 28a462a28f443c285ca5efec181ebe36b147c167 | /tests/compile/basic/es2019/Array.prototype.toString.spec | 85d911ac433e5e0bb58bf5affb55278c27760195 | [
"BSD-3-Clause",
"BSD-2-Clause"
]
| permissive | kaist-plrg/jstar | 63e71f9156860dc21cccc33a9f6c638dfee448ea | 1282919127ea18a7e40c7a55e63a1ddaaf7d9db4 | refs/heads/main | 2022-07-22T08:12:34.947712 | 2022-02-27T04:19:33 | 2022-02-27T11:06:14 | 384,045,526 | 6 | 4 | NOASSERTION | 2022-02-27T11:05:26 | 2021-07-08T07:53:21 | Python | UTF-8 | Python | false | false | 256 | spec | 1. Let _array_ be ? ToObject(*this* value).
1. Let _func_ be ? Get(_array_, `"join"`).
1. If IsCallable(_func_) is *false*, set _func_ to the intrinsic function %ObjProto_toString%.
1. Return ? Call(_func_, _array_). | [
"[email protected]"
]
| |
b0f03d06d9223ce7f593796a991af26bc1c4bfd1 | 01a45aa09bd266e25dae4d2ba9fceddea2441844 | /todo_back/todos/serializer.py | e89a5ee7e7d956df2ee97ace0e468bd6dc0a0c8b | []
| no_license | gusk94/Vue-Django | 1959e75ffee39f3839fc9bafaf79eead724023fa | 82213a96e8d5bc684beb7cf3fcf212bbfcaf8019 | refs/heads/master | 2023-01-10T15:20:08.635383 | 2021-01-06T15:12:59 | 2021-01-06T15:12:59 | 222,366,577 | 0 | 0 | null | 2023-01-05T01:06:52 | 2019-11-18T04:55:18 | Python | UTF-8 | Python | false | false | 467 | py | from rest_framework import serializers
from django.contrib.auth import get_user_model
from .models import Todo
User = get_user_model()
class TodoSerializer(serializers.ModelSerializer):
    """Serializer exposing a Todo's id, owner, title and completion flag."""
    class Meta:
        model = Todo
        fields = ('id', 'user', 'title', 'completed', )
class UserDetailSerializer(serializers.ModelSerializer):
    """User serializer that nests the user's todos via the reverse relation."""
    # ``todo_set`` is the default reverse accessor for Todo.user.
    todo_set = TodoSerializer(many=True)
    class Meta:
        model = User
        fields = ('id', 'username', 'todo_set', )
| [
"[email protected]"
]
| |
823f39203dec17fdc778ad33dcc6296c31fcf5a4 | 86cd22354f2431087c9b3ff06188f071afb3eb72 | /113. Path Sum II.py | 702d8007e664f27151d6db9cd322c6f685000c06 | []
| no_license | tlxxzj/leetcode | 0c072a74d7e61ef4700388122f2270e46c4ac22e | 06dbf4f5b505a6a41e0d93367eedd231b611a84b | refs/heads/master | 2023-08-31T11:04:34.585532 | 2023-08-31T08:25:51 | 2023-08-31T08:25:51 | 94,386,828 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def pathSum(self, root: TreeNode, targetSum: int) -> List[List[int]]:
        """Return every root-to-leaf path whose node values sum to targetSum.

        Breadth-first traversal: each frontier entry carries the running
        sum, the current node, and the list of values taken so far.
        """
        paths = []
        frontier = [(root.val, root, [root.val])] if root else []
        while frontier:
            next_frontier = []
            for running, node, path in frontier:
                is_leaf = node.left is None and node.right is None
                if is_leaf and running == targetSum:
                    paths.append(path)
                    continue
                if node.left:
                    next_frontier.append(
                        (running + node.left.val, node.left, path + [node.left.val]))
                if node.right:
                    next_frontier.append(
                        (running + node.right.val, node.right, path + [node.right.val]))
            frontier = next_frontier
        return paths
"[email protected]"
]
| |
f8f7ce5994c6a5c8be5690040c6ae3e271794bd7 | 20cda6f6b14d9b91e64d43b8261f7832572be85f | /pyschema/f143_structure/ArrayULong.py | 4954b9b5940f5c8444c0ab0ad08e5b807c97ded1 | []
| no_license | ess-dmsc/lauschangriff | f9f2bacb7a5483423919fbfc8948e8a56a070800 | 3735c5f84798efc280e0931bc48129339658f400 | refs/heads/master | 2021-08-19T13:22:41.702602 | 2020-04-21T15:13:18 | 2020-04-21T15:13:18 | 168,178,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,603 | py | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: f143_structure
import flatbuffers
class ArrayULong(object):
    """Read-only accessor for the FlatBuffers ``ArrayULong`` table.

    Generated by the FlatBuffers compiler -- regenerate from the schema
    rather than editing by hand.
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsArrayULong(cls, buf, offset):
        # Resolve the root table offset within *buf* and wrap it.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = ArrayULong()
        x.Init(buf, n + offset)
        return x
    # ArrayULong
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)
    # ArrayULong
    def Value(self, j):
        # The uint64 vector field lives at vtable offset 4; elements are
        # 8 bytes wide. Returns 0 when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0
    # ArrayULong
    def ValueAsNumpy(self):
        # Zero-copy view of the vector as a NumPy array; 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0
    # ArrayULong
    def ValueLength(self):
        # Number of elements in the vector; 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
# Builder helpers for serialising an ArrayULong table (generated code):
# Start/End bracket the table, AddValue stores the vector offset in slot 0,
# and StartValueVector begins a uint64 (8-byte, 8-aligned) vector.
def ArrayULongStart(builder): builder.StartObject(1)
def ArrayULongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0)
def ArrayULongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def ArrayULongEnd(builder): return builder.EndObject()
| [
"[email protected]"
]
| |
d984996776b3ea153d203518e3b9b95d6a4ce351 | fc2fa418295e015f867b26b6ab91133f26eff0bb | /ExampleCode/gathering.py | 5d31cc8d1e1f5136712e91fdadabfb3a873d7c1e | [
"LicenseRef-scancode-warranty-disclaimer"
]
| no_license | land-boards/PiCluster | 893b0809d5ceeaba2425cd3cfd79598911a65989 | e7a508ab1be25e50b79c585ea861118e37ba9bb3 | refs/heads/master | 2022-04-28T13:02:10.307315 | 2022-04-20T09:55:52 | 2022-04-20T09:55:52 | 62,474,727 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()
numDataPerRank = 10
sendbuf = [rank, size]
print('Rank: ',rank, ', sendbuf: ',sendbuf)
recvbuf = None
if rank == 0:
recvbuf = []
comm.Gather(sendbuf, recvbuf, root=0)
if rank == 0:
print('Rank: ',rank, ', recvbuf received: ',recvbuf)
| [
"[email protected]"
]
| |
0d4b5ab246bcd2e91a31ac44a798d0bed067d702 | bc233c24523f05708dd1e091dca817f9095e6bb5 | /bitmovin_api_sdk/models/ad_analytics_contains_filter.py | 354ab8dfcae40bdd9aa2cf0bd8e53fb1c6d34042 | [
"MIT"
]
| permissive | bitmovin/bitmovin-api-sdk-python | e3d6cf8eb8bdad62cb83ec77c0fc4950b06b9cdd | b0860c0b1be7747cf22ad060985504da625255eb | refs/heads/main | 2023-09-01T15:41:03.628720 | 2023-08-30T10:52:13 | 2023-08-30T10:52:13 | 175,209,828 | 13 | 14 | MIT | 2021-04-29T12:30:31 | 2019-03-12T12:47:18 | Python | UTF-8 | Python | false | false | 3,799 | py | # coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
from bitmovin_api_sdk.models.ad_analytics_abstract_filter import AdAnalyticsAbstractFilter
from bitmovin_api_sdk.models.ad_analytics_attribute import AdAnalyticsAttribute
import pprint
import six
class AdAnalyticsContainsFilter(AdAnalyticsAbstractFilter):
    """Ad-analytics 'contains' filter: pairs the inherited attribute name
    with a comparison ``value``. OpenAPI-style model class.
    """
    @poscheck_model
    def __init__(self,
                 name=None,
                 value=None):
        # type: (AdAnalyticsAttribute, object) -> None
        super(AdAnalyticsContainsFilter, self).__init__(name=name)
        self._value = None
        self.discriminator = None
        if value is not None:
            self.value = value
    @property
    def openapi_types(self):
        # Merge this class's field-type map with the parent's.
        types = {}
        if hasattr(super(AdAnalyticsContainsFilter, self), 'openapi_types'):
            types = getattr(super(AdAnalyticsContainsFilter, self), 'openapi_types')
        types.update({
            'value': 'object'
        })
        return types
    @property
    def attribute_map(self):
        # Merge this class's attribute-name map with the parent's.
        attributes = {}
        if hasattr(super(AdAnalyticsContainsFilter, self), 'attribute_map'):
            attributes = getattr(super(AdAnalyticsContainsFilter, self), 'attribute_map')
        attributes.update({
            'value': 'value'
        })
        return attributes
    @property
    def value(self):
        # type: () -> object
        """Gets the value of this AdAnalyticsContainsFilter.

        :return: The value of this AdAnalyticsContainsFilter.
        :rtype: object
        """
        return self._value
    @value.setter
    def value(self, value):
        # type: (object) -> None
        """Sets the value of this AdAnalyticsContainsFilter.

        :param value: The value of this AdAnalyticsContainsFilter.
        :type: object
        """
        if value is not None:
            # NOTE(review): isinstance(value, object) is always True in
            # Python, so this TypeError branch is unreachable.
            if not isinstance(value, object):
                raise TypeError("Invalid type for `value`, type has to be `object`")
        self._value = value
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        if hasattr(super(AdAnalyticsContainsFilter, self), "to_dict"):
            result = super(AdAnalyticsContainsFilter, self).to_dict()
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if value is None:
                continue
            if isinstance(value, list):
                if len(value) == 0:
                    continue
                result[self.attribute_map.get(attr)] = [y.value if isinstance(y, Enum) else y for y in [x.to_dict() if hasattr(x, "to_dict") else x for x in value]]
            elif hasattr(value, "to_dict"):
                result[self.attribute_map.get(attr)] = value.to_dict()
            elif isinstance(value, Enum):
                result[self.attribute_map.get(attr)] = value.value
            elif isinstance(value, dict):
                result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, "to_dict") else v) for (k, v) in value.items()}
            else:
                result[self.attribute_map.get(attr)] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, AdAnalyticsContainsFilter):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
ed6cc8e324e0e9d722dcff91f07aa8abc8df16f3 | a606893da1e354c7c617d0c9247b23118be2813a | /模拟考试/7_20/t8.py | 178465ca3405c74f23f6334c46828faea35f0af3 | []
| no_license | lindo-zy/leetcode | 4ce6cb9ded7eeea0a6953b6d8152b5a9657965da | f4277c11e620ddd748c2a2f3d9f5f05ee58e5716 | refs/heads/master | 2023-07-22T06:19:00.589026 | 2023-07-16T12:35:14 | 2023-07-16T12:35:14 | 229,958,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | #!/usr/bin/python3
# -*- coding:utf-8 -*-
from itertools import product
from typing import List
class Solution:
def letterCombinations(self, digits: str) -> List[str]:
ds = {2: 'abc', 3: 'def', 4: "ghi",
5: 'jkl', 6: 'mno', 7: 'pqrs',
8: 'tuv', 9: 'wxyz'}
words = [ds[int(i)] for i in digits]
return [''.join(i) for i in list(product(*words)) if i]
if __name__ == '__main__':
s = Solution()
digits = "234"
print(s.letterCombinations(digits))
| [
"[email protected]"
]
| |
58031043f16f68a8aeec4dd903bd58ef62d5e307 | 42c63d5f9c724c99ba93f77bdead51891fcf8623 | /OpenStack-Mitaka-src/designate/designate/mdns/notify.py | 8f3de6c53a97c1fedd4eb1bf6bb8a1af254f72a6 | [
"Apache-2.0"
]
| permissive | liyongle/openstack-mitaka | 115ae819d42ed9bf0922a8c0ab584fa99a3daf92 | 5ccd31c6c3b9aa68b9db1bdafcf1b029e8e37b33 | refs/heads/master | 2021-07-13T04:57:53.488114 | 2019-03-07T13:26:25 | 2019-03-07T13:26:25 | 174,311,782 | 0 | 1 | null | 2020-07-24T01:44:47 | 2019-03-07T09:18:55 | Python | UTF-8 | Python | false | false | 13,514 | py | # Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import socket
import eventlet
import dns
import dns.rdataclass
import dns.rdatatype
import dns.exception
import dns.flags
import dns.rcode
import dns.message
import dns.opcode
from oslo_config import cfg
from oslo_log import log as logging
from designate.mdns import base
from designate.i18n import _LI
from designate.i18n import _LW
dns_query = eventlet.import_patched('dns.query')
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class NotifyEndpoint(base.BaseEndpoint):
    """RPC endpoint that sends DNS NOTIFY messages to nameservers and polls
    them for SOA serial numbers, reporting zone status to Pool Manager.
    """
    # Version and namespace under which this endpoint's methods are
    # exposed to the RPC layer.
    RPC_API_VERSION = '2.0'
    RPC_API_NAMESPACE = 'notify'
    def notify_zone_changed(self, context, zone, host, port, timeout,
                            retry_interval, max_retries, delay):
        """
        :param context: The user context.
        :param zone: The designate zone object. This contains the zone
            name.
        :param host: A notify is sent to this host.
        :param port: A notify is sent to this port.
        :param timeout: The time (in seconds) to wait for a NOTIFY response
            from server.
        :param retry_interval: The time (in seconds) between retries.
        :param max_retries: The maximum number of retries mindns would do for
            sending a NOTIFY message. After this many retries, mindns gives up.
        :param delay: The time to wait before sending the first NOTIFY request.
        :return: a tuple of (response, current_retry) where
            response is the response on success or None on failure.
            current_retry is the current retry number.
            The return value is just used for testing and not by pool manager.
        """
        time.sleep(delay)
        return self._make_and_send_dns_message(
            zone, host, port, timeout, retry_interval, max_retries,
            notify=True)
    def poll_for_serial_number(self, context, zone, nameserver, timeout,
                               retry_interval, max_retries, delay):
        """Get the serial number of a zone on a resolver, then call update_status
        on Pool Manager to update the zone status.
        :param context: The user context.
        :param zone: The designate zone object. This contains the zone
            name. zone.serial = expected_serial
        :param nameserver: Destination for the poll
        :param timeout: The time (in seconds) to wait for a SOA response from
            nameserver.
        :param retry_interval: The time (in seconds) between retries.
        :param max_retries: The maximum number of retries mindns would do for
            an expected serial number. After this many retries, mindns returns
            an ERROR.
        :param delay: The time to wait before sending the first request.
        :return: None
        """
        (status, actual_serial, retries) = self.get_serial_number(
            context, zone, nameserver.host, nameserver.port, timeout,
            retry_interval, max_retries, delay)
        self.pool_manager_api.update_status(
            context, zone, nameserver, status, actual_serial)
    def get_serial_number(self, context, zone, host, port, timeout,
                          retry_interval, max_retries, delay):
        """
        :param context: The user context.
        :param zone: The designate zone object. This contains the zone
            name. zone.serial = expected_serial
        :param host: A notify is sent to this host.
        :param port: A notify is sent to this port.
        :param timeout: The time (in seconds) to wait for a SOA response from
            nameserver.
        :param retry_interval: The time (in seconds) between retries.
        :param max_retries: The maximum number of retries mindns would do for
            an expected serial number. After this many retries, mindns returns
            an ERROR.
        :param delay: The time to wait before sending the first request.
        :return: a tuple of (status, actual_serial, retries)
            status is either "SUCCESS" or "ERROR".
            actual_serial is either the serial number returned in the SOA
            message from the nameserver or None.
            retries is the number of retries left.
            The return value is just used for testing and not by pool manager.
            The pool manager is informed of the status with update_status.
        """
        actual_serial = None
        status = 'ERROR'
        retries_left = max_retries
        time.sleep(delay)
        while True:
            response, retry_cnt = self._make_and_send_dns_message(
                zone, host, port, timeout, retry_interval, retries_left)
            if response and (response.rcode() in (
                    dns.rcode.NXDOMAIN, dns.rcode.REFUSED, dns.rcode.SERVFAIL)
                    or not bool(response.answer)):
                status = 'NO_ZONE'
                if zone.serial == 0 and zone.action in ('DELETE', 'NONE'):
                    actual_serial = 0
                    break # Zone not expected to exist
            elif response and len(response.answer) == 1 \
                    and str(response.answer[0].name) == str(zone.name) \
                    and response.answer[0].rdclass == dns.rdataclass.IN \
                    and response.answer[0].rdtype == dns.rdatatype.SOA:
                # parse the SOA response and get the serial number
                rrset = response.answer[0]
                actual_serial = rrset.to_rdataset().items[0].serial
            # TODO(vinod): Account for serial number wrap around. Unix
            # timestamps are used where Designate is primary, but secondary
            # zones use different values.
            if actual_serial is not None and actual_serial >= zone.serial:
                # Everything looks good at this point. Return SUCCESS.
                status = 'SUCCESS'
                break
            retries_left -= retry_cnt
            msg = _LW("Got lower serial for '%(zone)s' to '%(host)s:"
                      "%(port)s'. Expected:'%(es)d'. Got:'%(as)s'."
                      "Retries left='%(retries)d'") % {
                'zone': zone.name, 'host': host, 'port': port,
                'es': zone.serial, 'as': actual_serial,
                'retries': retries_left}
            if not retries_left:
                # return with error
                LOG.warning(msg)
                break
            LOG.debug(msg)
            # retry again
            time.sleep(retry_interval)
        # Return retries_left for testing purposes.
        return status, actual_serial, retries_left
    def _make_and_send_dns_message(self, zone, host, port, timeout,
                                   retry_interval, max_retries, notify=False):
        """
        :param zone: The designate zone object. This contains the zone
            name.
        :param host: The destination host for the dns message.
        :param port: The destination port for the dns message.
        :param timeout: The time (in seconds) to wait for a response from
            destination.
        :param retry_interval: The time (in seconds) between retries.
        :param max_retries: The maximum number of retries mindns would do for
            a response. After this many retries, the function returns.
        :param notify: If true, a notify message is constructed else a SOA
            message is constructed.
        :return: a tuple of (response, current_retry) where
            response is the response on success or None on failure.
            current_retry is the current retry number
        """
        dns_message = self._make_dns_message(zone.name, notify=notify)
        retry = 0
        response = None
        # Send loop: EAGAIN and Timeout are retried after retry_interval;
        # BadResponse aborts immediately.
        while retry < max_retries:
            retry += 1
            LOG.info(_LI("Sending '%(msg)s' for '%(zone)s' to '%(server)s:"
                         "%(port)d'."),
                     {'msg': 'NOTIFY' if notify else 'SOA',
                      'zone': zone.name, 'server': host,
                      'port': port})
            try:
                response = self._send_dns_message(dns_message, host, port,
                                                  timeout)
            except socket.error as e:
                if e.errno != socket.errno.EAGAIN:
                    raise # unknown error, let it traceback
                # Initial workaround for bug #1558096
                LOG.info(
                    _LW("Got EAGAIN while trying to send '%(msg)s' for "
                        "'%(zone)s' to '%(server)s:%(port)d'. Timeout="
                        "'%(timeout)d' seconds. Retry='%(retry)d'") %
                    {'msg': 'NOTIFY' if notify else 'SOA',
                     'zone': zone.name, 'server': host,
                     'port': port, 'timeout': timeout,
                     'retry': retry})
                # retry sending the message
                time.sleep(retry_interval)
                continue
            except dns.exception.Timeout:
                LOG.warning(
                    _LW("Got Timeout while trying to send '%(msg)s' for "
                        "'%(zone)s' to '%(server)s:%(port)d'. Timeout="
                        "'%(timeout)d' seconds. Retry='%(retry)d'") %
                    {'msg': 'NOTIFY' if notify else 'SOA',
                     'zone': zone.name, 'server': host,
                     'port': port, 'timeout': timeout,
                     'retry': retry})
                # retry sending the message if we get a Timeout.
                time.sleep(retry_interval)
                continue
            except dns_query.BadResponse:
                LOG.warning(
                    _LW("Got BadResponse while trying to send '%(msg)s' "
                        "for '%(zone)s' to '%(server)s:%(port)d'. Timeout"
                        "='%(timeout)d' seconds. Retry='%(retry)d'") %
                    {'msg': 'NOTIFY' if notify else 'SOA',
                     'zone': zone.name, 'server': host,
                     'port': port, 'timeout': timeout,
                     'retry': retry})
                break # no retries after BadResponse
            # either we have a good response or an error that we don't want to
            # recover by retrying
            break
        # Check that we actually got a NOERROR in the rcode and and an
        # authoritative answer
        if response is None:
            pass
        elif (response.rcode() in
                (dns.rcode.NXDOMAIN, dns.rcode.REFUSED,
                 dns.rcode.SERVFAIL)) or \
                (response.rcode() == dns.rcode.NOERROR and
                 not bool(response.answer)):
            LOG.info(_LI("%(zone)s not found on %(server)s:%(port)d") %
                     {'zone': zone.name, 'server': host, 'port': port})
        elif not (response.flags & dns.flags.AA) or dns.rcode.from_flags(
                response.flags, response.ednsflags) != dns.rcode.NOERROR:
            LOG.warning(
                _LW("Failed to get expected response while trying to "
                    "send '%(msg)s' for '%(zone)s' to '%(server)s:"
                    "%(port)d'.\nResponse message:\n%(resp)s\n") %
                {'msg': 'NOTIFY' if notify else 'SOA',
                 'zone': zone.name, 'server': host,
                 'port': port, 'resp': str(response)})
            response = None
        return response, retry
    def _make_dns_message(self, zone_name, notify=False):
        """
        This constructs a SOA query or a dns NOTIFY message.
        :param zone_name: The zone name for which a SOA/NOTIFY needs to be
            sent.
        :param notify: If true, a notify message is constructed else a SOA
            message is constructed.
        :return: The constructed message.
        """
        dns_message = dns.message.make_query(zone_name, dns.rdatatype.SOA)
        dns_message.flags = 0
        if notify:
            dns_message.set_opcode(dns.opcode.NOTIFY)
            dns_message.flags |= dns.flags.AA
        else:
            # Setting the flags to RD causes BIND9 to respond with a NXDOMAIN.
            dns_message.set_opcode(dns.opcode.QUERY)
            dns_message.flags |= dns.flags.RD
        return dns_message
    def _send_dns_message(self, dns_message, host, port, timeout):
        """
        :param dns_message: The dns message that needs to be sent.
        :param host: The destination ip of dns_message.
        :param port: The destination port of dns_message.
        :param timeout: The timeout in seconds to wait for a response.
        :return: response
        """
        # UDP by default; TCP only when the mdns service is configured
        # with all_tcp.
        if not CONF['service:mdns'].all_tcp:
            response = dns_query.udp(
                dns_message, host, port=port, timeout=timeout)
        else:
            response = dns_query.tcp(
                dns_message, host, port=port, timeout=timeout)
        return response
| [
"[email protected]"
]
| |
185916b8c6ed4dfc3a2880a20b9cdaf3fd075629 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/8/sV-.py | dbd279c3dd10a5a0e31f79253387b280f6d56b03 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    """Print the quoted token sequence, or a blank line for an empty string.

    ``lineRemaining`` is the token list following the 'sV-' keyword; it is
    treated as a string literal when delimited by lone '"' tokens.
    """
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Strip the quote tokens and join the payload with spaces.
            lineRemaining = lineRemaining[1:-1]
            # print(x) / print('') behave the same on Python 2 and 3; the
            # original bare ``print`` statements were Python-2-only syntax.
            print(' '.join(lineRemaining))
        else:
            # Just the two quote tokens: an empty string prints a blank line.
            print('')
def main(fileName):
    """Interpret *fileName*: each line must start with the 'sV-' keyword,
    whose payload is handed to printFunction. Any other line prints
    ERROR and aborts the run.
    """
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'sV-':
                printFunction(data[1:])
            else:
                # print(x) is equivalent on Python 2 and 3; the original
                # bare ``print`` statement was Python-2-only syntax.
                print('ERROR')
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
88de90beb87dc98e7b6cecbef0885d4eb331071d | b01eee55884e21412a1812593996a0d9156e20bc | /cipp/parser.py | a61415f56a03d5a2ccf442dd729ad537e1406db5 | []
| no_license | JacquesLucke/cipp | 46bdb7eebaeb863f424c92542ea56b49b5f0fe2e | d4f38fd1fc84aed9cbf49b85bf6c4b96f2561f71 | refs/heads/master | 2021-10-27T18:29:23.288884 | 2019-04-18T15:36:52 | 2019-04-18T15:36:52 | 123,611,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,498 | py | from . import ast
from . lexer import Lexer
from . token_stream import TokenStream
from . tokens import (
createSingleCharToken,
IdentifierToken, IntegerToken,
CommentToken, WhitespaceToken
)
SingleCharToken = createSingleCharToken("(){}[],=+-*/@;<>!")
cippLexer = Lexer(
[IdentifierToken, IntegerToken, CommentToken,
SingleCharToken, WhitespaceToken],
ignoredTokens = [WhitespaceToken, CommentToken]
)
def parse(string):
    """Tokenize *string* with the cipp lexer and parse it into a Program AST."""
    return parseProgram(stringToTokenStream(string))
def stringToTokenStream(string):
    """Tokenize *string* with the cipp lexer and wrap it in a TokenStream."""
    return TokenStream(cippLexer.tokenize(string))
def parseProgram(tokens):
    """Parse a sequence of top-level 'def' function definitions into a Program."""
    functions = []
    while nextIsKeyword(tokens, "def"):
        functions.append(parseFunction(tokens))
    return ast.Program(functions)
def parseFunction(tokens):
    """Parse ``def <type> @<name>(<args>) <statement>`` into a Function node."""
    acceptKeyword(tokens, "def")
    returnType = parseType(tokens)
    acceptLetter(tokens, "@")
    functionName = acceptIdentifier(tokens)
    argumentList = parseArguments(tokens)
    body = parseStatement(tokens)
    return ast.Function(functionName, returnType, argumentList, body)
def parseArguments(tokens):
    """Parse a parenthesized, comma-separated list of arguments."""
    return parseList(tokens, parseArgument, "(", ")", ",")
def parseArgument(tokens):
    """Parse one ``<type> <name>`` pair into an Argument node."""
    argType = parseType(tokens)
    argName = acceptIdentifier(tokens)
    return ast.Argument(argName, argType)
def parseType(tokens):
    """Parse a type identifier into a Type node."""
    return ast.Type(acceptIdentifier(tokens))
def parseStatement(tokens):
    # Dispatch on the next token to the matching statement parser.
    if nextIsLetter(tokens, "{"):
        return parseStatement_Block(tokens)
    elif nextIsKeyword(tokens, "return"):
        return parseStatement_Return(tokens)
    elif nextIsKeyword(tokens, "let"):
        return parseStatement_Let(tokens)
    elif nextIsKeyword(tokens, "while"):
        return parseStatement_While(tokens)
    elif nextIsKeyword(tokens, "if"):
        return parseStatement_If(tokens)
    elif nextIsIdentifier(tokens):
        # Any other leading identifier starts an assignment statement.
        return parseStatement_Assignment(tokens)
    else:
        raise Exception("unknown statement type")
def parseStatement_Block(tokens):
    """Parse '{ statement* }'.

    A block with exactly one statement collapses to that statement;
    otherwise the statements are wrapped in a BlockStmt node.
    """
    # Fix: removed the unused 'a = 0' parameter (dead debug debris); the
    # only caller, parseStatement, passes just the token stream.
    statements = parseList(tokens, parseStatement, "{", "}")
    if len(statements) == 1:
        return statements[0]
    return ast.BlockStmt(statements)
def parseStatement_Return(tokens):
    """Parse 'return <expression> ;' into a ReturnStmt node."""
    acceptKeyword(tokens, "return")
    result = parseExpression(tokens)
    acceptLetter(tokens, ";")
    return ast.ReturnStmt(result)
def parseStatement_Let(tokens):
    # Grammar: let <type> <name> = <expression> ;
    acceptKeyword(tokens, "let")
    dataType = parseType(tokens)
    name = acceptIdentifier(tokens)
    acceptLetter(tokens, "=")
    expression = parseExpression(tokens)
    acceptLetter(tokens, ";")
    return ast.LetStmt(name, dataType, expression)
def parseStatement_Assignment(tokens):
    """Parse '<name> = expr ;' or '<name> [ expr ] = expr ;'.

    The optional index is parsed first; the '= expr ;' suffix is shared
    by both forms.
    """
    targetName = acceptIdentifier(tokens)
    index = None
    if nextIsLetter(tokens, "["):
        acceptLetter(tokens, "[")
        index = parseExpression(tokens)
        acceptLetter(tokens, "]")
    acceptLetter(tokens, "=")
    value = parseExpression(tokens)
    acceptLetter(tokens, ";")
    if index is None:
        return ast.AssignmentStmt(targetName, value)
    return ast.ArrayAssignmentStmt(targetName, index, value)
def parseStatement_While(tokens):
    # Grammar: while ( <expression> ) <statement>
    acceptKeyword(tokens, "while")
    acceptLetter(tokens, "(")
    condition = parseExpression(tokens)
    acceptLetter(tokens, ")")
    statement = parseStatement(tokens)
    return ast.WhileStmt(condition, statement)
def parseStatement_If(tokens):
    # Grammar: if ( <expression> ) <statement> [ else <statement> ]
    acceptKeyword(tokens, "if")
    acceptLetter(tokens, "(")
    condition = parseExpression(tokens)
    acceptLetter(tokens, ")")
    thenStatement = parseStatement(tokens)
    if nextIsKeyword(tokens, "else"):
        acceptKeyword(tokens, "else")
        elseStatement = parseStatement(tokens)
        return ast.IfElseStmt(condition, thenStatement, elseStatement)
    else:
        return ast.IfStmt(condition, thenStatement)
def parseExpression(tokens):
    '''
    Expression parsing happens at different levels
    because of operator precedence rules: comparisons bind loosest,
    then +/-, then */÷, with atomic factors at the bottom.
    '''
    return parseExpression_ComparisonLevel(tokens)
def parseExpression_ComparisonLevel(tokens):
    # Comparisons do not chain: at most one comparison operator per level.
    expressionLeft = parseExpression_AddSubLevel(tokens)
    if nextIsComparisonOperator(tokens):
        operator = parseComparisonOperator(tokens)
        expressionRight = parseExpression_AddSubLevel(tokens)
        return ast.ComparisonExpr(operator, expressionLeft, expressionRight)
    else:
        return expressionLeft
comparisonOperators = ("==", "<=", ">=", "!=", "<", ">")
def parseComparisonOperator(tokens):
    """Consume and return the first matching comparison operator.

    comparisonOperators lists two-character operators before their
    one-character prefixes, so '<=' is preferred over '<'.
    """
    matched = next(
        (op for op in comparisonOperators if nextLettersAre(tokens, op)),
        None)
    if matched is None:
        raise Exception("unknown comparison operator")
    acceptLetters(tokens, matched)
    return matched
def parseExpression_AddSubLevel(tokens):
    # Collect '+'/'-' separated terms; each term keeps its sign via the
    # AddedTerm / SubtractedTerm wrapper nodes.
    terms = []
    term = parseExpression_MulDivLevel(tokens)
    terms.append(ast.AddedTerm(term))
    while nextIsOneOfLetters(tokens, "+", "-"):
        if nextIsLetter(tokens, "+"):
            acceptLetter(tokens, "+")
            term = parseExpression_MulDivLevel(tokens)
            terms.append(ast.AddedTerm(term))
        elif nextIsLetter(tokens, "-"):
            acceptLetter(tokens, "-")
            term = parseExpression_MulDivLevel(tokens)
            terms.append(ast.SubtractedTerm(term))
    # A single positive term needs no AddSubExpr wrapper.
    if len(terms) == 1 and isinstance(terms[0], ast.AddedTerm):
        return terms[0].expr
    else:
        return ast.AddSubExpr(terms)
def parseExpression_MulDivLevel(tokens):
    # Collect '*'/'/' separated factors, mirroring the add/sub level:
    # each factor keeps its role via MultipliedTerm / DividedTerm.
    terms = []
    factor = parseExpression_FactorLevel(tokens)
    terms.append(ast.MultipliedTerm(factor))
    while nextIsOneOfLetters(tokens, "*", "/"):
        if nextIsLetter(tokens, "*"):
            acceptLetter(tokens, "*")
            factor = parseExpression_FactorLevel(tokens)
            terms.append(ast.MultipliedTerm(factor))
        elif nextIsLetter(tokens, "/"):
            acceptLetter(tokens, "/")
            factor = parseExpression_FactorLevel(tokens)
            terms.append(ast.DividedTerm(factor))
    # A single multiplied factor needs no MulDivExpr wrapper.
    if len(terms) == 1 and isinstance(terms[0], ast.MultipliedTerm):
        return terms[0].expr
    else:
        return ast.MulDivExpr(terms)
def parseExpression_FactorLevel(tokens):
    """Parse an atomic factor: a variable, an integer literal, a
    parenthesized expression, or an @function call.

    Fix: the original silently fell through and returned None on any
    other token, which surfaced later as confusing failures on a None
    expression; an unexpected token now raises immediately.
    """
    if nextIsIdentifier(tokens):
        name = acceptIdentifier(tokens)
        return ast.Variable(name)
    elif nextIsInteger(tokens):
        value = acceptInteger(tokens)
        return ast.ConstInt(value)
    elif nextIsLetter(tokens, "("):
        acceptLetter(tokens, "(")
        expression = parseExpression(tokens)
        acceptLetter(tokens, ")")
        return expression
    elif nextIsLetter(tokens, "@"):
        return parseFunctionCall(tokens)
    else:
        raise Exception("expected a factor (identifier, integer, '(' or '@')")
def parseFunctionCall(tokens):
    # Grammar: @<name> ( <expression>, ... )
    acceptLetter(tokens, "@")
    name = acceptIdentifier(tokens)
    arguments = parseCallArguments(tokens)
    return ast.FunctionCall(name, arguments)
def parseCallArguments(tokens):
    # Actual-argument list: comma-separated expressions in parentheses.
    return parseList(tokens, parseExpression, "(", ")", ",")
def parseList(tokens, parseElement, start, end, separator = None):
    # Generic delimited-list parser: consumes `start`, then elements until
    # `end` is next.  When a separator is given, a missing separator after
    # an element ends the list early (so a trailing separator is allowed
    # only implicitly before `end`).
    elements = []
    acceptLetter(tokens, start)
    while not nextIsLetter(tokens, end):
        element = parseElement(tokens)
        elements.append(element)
        if separator is not None:
            if nextIsLetter(tokens, separator):
                acceptLetter(tokens, separator)
            else:
                break
    acceptLetter(tokens, end)
    return elements
# Utility Functions
####################################################
def acceptKeyword(tokens, keyword):
    # Consume the next token iff it is the given keyword, else fail.
    if nextIsKeyword(tokens, keyword):
        tokens.takeNext()
    else:
        raise Exception(f"expected keyword '{keyword}'")
def acceptLetters(tokens, letters):
    # Consume a fixed sequence of single-character tokens (e.g. '<=').
    for letter in letters:
        acceptLetter(tokens, letter)
def acceptLetter(tokens, letter):
    # Consume the next token iff it is the given single-character token.
    if nextIsLetter(tokens, letter):
        tokens.takeNext()
    else:
        raise Exception(f"expected token '{letter}'")
def acceptIdentifier(tokens):
    # Consume an identifier token and return its text.
    if nextIsIdentifier(tokens):
        return tokens.takeNext().value
    else:
        raise Exception("expected identifier")
def acceptInteger(tokens):
    # Consume an integer token and return its numeric value.
    if nextIsInteger(tokens):
        return tokens.takeNext().value
    else:
        raise Exception("expected integer")
def nextIsKeyword(tokens, keyword):
    # True when the next token is an identifier with exactly this text.
    if len(tokens) == 0: return False
    nextToken = tokens.peekNext()
    if isinstance(nextToken, IdentifierToken):
        return nextToken.value == keyword
    return False
def nextIsLetter(tokens, letter):
    # True when the next token is the given single-character token.
    if len(tokens) == 0: return False
    nextToken = tokens.peekNext()
    if isinstance(nextToken, SingleCharToken):
        return nextToken.value == letter
    return False
def nextIsOneOfLetters(tokens, *letters):
    # Lookahead-only test against several candidate characters.
    return any(nextIsLetter(tokens, c) for c in letters)
def nextLettersAre(tokens, letters):
    # Multi-character lookahead, used for two-character operators.
    if len(tokens) < len(letters): return False
    for token, letter in zip(tokens.getLookahead(len(letters)), letters):
        if not isinstance(token, SingleCharToken) or token.value != letter:
            return False
    return True
def nextIsIdentifier(tokens):
    if len(tokens) == 0: return False
    return isinstance(tokens.peekNext(), IdentifierToken)
def nextIsInteger(tokens):
    if len(tokens) == 0: return False
    return isinstance(tokens.peekNext(), IntegerToken)
def nextIsComparisonOperator(tokens):
    # Lookahead-only test; consumes nothing.
    return any(nextLettersAre(tokens, s) for s in comparisonOperators)
"[email protected]"
]
| |
594cdad708a08bfdabee9afb17a6462235d503d6 | 8600ea155f279e5a8dfe5a1926038511f6b6a7ea | /base_module_quality/pep8_test/pep8_test.py | 395520a63f88c32bd97b8c0358546252a2444e06 | []
| no_license | MarkNorgate/addons-EAD | c2fff89ab16fce3ba19fbe433ee5863705a6f4e5 | 840f28642b5d328e4b86839c413e5164622295a5 | refs/heads/master | 2020-04-23T22:11:00.164438 | 2015-07-22T12:24:53 | 2015-07-22T12:24:53 | 39,501,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,797 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
from tools.translate import _
from base_module_quality import base_module_quality
class quality_test(base_module_quality.abstract_quality_check):
    """Scan a module's .py files for a handful of PEP-8-style violations.

    Each check_* method increments self.good_standard / self.bad_standard
    and records human-readable findings in self.result_py, keyed by
    file name + line number.  The final score is good / (good + bad).
    """
    def __init__(self):
        super(quality_test, self).__init__()
        self.name = _("PEP-8 Test")
        self.note = _("""
PEP-8 Test , copyright of py files check, method can not call from loops
""")
        self.bool_installed_only = False
        # Running tallies of non-conforming / conforming findings.
        self.bad_standard = 0
        self.good_standard = 0
        # Findings: key -> [file_name, line_number, message].
        self.result_py = {}
        self.min_score = 40
    def run_test(self, cr, uid, module_path):
        """Run every enabled style check over all .py files in module_path."""
        # Collect files one directory level deep; the list is extended
        # while being iterated, which picks up subdirectory entries.
        list_files = os.listdir(module_path)
        for i in list_files:
            path = os.path.join(module_path, i)
            if os.path.isdir(path):
                for j in os.listdir(path):
                    list_files.append(os.path.join(i, j))
        py_list = []
        for file_py in list_files:
            if file_py.split('.')[-1] == 'py' and not file_py.endswith('__init__.py') and not file_py.endswith('__terp__.py'):
                file_path = os.path.join(module_path, file_py)
                py_list.append(file_path)
        # NOTE(review): the opened handles are never explicitly closed.
        open_files = map(lambda x: open(x, 'r'), py_list)
        if not py_list:
            self.error = True
            self.result = _("No python file found")
            return None
        # Imports on separate lines and at the top of the file.
        self.check_import(open_files)
        # No space before , : ;
        self.check_space(open_files)
        # Every .py file should carry a copyright/licence header.
        self.check_licence(open_files)
        # Avoid ORM calls (browse/search/...) inside loops.
        self.check_loop(open_files)
        # Disabled check: extra spaces aligning assignment operators.
        # self.check_space_operator(open_files)
        # Prefer 'if seq:' over 'if len(seq):' for emptiness tests.
        self.check_len(open_files)
        # Don't compare booleans to True/False with == or !=.
        self.check_boolean(open_files)
        # Score is the conforming fraction (0 when nothing was found).
        self.score = self.good_standard and float(self.good_standard) / float(self.good_standard + self.bad_standard)
        if self.score*100 < self.min_score:
            self.message = 'Score is below than minimal score(%s%%)' % self.min_score
        self.result = self.get_result({ module_path: [int(self.score * 100)]})
        self.result_details += self.get_result_general(self.result_py)
        return None
    def check_import(self, open_files):
        """Flag imports appearing after a class/def and comma-separated imports."""
        for py in open_files:
            py.seek(0)
            class_or_def = False
            line_counter = 0
            file_name = py.name.split('/')[-1]
            while True:
                line_counter += 1
                line = py.readline()
                if not line: break
                # Once any 'class'/'def' text was seen, later imports are late.
                if ((line.find('class') > -1) or (line.find('def') > -1)):
                    class_or_def = True
                import_found = line.find('import')
                comment_found = line.find('#')
                if comment_found == -1 and import_found != -1:
                    self.good_standard += 1
                    if (class_or_def):
                        self.bad_standard += 1
                        self.result_py[file_name + str(line_counter)] = [file_name, line_counter, 'Imports are always put at the top of the file, just after any module comments and docstrings, and before module globals and constants']
                    # 'import a, b' (without 'from') should be split up.
                    if (line.find('from') < 0 and line.find(',') != -1):
                        self.bad_standard += 1
                        self.result_py[file_name + str(line_counter)] = [file_name, line_counter, 'Imports should usually be on separate lines']
    def check_licence(self, open_files):
        """Check that a copyright/GNU-licence mention precedes any code."""
        for py in open_files:
            py.seek(0)
            bad_position = False
            copyright_found = False
            gnu_found = False
            license_found = False
            gnu_website_found = False
            line_counter = 0
            file_name = py.name.split('/')[-1]
            while True:
                declaration = False
                flag = False
                line_counter += 1
                line = py.readline()
                if not line: break
                # Code seen before the licence text => licence is misplaced.
                if ((line.find('class') > -1) or (line.find('def') > -1) or (line.find('import') > -1)):
                    bad_position = True
                comment_found = line.find('#')
                copyright_found = line.find('Copyright')
                gnu_found = line.find('GNU')
                license_found = line.find('License')
                gnu_website_found = line.find('www.gnu.org/licenses')
                if ((copyright_found > -1) or (gnu_found > -1) or (license_found > -1) or (gnu_website_found > -1)):
                    self.good_standard += 1
                    declaration = True
                    flag = True
                    break
                # NOTE(review): 'declaration' is always False here (it is only
                # set True immediately before the break above), so this branch
                # is unreachable as written.
                if (comment_found > -1) and bad_position and declaration:
                    self.bad_standard += 1
                    self.result_py[file_name + str(line_counter)] = [file_name, line_counter, 'Declaration of copyright must be at the top of file']
                    break
            if bad_position and (not flag):
                self.bad_standard += 1
                self.result_py[file_name] = [file_name, '--', 'File is not copyright']
    def check_loop(self, open_files):
        """Flag ORM-style calls (browse/search/...) indented deeper than a 'for'."""
        for py in open_files:
            py.seek(0)
            methods = ['browse', 'search', 'read', 'copy', 'unlink']
            place_for = 1000
            file_name = py.name.split('/')[-1]
            line_counter = 0
            counter = 0
            while True:
                line_counter += 1
                line = py.readline()
                if not line: break
                # Count the leading spaces of the current line.
                place_method = 0
                for i in line :
                    if (i == ' '):
                        place_method += 1
                    elif (i != ' '):
                        break
                    # NOTE(review): unreachable -- the previous elif breaks on
                    # every non-space character first.
                    elif (place_method > 100):
                        break
                if (line.find('for') > -1):
                    place_for = place_method
                # Deeper indentation than the last 'for' => inside its body.
                if (place_for < place_method):
                    counter += 1
                    for method in methods:
                        got = line.find(method)
                        if(got > -1):
                            self.bad_standard += 1
                            self.result_py[file_name + str(line_counter)] = [file_name, line_counter, 'puting method inside loop is not good']
            self.good_standard += counter
    def check_space(self, open_files):
        """Flag a space immediately before ',', ';' or ':'."""
        for py in open_files:
            py.seek(0)
            counter_line = 0
            file_name = py.name.split('/')[-1]
            counter = 0
            while True:
                counter_line += 1
                line = py.readline()
                if not line: break
                # Positions of the first occurrence of each punctuation mark.
                pos_comma = line.find(',')
                pos_semicolon = line.find(';')
                pos_colon = line.find(':')
                space_find = -1
                if (pos_comma != -1 or pos_semicolon != -1 or pos_colon != -1):
                    counter += 1
                    for i in line:
                        space_find += 1
                        if (i == ' '):
                            # A space directly preceding the punctuation mark.
                            if ((space_find + 1) == pos_comma) or ((space_find + 1) == pos_semicolon) or ((space_find + 1) == pos_colon):
                                self.bad_standard += 1
                                self.result_py[file_name + str(counter_line)] = [file_name, counter_line, 'You should not have space before (: ; ,)']
            self.good_standard += counter # to be check
    def check_space_operator(self, open_files):
        """Flag more than one space before an operator (alignment padding).

        Currently disabled in run_test.
        """
        for py in open_files:
            py.seek(0)
            space_counter = 0
            # NOTE(review): eq_found is assigned but never used.
            eq_found = False
            operator_found = False
            line_counter = 0
            file_name = py.name.split('/')[-1]
            while True:
                line_counter += 1
                line = py.readline()
                if not line: break
                for counter in line:
                    if (counter == ' '):
                        space_counter += 1
                    else:
                        # A run of 2+ spaces just before/after an operator.
                        if (space_counter > 1):
                            if counter in ['=', '<', '>', '!', '+', '-', '*', '/', '^', '%'] or operator_found:
                                self.bad_standard += 1
                                self.result_py[file_name + str(line_counter)] = [file_name, line_counter, 'More than one space around an assignment (or other) operator to align it with another']
                        operator_found = False
                        space_counter = 0
                        if counter in ['=', '<', '>', '!', '+', '-', '*', '/', '^', '%']:
                            self.good_standard += 1
                            operator_found = True
    def check_len(self, open_files):
        """Flag 'if len(seq):'-style emptiness tests (prefer 'if seq:')."""
        for py in open_files:
            py.seek(0)
            line_counter = 0
            file_name = py.name.split('/')[-1]
            while True:
                line_counter += 1
                line = py.readline()
                if not line: break
                if (line.find('if') > -1) and (line.find('len(') > -1) and (line.find(')') > -1):
                    self.good_standard += 1
                    # A len() test with no comparison operator on the line.
                    if (line.find(':') > -1) and not line.find('<') > -1 and not line.find('>') > -1 and not line.find('=') > -1 and not line.find('!') > -1 :
                        self.bad_standard += 1
                        self.result_py[file_name + str(line_counter)] = [file_name, line_counter, ' For sequences, (strings, lists, tuples), use the fact that empty sequences are false']
    def check_boolean(self, open_files):
        """Flag '== True' / '!= False' style boolean comparisons."""
        for py in open_files:
            py.seek(0)
            line_counter = 0
            file_name = py.name.split('/')[-1]
            while True:
                line_counter += 1
                line = py.readline()
                if not line: break
                if (line.find('if') > -1):
                    self.good_standard += 1
                    if ((line.find('==') > -1) or (line.find('!=') > -1)) and ((line.find('True') > -1) or (line.find('False') > -1)):
                        self.bad_standard += 1
                        self.result_py[file_name + str(line_counter)] = [file_name, line_counter, "Don't compare boolean values to True or False using == or !="]
    def get_result(self, dict_obj):
        """Render the overall percentage score as a wiki-style table."""
        header = ('{| border="1" cellspacing="0" cellpadding="5" align="left" \n! %-40s \n', [_('Result of pep8_test in %')])
        if not self.error:
            return self.format_table(header, data_list=dict_obj)
        return ""
    def get_result_general(self, dict_obj):
        """Render the per-line findings in self.result_py as an HTML table."""
        str_html = '''<html><strong>Result</strong><head>%s</head><body><table class="tablestyle">'''%(self.get_style())
        header = ('<tr><th class="tdatastyle">%s</th><th class="tdatastyle">%s</th><th class="tdatastyle">%s</th></tr>', [_('Object Name'), _('Line number'), _('Suggestion')])
        if not self.error:
            res = str_html + self.format_html_table(header, data_list=dict_obj) + '</table></body></html>'
            res = res.replace('''<td''', '''<td class="tdatastyle" ''')
            return res
        return ""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
]
| |
f7c8f1c12a5b644fe553ebfdbf5c391252a507cd | 1f4852225cec8d9f954190bb36a2a5d586b3e8bd | /ITBooks/con/config.py | 9e37f40a5cefca8a3dae60173977ac91a5116492 | []
| no_license | crono/ITBooks | d8924d54e474b035a2cc54f69cf4f67a5004344a | 61648d3ab71a06f9754ebdc75e37d6d84d100605 | refs/heads/master | 2020-06-05T00:46:34.725762 | 2017-05-22T14:09:45 | 2017-05-22T14:09:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | import os
import sys
# Project root: two directory levels above this file.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Add the directory containing the ITBooks package to sys.path so that
# absolute imports resolve when this module is loaded directly.
sys.path.append(os.path.dirname(BASE_DIR))
# DATABASE locations (SQLite file lives under <root>/database/sqlite/).
DATABASE_DIR = os.path.join(BASE_DIR, 'database')
SQLITE_FILE = os.path.join(os.path.join(DATABASE_DIR, 'sqlite'), 'books.db')
# Each key equals a spider's name; the value holds per-spider settings.
SEARCH_CONFIG = {'allitebooks': {'table': ''}, 'blah': {'table': ''}}
| [
"[email protected]"
]
| |
725ea7b6637e2b0187e91054b6dc387a7ab7141a | 594f60b6a536b831d0df38abea7f0ffc0a2fd3cb | /utils_xml/change_comments.py | 7cf6b3dfb4c62a9e7680dc1f63740306c8c9dee9 | []
| no_license | mh70cz/py | 1478439fe939076cca3a30be2f2d29fb4e8a3cd9 | 7fc23f2133624c787e1dd4856322d48251cc6f0e | refs/heads/master | 2022-08-12T06:08:30.720164 | 2022-08-08T23:16:19 | 2022-08-08T23:16:19 | 95,386,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,157 | py | """ konvertuje obsah elemntů xs:documentation bez xml:lang do komentářů """
from lxml import etree
f_name_in = "TradeRegisterListRequest_orig.xsd"
f_name_out = "TradeRegisterListRequest.xsd"
def main():
    """Convert every xs:documentation element that lacks an xml:lang
    attribute into an XML comment appended to its parent xs:annotation,
    then hand the serialized schema to format_xml for re-indentation.

    Reads the module-level f_name_in; output is written by format_xml
    to f_name_out.

    Fix: removed the commented-out dead code (alternative tree.write
    variants and debug prints) that cluttered the function.
    """
    tree = etree.parse(f_name_in)
    root = tree.getroot()
    namespaces = {'xs': 'http://www.w3.org/2001/XMLSchema'}
    annotations = root.findall(".//xs:annotation", namespaces)
    # Fully-qualified name of the xml:lang attribute.
    xml_lang = '{http://www.w3.org/XML/1998/namespace}lang'
    for annotation in annotations:
        documentations = annotation.findall("./xs:documentation", namespaces)
        for documentation in documentations:
            att = documentation.attrib
            if att.get(xml_lang, None) in ["cs", "en", "sk"]:
                # Language-tagged documentation stays as-is.
                pass
            elif att.get(xml_lang, None) is None:
                # Untagged documentation becomes a comment on the annotation.
                comment = etree.Comment(documentation.text)
                documentation.getparent().remove(documentation)
                annotation.append(comment)
    rough_bin_string = etree.tostring(root, encoding="utf-8",
                                      xml_declaration=True, pretty_print=True)
    format_xml(rough_bin_string)
def format_xml(xml_bin_string):
    """Re-format the serialized schema and write it to f_name_out.

    Appending a Comment element to xs:annotation does not insert a
    newline before it, so comments are moved onto their own line and
    short closing </xs:annotation> tags are re-indented to match their
    opening tag.  (Translated from the original Czech docstring.)
    """
    output = ""
    lenght = 0  # NOTE(review): typo for 'length'; kept to leave code untouched
    s = xml_bin_string.decode("utf-8")
    # Break '--><' so each trailing comment starts on its own line.
    s = s.replace("--><", "-->\n<")
    s = s.split("\n")
    for line in s:
        #print(line)
        if "<xs:annotation>" in line:
            # Indentation of the opening tag: 15 == len('<xs:annotation>').
            lenght = len(line) - 15
        elif ("</xs:annotation>" in line) and (len(line) < 19):
            # Re-indent a closing tag that ended up near column 0.
            line = str(lenght * " ") + line
        output += line + "\n"
    with open(f_name_out, "w", encoding="utf-8") as wf:
        wf.write(output)
main()
| [
"[email protected]"
]
| |
d605004a43e9c6bfffeb41b5a4af64b8f0c32c86 | f2d7e8d536d77e786dc519fc54e13cb496663f51 | /t2t_bert/distributed_single_sentence_classification/model_interface.py | d76ef1883cd7001f4cf830209683115dcb9f99b0 | [
"Apache-2.0"
]
| permissive | CBHell/BERT | a8ecfb36e3ddf1741f1e523dbab8b5ea350c0850 | 049ba2dc1cffe8eb3dbecf13ba3aaf17f4c3293f | refs/heads/master | 2020-09-08T09:37:22.421778 | 2019-11-11T11:54:01 | 2019-11-11T11:54:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,616 | py | from distributed_encoder.bert_encoder import bert_encoder
from distributed_encoder.bert_encoder import bert_rule_encoder
from distributed_encoder.gpt_encoder import gpt_encoder
from distributed_encoder.bert_encoder import albert_encoder
from distributed_encoder.classifynet_encoder import textcnn_encoder
from distributed_encoder.classifynet_encoder import textlstm_encoder
from distributed_encoder.interaction_encoder import match_pyramid_encoder
from distributed_encoder.classifynet_encoder import dan_encoder
import tensorflow as tf
import numpy as np
import json
from bunch import Bunch
import os, sys
def model_zoo(model_config):
	"""Return the encoder function matching model_config['model_type'].

	Fixes over the original: the key is read once (the original re-read
	it per branch with inconsistent defaults -- harmless only because the
	first branch's 'bert' default always won for a missing key), and an
	unknown model_type now raises a descriptive ValueError instead of a
	NameError on the unbound local 'model_interface'.
	"""
	model_type = model_config.get("model_type", "bert")
	if model_type == "bert":
		print("==apply bert encoder==")
		model_interface = bert_encoder
	elif model_type == "bert_rule":
		print("==apply bert rule encoder==")
		model_interface = bert_rule_encoder
	elif model_type in ["textcnn", "textcnn_distillation",
						"textcnn_distillation_adv_adaptation"]:
		print("==apply textcnn encoder==")
		model_interface = textcnn_encoder
	elif model_type == "bert_small":
		print("==apply bert small encoder==")
		model_interface = bert_encoder
	elif model_type in ["textlstm", "textlstm_distillation"]:
		model_interface = textlstm_encoder
	elif model_type in ["match_pyramid", "match_pyramid_distillation"]:
		model_interface = match_pyramid_encoder
	elif model_type in ["dan", "dan_distillation"]:
		model_interface = dan_encoder
	elif model_type in ["gpt"]:
		model_interface = gpt_encoder
	elif model_type == "albert":
		model_interface = albert_encoder
	else:
		raise ValueError("unknown model_type: {}".format(model_type))
	return model_interface
def model_config_parser(FLAGS):
	"""Build a Bunch config for the model named by FLAGS.model_type.

	Loads the JSON config file named by FLAGS.config_file, then overlays
	model-family-specific attributes (scope, dropout, learning rate,
	embedding matrices for the word-vector models, etc.).  Returns the
	populated Bunch.  NOTE(review): an unrecognized model_type falls
	through every branch and raises UnboundLocalError on 'config'.
	"""
	print(FLAGS.model_type)
	# --- BERT family (bert / bert_rule / albert) ---
	if FLAGS.model_type in ["bert", "bert_rule", "albert"]:
		config = json.load(open(FLAGS.config_file, "r"))
		print(config, '==model config==')
		config = Bunch(config)
		config.use_one_hot_embeddings = True
		config.scope = "bert"
		config.dropout_prob = 0.1
		config.label_type = "single_label"
		config.model_type = FLAGS.model_type
		config.ln_type = FLAGS.ln_type
		if FLAGS.task_type in ['bert_pretrain']:
			# Pretraining uses a smaller LR when warm-starting from a
			# released checkpoint, a larger one when training from scratch.
			if FLAGS.load_pretrained == "yes":
				config.init_lr = 2e-5
			else:
				config.init_lr = 1e-4
			config.warmup = 0.1
			print('==apply bert pretrain==', config.init_lr)
		else:
			if FLAGS.model_type in ['albert']:
				# NOTE(review): bare except silently falls back when
				# FLAGS has no init_lr attribute.
				try:
					config.init_lr = FLAGS.init_lr
				except:
					config.init_lr = 1e-4
			else:
				config.init_lr = 2e-5
		config.loss = "entropy"
		config.rule_type_size = 2
		config.lm_ratio = 1.0
		config.max_length = FLAGS.max_length
		config.nsp_ratio = 0.0
		config.max_predictions_per_seq = FLAGS.max_predictions_per_seq
		if FLAGS.task_type in ["pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
	# --- Reduced-depth BERT ---
	elif FLAGS.model_type in ["bert_small"]:
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.use_one_hot_embeddings = True
		config.scope = "bert"
		config.dropout_prob = 0.1
		config.label_type = "single_label"
		config.model_type = FLAGS.model_type
		config.init_lr = 3e-5
		config.num_hidden_layers = FLAGS.num_hidden_layers
		config.loss = "entropy"
		config.rule_type_size = 2
		if FLAGS.task_type in ["pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
			config.output_layer = FLAGS.output_layer
	# --- TextCNN family: needs pretrained word vectors ---
	elif FLAGS.model_type in ["textcnn", 'textcnn_distillation',
					'textcnn_distillation_adv_adaptation']:
		from data_generator import load_w2v
		w2v_path = os.path.join(FLAGS.buckets, FLAGS.w2v_path)
		vocab_path = os.path.join(FLAGS.buckets, FLAGS.vocab_file)
		print(w2v_path, vocab_path)
		[w2v_embed, token2id,
		id2token, is_extral_symbol] = load_w2v.load_pretrained_w2v(vocab_path, w2v_path)
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.token_emb_mat = w2v_embed
		config.char_emb_mat = None
		config.vocab_size = w2v_embed.shape[0]
		config.max_length = FLAGS.max_length
		config.emb_size = w2v_embed.shape[1]
		config.scope = "textcnn"
		config.char_dim = w2v_embed.shape[1]
		config.char_vocab_size = w2v_embed.shape[0]
		config.char_embedding = None
		config.model_type = FLAGS.model_type
		config.dropout_prob = config.dropout_rate
		config.init_lr = config.learning_rate
		if is_extral_symbol == 1:
			config.extra_symbol = ["<pad>", "<unk>", "<s>", "</s>"]
			print("==need extra_symbol==")
		if FLAGS.task_type in ["pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
			config.output_layer = FLAGS.output_layer
	# --- TextLSTM family ---
	elif FLAGS.model_type in ["textlstm", "textlstm_distillation"]:
		from data_generator import load_w2v
		w2v_path = os.path.join(FLAGS.buckets, FLAGS.w2v_path)
		vocab_path = os.path.join(FLAGS.buckets, FLAGS.vocab_file)
		print(w2v_path, vocab_path)
		[w2v_embed, token2id,
		id2token, is_extral_symbol] = load_w2v.load_pretrained_w2v(vocab_path, w2v_path)
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.token_emb_mat = w2v_embed
		config.char_emb_mat = None
		config.vocab_size = w2v_embed.shape[0]
		config.max_length = FLAGS.max_length
		config.emb_size = w2v_embed.shape[1]
		config.scope = "textlstm"
		config.char_dim = w2v_embed.shape[1]
		config.char_vocab_size = w2v_embed.shape[0]
		config.char_embedding = None
		config.model_type = FLAGS.model_type
		config.dropout_prob = config.dropout_rate
		config.init_lr = config.learning_rate
		# NOTE(review): "gloabl_norm" looks like a typo for "global_norm",
		# but downstream code may match this exact string -- verify before
		# changing it.
		config.grad_clip = "gloabl_norm"
		config.clip_norm = 5.0
		if is_extral_symbol == 1:
			config.extra_symbol = ["<pad>", "<unk>", "<s>", "</s>"]
			print("==need extra_symbol==")
		if FLAGS.task_type in ["pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
			config.output_layer = FLAGS.output_layer
	# --- Match-Pyramid interaction models ---
	elif FLAGS.model_type in ["match_pyramid", "match_pyramid_distillation"]:
		from data_generator import load_w2v
		w2v_path = os.path.join(FLAGS.buckets, FLAGS.w2v_path)
		vocab_path = os.path.join(FLAGS.buckets, FLAGS.vocab_file)
		print(w2v_path, vocab_path)
		[w2v_embed, token2id,
		id2token, is_extral_symbol] = load_w2v.load_pretrained_w2v(vocab_path, w2v_path)
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.token_emb_mat = w2v_embed
		config.char_emb_mat = None
		config.vocab_size = w2v_embed.shape[0]
		config.max_length = FLAGS.max_length
		config.emb_size = w2v_embed.shape[1]
		config.scope = "match_pyramid"
		config.char_dim = w2v_embed.shape[1]
		config.char_vocab_size = w2v_embed.shape[0]
		config.char_embedding = None
		config.model_type = FLAGS.model_type
		config.dropout_prob = config.dropout_rate
		config.init_lr = config.learning_rate
		# NOTE(review): same "gloabl_norm" spelling as the textlstm branch.
		config.grad_clip = "gloabl_norm"
		config.clip_norm = 5.0
		if is_extral_symbol == 1:
			config.extra_symbol = ["<pad>", "<unk>", "<s>", "</s>"]
			print("==need extra_symbol==")
		config.max_seq_len = FLAGS.max_length
		if FLAGS.task_type in ["interaction_pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
			config.output_layer = FLAGS.output_layer
		if config.compress_emb:
			config.embedding_dim_compressed = config.cnn_num_filters
	# --- Deep Averaging Network family ---
	elif FLAGS.model_type in ["dan", 'dan_distillation']:
		from data_generator import load_w2v
		w2v_path = os.path.join(FLAGS.buckets, FLAGS.w2v_path)
		vocab_path = os.path.join(FLAGS.buckets, FLAGS.vocab_file)
		print(w2v_path, vocab_path)
		[w2v_embed, token2id,
		id2token, is_extral_symbol] = load_w2v.load_pretrained_w2v(vocab_path, w2v_path)
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.token_emb_mat = w2v_embed
		config.char_emb_mat = None
		config.vocab_size = w2v_embed.shape[0]
		config.max_length = FLAGS.max_length
		config.emb_size = w2v_embed.shape[1]
		config.scope = "dan"
		config.char_dim = w2v_embed.shape[1]
		config.char_vocab_size = w2v_embed.shape[0]
		config.char_embedding = None
		config.model_type = FLAGS.model_type
		config.dropout_prob = config.dropout_rate
		config.init_lr = config.learning_rate
		if is_extral_symbol == 1:
			config.extra_symbol = ["<pad>", "<unk>", "<s>", "</s>"]
			print("==need extra_symbol==")
		if FLAGS.task_type in ["pair_sentence_classification"]:
			config.classifier = FLAGS.classifier
			config.output_layer = FLAGS.output_layer
	# --- GPT ---
	elif FLAGS.model_type in ['gpt']:
		config = json.load(open(FLAGS.config_file, "r"))
		config = Bunch(config)
		config.dropout_prob = 0.1
		config.init_lr = 1e-4
	return config
"[email protected]"
]
| |
f802fd58fcee700b831bdb8136bc7f82023758d1 | a9b322a0d941825df73a71ad3de605978c9e778d | /virtual/bin/mailmail | 87be1a4b9000543ec5890a1ddb7f1d4876036be7 | []
| no_license | Elianehbmna/chaty-final | 172562d9d7399dc9230cc434d3c29be66a70f094 | 0b459168414da09566ea5b079a922dc1fa8694d0 | refs/heads/master | 2022-12-15T15:03:31.584467 | 2019-12-05T15:29:26 | 2019-12-05T15:29:26 | 223,245,043 | 0 | 0 | null | 2022-12-08T06:55:21 | 2019-11-21T19:07:55 | Python | UTF-8 | Python | false | false | 256 | #!/home/wecode/Documents/chat/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from twisted.mail.scripts.mailmail import run
if __name__ == '__main__':
    # Strip setuptools' '-script.pyw' / '.exe' suffix from argv[0] before
    # delegating to twisted.mail's mailmail entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
| [
"[email protected]"
]
| ||
a818f026d27f1f4910ccaef03767ee79b55b13be | b4afd14e3b4e9cff0a99906a69587e348b243aeb | /website/《简明教程》/数据结构/ds_reference.py | ad8d28d89f29f5b7de173ab94ebdd97be12402cf | []
| no_license | zhankq/pythonlearn | d694df23826cda6ba662e852e531e96a10ab2092 | cb714fbb8257193029f958e73e0f9bd6a68d77f1 | refs/heads/master | 2021-12-16T13:51:23.381206 | 2021-12-03T01:13:36 | 2021-12-03T01:13:36 | 205,632,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | print('Simple Assignment')
shoplist = ['apple', 'mango', 'carrot', 'banana']
# mylist 只是指向同一对象的另一种名称
mylist = shoplist
# 我购买了第一项项目,所以我将其从列表中删除
del shoplist[0]
print('shoplist is', shoplist)
print('mylist is', mylist)
print('Copy by making a full slice')
# 通过生成一份完整的切片制作一份列表的副本
mylist = shoplist[:]
# 删除第一个项目
del mylist[0]
print('shoplist is', shoplist)
print('mylist is', mylist)
# 注意到现在两份列表已出现不同
| [
"[email protected]"
]
| |
d572a6814773a8b7f6c85a0e354e12f8655e0a35 | c4ee4a9d28425aa334038ad174c7b1d757ff45db | /py/trawl/ProcessCatch.py | c12d30c7ce3a588dd4ade0e94526c4bea1bcaaf5 | [
"MIT"
]
| permissive | nwfsc-fram/pyFieldSoftware | 32b3b9deb06dba4a168083a77336613704c7c262 | 477ba162b66ede2263693cda8c5a51d27eaa3b89 | refs/heads/master | 2023-08-03T07:38:24.117376 | 2021-10-20T22:49:51 | 2021-10-20T22:49:51 | 221,750,910 | 1 | 1 | MIT | 2023-07-20T13:13:25 | 2019-11-14T17:23:47 | Python | UTF-8 | Python | false | false | 56,327 | py | __author__ = 'Todd.Hay'
# -------------------------------------------------------------------------------
# Name: ProcessCatch.py
# Purpose:
#
# Author: Todd.Hay
# Email: [email protected]
#
# Created: Jan 10, 2016
# License: MIT
#-------------------------------------------------------------------------------
from PyQt5.QtCore import pyqtProperty, pyqtSignal, pyqtSlot, QObject, QVariant, Qt, QModelIndex, \
QAbstractItemModel, QByteArray, QAbstractListModel, QItemSelection, QPersistentModelIndex
from PyQt5.Qt import QJSValue, QQmlEngine
from py.common.FramListModel import FramListModel
from py.common.FramTreeModel import FramTreeModel
from py.common.FramTreeItem import FramTreeItem
import logging
import unittest
import pdb
from py.trawl.TrawlBackdeckDB_model import Specimen, Catch, Hauls, SpeciesSamplingPlanLu
from peewee import *
from playhouse.shortcuts import model_to_dict, dict_to_model
from py.common.SoundPlayer import SoundPlayer
class SpeciesListModel(FramListModel):
    """List model exposing per-species rows to the QML species views.

    Registers one role per species attribute so delegates can bind to
    the values by name.
    """
    # Role names, registered in exactly this order.
    ROLE_NAMES = (
        "taxonomyId", "scientificName", "commonName1", "commonName2",
        "commonName3", "displayName", "protocol", "weight", "count",
        "depthMin", "depthMax", "latMin", "latMax", "isMostRecent",
        "isLastNOperations", "type", "sampleType", "catchContentId",
        "catchId",
    )
    def __init__(self, parent=None):
        super().__init__()
        for role in self.ROLE_NAMES:
            self.add_role_name(name=role)
class SpeciesTreeModel(FramTreeModel):
    """
    Class used for the SelectedSpeciesTreeView. This is a custom FramTreeModel that allows one to add/remove
    mixes, which are hierarchical in nature (Mix > Submix > Taxon/Debris).
    """
    # NOTE(review): mutable default arguments are shared across calls — safe only
    # if FramTreeModel never mutates them in place; confirm.
    def __init__(self, headers=[], data=[]):
        super().__init__(headers=headers, data=data)
    @pyqtSlot(result=QVariant)
    def sortCatch(self):
        """
        Sort the tree in-place: taxa first, then mixes, then debris at each
        level, alphabetically (case-insensitive) within each category.

        :return: list of QModelIndex - indexes of the mix/submix nodes that
            were expanded before the sort, so the view can re-expand them
            (removeRows/setChildItems collapses everything).
        """
        # List used to keep track of which items needs to be re-expanded after the sort operation is completed
        expandedList = []
        typeCol = self.getColumnNumber("type")
        displayNameCol = self.getColumnNumber("displayName")
        # Sort main list by item type using SORT_ORDER & then alphabetically within the Taxon, Mix, Debris lists
        SORT_ORDER = {"Taxon": 0, "Mix": 1, "Debris": 2}
        MIX_SORT_ORDER = {"Taxon": 0, "Submix": 1, "Debris": 2}
        SUBMIX_SORT_ORDER = {"Taxon": 0, "Debris": 1}
        # Top level: detach all children and re-attach them in sorted order.
        sorted_items = sorted(self._rootItem.childItems, key=lambda x: (SORT_ORDER[x.data(typeCol).value()], x.data(displayNameCol).value().lower()))
        self.removeRows(0, self._rootItem.childCount(), QModelIndex())
        self.setChildItems(sorted_items, QModelIndex())
        # Sort mixes by Taxon > Submix > Debris, and then alphabetically within each of those categories
        mixes = [x for x in self._rootItem.childItems if x.data(typeCol).value() == "Mix"]
        for mix in mixes:
            mixIdx = self.createIndex(0, 0, mix)
            # TODO Todd Hay - Fix sorting mixes when mix # >= 10
            # see https://arcpy.wordpress.com/2012/05/11/sorting-alphanumeric-strings-in-python/
            # (lexicographic sort puts "Mix #10" before "Mix #2")
            sorted_mix = sorted(mix.childItems, key=lambda x: (MIX_SORT_ORDER[x.data(typeCol).value()], x.data(displayNameCol).value().lower()))
            self.removeRows(0, mix.childCount(), mixIdx)
            self.setChildItems(sorted_mix, mixIdx)
            if mix.isExpanded:
                expandedList.append(mix)
            # Sort submixes by Taxon > Debris, and then alphabetically within each of those categories
            submixes = [y for y in mix.childItems if y.data(typeCol).value() == "Submix"]
            for submix in submixes:
                submixIdx = self.createIndex(0, 0, submix)
                # TODO Todd Hay - Fix sorting submixes when mix # >= 10
                # see https://arcpy.wordpress.com/2012/05/11/sorting-alphanumeric-strings-in-python/
                sorted_submix = sorted(submix.childItems, key=lambda z: (SUBMIX_SORT_ORDER[z.data(typeCol).value()], z.data(displayNameCol).value().lower()))
                self.removeRows(0, submix.childCount(), submixIdx)
                self.setChildItems(sorted_submix, submixIdx)
                if submix.isExpanded:
                    expandedList.append(submix)
        # Convert the expandedList items to their current QModelIndex's
        expandedList = [self.createIndex(x.row, 0, x) for i, x in enumerate(expandedList)]
        return expandedList
class ProcessCatch(QObject):
    """
    Backing object for the ProcessCatchScreen QML view.  Maintains the
    available-species list models (full / most-recent / debris), the selected
    species tree model, and mirrors every tree change into the CATCH /
    SPECIMEN tables of the back-deck database.
    """
    haulIdChanged = pyqtSignal()            # current haul changed
    speciesModelChanged = pyqtSignal()      # active available-species model swapped
    speciesCountChanged = pyqtSignal()      # number of selected taxa changed
    totalWeightChanged = pyqtSignal()       # running haul weight changed
    selectedIndexChanged = pyqtSignal()     # tree selection changed
    activeMixChanged = pyqtSignal()         # active mix/submix target changed
    def __init__(self, app=None, db=None):
        """
        :param app: application object exposing state_machine (current haul)
        :param db: database wrapper used for all CATCH/SPECIMEN queries
        """
        super().__init__()
        self._logger = logging.getLogger(__name__)
        self._app = app
        self._db = db
        self._mixes = dict()
        # The mix/submix that newly added species are routed into.
        self._active_mix = {"catchId": None, "displayName": None}
        # Populate lists that are used in the tvAvailableSpecies TableView
        self._species = self._get_species()
        self._recent_species = [s for s in self._species if s["isMostRecent"].upper() == "TRUE"]
        self._debris = self._get_debris()
        # Create the models for the available + selected Table/Tree views
        self.avFullModel = SpeciesListModel()
        self.avRecentModel = SpeciesListModel()
        self.avDebrisModel = SpeciesListModel()
        self.seModel = self._set_selected_species_model()
        self._current_species_model = self.avFullModel
        self._species_count = 0
        self._total_weight = 0
        self._filter = ""
        # NOTE(review): get_*_species helpers are defined outside this chunk —
        # presumably cached taxonomy subsets; confirm.
        self._corals = self.get_coral_species()
        self._salmon = self.get_salmon_species()
        self._sponges = self.get_sponge_species()
        self._rockfish = self.get_rockfish_species()
        self._sound_player = SoundPlayer()
        self._selected_index = None
        # pdb.set_trace()
    @pyqtProperty(QVariant, notify=activeMixChanged)
    def activeMix(self):
        """
        The currently chosen mix/submix target: a dict with "catchId" and
        "displayName" keys.  When a user adds a new species while a mix is
        active, the species is added to this mix.
        :return: dict
        """
        return self._active_mix
    @activeMix.setter
    def activeMix(self, value):
        """
        Set the active mix and notify QML.
        :param value: dict with "catchId" / "displayName" keys
        :return: None
        """
        self._active_mix = value
        self.activeMixChanged.emit()
@pyqtSlot()
def initialize_lists(self):
"""
Method to reset all of the FramListModels to their original state of items. This is called when
the ProcessCatch is first initialized and then also whenever the haul id is changed, as the tree
needs to be rebuilt from the database at that point
:return: None
"""
# Establish working lists for available Full / Recent / Debris lists + filtered variants
self.avFullSpecies = list(self._species)
self.avFullSpeciesFiltered = list(self._species)
self.avRecentSpecies = list(self._recent_species)
self.avRecentSpeciesFiltered = list(self._recent_species)
self.avDebris = list(self._debris)
self.avDebrisFiltered = list(self._debris)
# Reset the list items
self.avFullModel.setItems(self.avFullSpeciesFiltered)
self.avRecentModel.setItems(self.avRecentSpeciesFiltered)
self.avDebrisModel.setItems(self.avDebrisFiltered)
def _get_fram_protocols(self):
"""
Method to gather the protocol display name for the FRAM protocols
:return: dict - containing taxon_id: protocol name
"""
protocols = []
protocol_sql = """
SELECT taxonomy_id, display_name FROM SPECIES_SAMPLING_PLAN_LU WHERE
PLAN_NAME = 'FRAM Standard Survey' AND
DISPLAY_NAME != 'Whole Specimen ID'
"""
# AND
# DISPLAY_NAME != 'Coral' AND
# DISPLAY_NAME != 'Salmon';
protocols = self._db.execute(query=protocol_sql)
protocols = {x[0]: x[1] for x in protocols}
return protocols
    @pyqtSlot()
    def initialize_tree(self):
        """
        Rebuild the selected-species tree from the CATCH table for the current
        haul.  Called when the haul id changes.  Walks up to three levels
        (top-level items > mix children > submix children), moving each taxon
        and debris item from the available list models into the tree, tallying
        the total weight, and picking the first mix found as the activeMix if
        none is set.
        :return: None
        """
        model = self.seModel
        # Clear the tree + it's descendants list
        model.clear()
        self.speciesCount = 0
        # NOTE(review): starts as int 0 — confirm the totalWeight setter
        # accepts an int when a haul has no catch rows.
        total_weight = 0
        # Column keys for the raw query rows (keys) vs. the subset pushed into
        # each tree item (dataKeys, which swaps isMix/isDebris for "type").
        keys = ["catchId", "parentCatchId", "displayName", "isMix", "isDebris", "taxonomyId", "scientificName",
                "commonName1", "commonName2", "commonName3",
                "depthMin", "depthMax", "latMin", "latMax",
                "weight", "count", "isMostRecent", "isLastNOperations", "catchContentId", "protocol"]
        dataKeys = ["catchId", "displayName", "taxonomyId", "scientificName",
                "commonName1", "commonName2", "commonName3",
                "depthMin", "depthMax", "latMin", "latMax",
                "weight", "count", "isMostRecent", "isLastNOperations", "type", "catchContentId", "protocol"]
        # TODO Todd Hay sampleType - do I need to include this, I don't think so as I'm not using it.
        # Returns a dictionary of taxon_id: protocol name - these are used to compare against the sql query below
        # to set the protocol display to a FRAM Standard Survey protocol name if one exists, otherwise, use what is
        # retrieved from the query
        protocols = self._get_fram_protocols()
        sql = """
            SELECT c.CATCH_ID, c.PARENT_CATCH_ID, c.DISPLAY_NAME, c.IS_MIX, c.IS_DEBRIS, cc.TAXONOMY_ID,
                t.SCIENTIFIC_NAME, t.COMMON_NAME_1, t.COMMON_NAME_2, t.COMMON_NAME_3,
                t.HISTORICAL_DEPTH_MIN, t.HISTORICAL_DEPTH_MAX, t.HISTORICAL_LAT_MIN, t.HISTORICAL_LAT_MAX,
                c.weight_kg, c.sample_count_int, cc.IS_MOST_RECENT, cc.IS_LAST_N_OPERATIONS, cc.CATCH_CONTENT_ID,
                s.DISPLAY_NAME
            FROM CATCH c
                LEFT JOIN CATCH_CONTENT_LU cc ON c.CATCH_CONTENT_ID = cc.CATCH_CONTENT_ID
                LEFT JOIN TAXONOMY_LU t ON t.TAXONOMY_ID = cc.TAXONOMY_ID
                LEFT JOIN SPECIES_SAMPLING_PLAN_LU s ON t.TAXONOMY_ID = s.TAXONOMY_ID
            WHERE c.OPERATION_ID = ?
            GROUP BY CATCH_ID
            ORDER BY CATCH_ID
        """
        params = [self._app.state_machine._haul["haul_id"], ]
        results = self._db.execute(query=sql, parameters=params).fetchall()
        if results:
            results = [dict(zip(keys, values)) for values in results]
            # First pass: derive each row's type, pull its basket totals, and
            # apply the FRAM protocol override.
            for x in results:
                if x["isMix"].lower() == "false" and x["isDebris"].lower() == "false":
                    x["type"] = "Taxon"
                elif x["isMix"].lower() == "true" and x["isDebris"].lower() == "false":
                    if "submix" in x["displayName"].lower():
                        x["type"] = "Submix"
                    else:
                        x["type"] = "Mix"
                        if isinstance(self._active_mix, QJSValue):
                            self._active_mix = self._active_mix.toVariant()
                        # First mix encountered becomes the active mix if none is set.
                        if self._active_mix["catchId"] is None:
                            self.activeMix = {"displayName": x["displayName"], "catchId": x["catchId"]}
                elif x["isMix"].lower() == "false" and x["isDebris"].lower() == "true":
                    x["type"] = "Debris"
                # Update weights / counts from the item's baskets
                totals = self._get_basket_weights_counts(catch_id=x["catchId"])
                x["weight"] = totals["weight"]
                x["count"] = totals["count"]
                # Update protocol display using the protocols dict obtained above
                x["protocol"] = protocols[x["taxonomyId"]] if x["taxonomyId"] in protocols else x["protocol"]
            # Second pass: insert items top-down (roots, then mix children,
            # then submix children) so parent indexes exist before children.
            firstLevelItems = [x for x in results if x["parentCatchId"] is None]
            for item in firstLevelItems:
                # Get the data items
                data = {x: item[x] for x in item if x in dataKeys}
                # Update the total weight
                total_weight += data["weight"]
                # Set None values to ""
                data.update((k, "") for k, v in data.items() if v is None)
                # Add to the FramTreeView
                parentIdx = self.append_tree_item(data=data, parentIdx=QModelIndex())
                # Remove from the FramListModel, i.e. the left-side ListView
                if data["type"] != "Mix" and data["type"] != "Submix":
                    self.remove_list_item(data=data)
                # For a mix, add children to the mix
                if data["type"] == "Mix":
                    # Update the FramTreeModel self._mixCount
                    model.addMixCount("Mix")
                    # Get all of the Mix children, add those, but don't get/add the Mix baskets (the last argument)
                    children = [x for x in results if x["parentCatchId"] == item["catchId"] and x["type"] != "Mix"]
                    for child in children:
                        # Add to the FramTreeView
                        childData = {x: child[x] for x in child if x in dataKeys}
                        childData.update((k, "") for k, v in childData.items() if v is None)
                        subparentIdx = self.append_tree_item(data=childData, parentIdx=parentIdx)
                        # Remove from the FramListModel
                        if childData["type"] != "Submix" and childData["type"] != "Mix":
                            self.remove_list_item(data=childData)
                        # Add Submixes
                        if childData["type"] == "Submix":
                            # Update the running count of the submixes for the given mix
                            model.addMixCount("Submix", parentIdx)
                            # Get the submix children, but don't get/add the submix baskets (the last argument here)
                            subchildren = [x for x in results if x["parentCatchId"] == child["catchId"] and x["type"] != "Submix"]
                            for subchild in subchildren:
                                # Add to the FramTreeView
                                subchildData = {x: subchild[x] for x in subchild if x in dataKeys}
                                subchildData.update((k, "") for k, v in subchildData.items() if v is None)
                                self.append_tree_item(data=subchildData, parentIdx=subparentIdx)
                                # Remove from the FramListModel
                                self.remove_list_item(data=subchildData)
            model.sortCatch()
        self.totalWeight = total_weight
        logging.info("Initializing tree, mixes: {0}".format(self.seModel.mixCount))
@pyqtSlot(str)
def playSound(self, sound_name):
"""
Play a sound
:param sound_name:
:return:
"""
if not isinstance(sound_name, str):
return
self._sound_player.play_sound(sound_name=sound_name)
@staticmethod
def _filter_model(filter_text, data, type):
"""
Method to return a filtered list of the species matching the filter_text
:param filter_text: text against which to query
:param species: listing of the input species to query
:return: filtered list
"""
if type == "Debris":
return [d for d in data if filter_text.upper() in d['displayName'].upper()]
else:
filtered_list = [d for d in data
if (filter_text.upper() in d['displayName'].upper() or
filter_text.upper() in d['scientificName'].upper()or
(d["commonName1"] is not None and filter_text.upper() in d['commonName1'].upper()) or
(d["commonName2"] is not None and filter_text.upper() in d['commonName2'].upper()) or
(d["commonName3"] is not None and filter_text.upper() in d['commonName3'].upper()))]
if filter_text == "":
return filtered_list
start_match_list = [x for x in filtered_list if x['displayName'].upper().startswith(filter_text.upper())]
# start_match_list = sorted(start_match_list, key=lambda x: x["displayName"].lower())
remaining_list = [x for x in filtered_list if x not in start_match_list]
# remaining_list = sorted(remaining_list, key=lambda x: x["displayName"].lower())
sorted_list = start_match_list + remaining_list
return sorted_list
@pyqtSlot(str)
def filter_species(self, filter_text=""):
"""
Method use to filter the AvailableSpecies list model based on what the user types in the textbox
:param filter_text: Text that the user entered to filter the species
:return: None
"""
self._filter = filter_text
self.avFullSpeciesFiltered = self._filter_model(filter_text=filter_text, data=self.avFullSpecies, type="Taxon")
self.avFullModel.setItems(self.avFullSpeciesFiltered)
self.avRecentSpeciesFiltered = self._filter_model(filter_text=filter_text, data=self.avRecentSpecies, type="Taxon")
self.avRecentModel.setItems(self.avRecentSpeciesFiltered)
self.avDebrisFiltered = self._filter_model(filter_text=filter_text, data=self.avDebris, type="Debris")
self.avDebrisModel.setItems(self.avDebrisFiltered)
    @pyqtSlot(QModelIndex, result=bool)
    def add_list_item(self, index):
        """
        Add a tree item back to the appropriate available-list FramListModel
        (full / recent / debris).  Mixes and submixes are recursed so every
        contained taxon/debris item is restored individually.  Items are only
        appended to the visible (filtered) models when they match the current
        filter text.
        :param index: QModelIndex - tree item to add back
        :return: bool - False for an invalid index, True otherwise
        """
        if not isinstance(index, QModelIndex):
            return False
        # Add to the Full List Model
        item = self.seModel.getItem(index)
        data = item.getAllDataAsDict()
        # Clear out any weight + count data, i.e. if baskets had been taken, otherwise these could reappear
        # when the item is added back to the TreeView
        data["count"] = None
        data["weight"] = None
        type = data["type"]
        # Name fields used to decide whether the item matches the active filter.
        filterList = [v for k, v in data.items() if v != "" and v is not None and k in ("displayName", "scientificName", "commonName1", "commonName2", "commonName3")]
        if type == "Debris":
            # Strip the tree-view-only prefix before restoring to the debris list.
            data["displayName"] = data["displayName"].replace("Debris - ", "")
            self.avDebris.append(data)
            self.avDebris = sorted(self.avDebris, key=lambda k: k['displayName'].lower())
            if any(self._filter.lower() in x.lower() for x in filterList):
                self.avDebrisModel.appendItem(data)
                self.avDebrisModel.sort("displayName")
                self.avDebrisFiltered.append(data)
                self.avDebrisFiltered = sorted(self.avDebrisFiltered, key=lambda k: k['displayName'].lower())
        elif type == "Mix" or type == "Submix":
            # Need to recurse these to get all children and add back to the list
            for child in item.children:
                newIndex = self.seModel.createIndex(child.row, 0, child)
                self.add_list_item(newIndex)
        elif type == "Taxon":
            self.avFullSpecies.append(data)
            self.avFullSpecies = sorted(self.avFullSpecies, key=lambda k: k['displayName'].lower())
            # Check if the displayName exists in the self.avFullSpeciesFiltered
            if any(self._filter.lower() in x.lower() for x in filterList):
                self.avFullModel.appendItem(data)
                self.avFullModel.sort("displayName")
                self.avFullSpeciesFiltered.append(data)
                self.avFullSpeciesFiltered = sorted(self.avFullSpeciesFiltered, key=lambda k: k['displayName'].lower())
            # TODO Todd Hay - are we using isMostRecent or isLastNOperations - probably the latter
            if data["isMostRecent"] == "True":
                self.avRecentSpecies.append(data)
                self.avRecentSpecies = sorted(self.avRecentSpecies, key=lambda k: k['displayName'].lower())
                # if any(d["displayName"] == data["displayName"] for d in self.avRecentSpeciesFiltered):
                if any(self._filter.lower() in x.lower() for x in filterList):
                    self.avRecentModel.appendItem(data)
                    self.avRecentModel.sort("displayName")
                    self.avRecentSpeciesFiltered.append(data)
                    self.avRecentSpeciesFiltered = sorted(self.avRecentSpeciesFiltered, key=lambda k: k['displayName'].lower())
        return True
    @pyqtSlot(QVariant)
    def remove_list_item(self, data):
        """
        Remove an item from the available-list FramListModels (and the backing
        Python lists) once it has been moved into the selected-species tree.
        Taxa are removed from the full and (when applicable) recent models;
        debris from the debris model.  Mixes/submixes are not list items and
        fall through untouched.
        :param data: dict (or QJSValue wrapping one) - must contain
            "displayName", "type", and for taxa "isMostRecent"
        :return: None
        """
        if isinstance(data, QJSValue):
            data = data.toVariant()
        rolename = "displayName"
        value = data[rolename]
        type = data["type"]
        if type == "Taxon":
            idx = self.avFullModel.get_item_index(rolename=rolename, value=value)
            if idx >= 0:
                self.avFullModel.removeItem(idx)
            # Keep the backing lists in sync with the model.
            self.avFullSpecies = [x for x in self.avFullSpecies if x["displayName"] != value]
            self.avFullSpeciesFiltered = [x for x in self.avFullSpeciesFiltered if x["displayName"] != value]
            if data["isMostRecent"] == "True":
                idx = self.avRecentModel.get_item_index(rolename=rolename, value=value)
                if idx >= 0:
                    self.avRecentModel.removeItem(idx)
                self.avRecentSpecies = [x for x in self.avRecentSpecies if x["displayName"] != value]
                self.avRecentSpeciesFiltered = [x for x in self.avRecentSpeciesFiltered if x["displayName"] != value]
        elif type == "Debris":
            idx = self.avDebrisModel.get_item_index(rolename=rolename, value=value)
            if idx >= 0:
                self.avDebrisModel.removeItem(idx)
            self.avDebris = [x for x in self.avDebris if x["displayName"] != value]
            self.avDebrisFiltered = [x for x in self.avDebrisFiltered if x["displayName"] != value]
    def append_tree_item(self, data, parentIdx):
        """
        Insert a row into the selected-species tree model WITHOUT touching the
        database.  Used only while rebuilding the tree from existing CATCH
        rows (initialize_tree); see append_tree_item_with_sql for the
        user-driven variant that also inserts a CATCH record.
        :param data: dict (or QJSValue) - role values for the new row
        :param parentIdx: QModelIndex - parent to append under; an invalid
            index appends at the root
        :return: QModelIndex - index of the newly created child
        """
        model = self.seModel
        if isinstance(parentIdx, QModelIndex) and parentIdx.row() >= 0:
            parentItem = model.getItem(parentIdx)
        else:
            parentIdx = QModelIndex()
            parentItem = model._rootItem
        if isinstance(data, QJSValue):
            data = data.toVariant()  # Convert from QJSValue to dict
        status = model.insertRows(parentItem.childCount(), 1, parentIdx)
        child = parentItem.child(parentItem.childCount()-1)
        row = child.row
        # Update the speciesCount - I call the method which then emits a signal
        if data["type"] == "Taxon":
            self.speciesCount += 1
        # Update the newly created child/row data with the data from tvAvailableSpecies model
        for element in data:
            if element in model.rootData:  # and data[element] is not None and data[element] != "":
                column = model.getColumnNumber(element)
                if column >= 0:
                    index = model.createIndex(row, column, child)
                    role = model.getRoleNumber(role_name=element)
                    # Debris rows get a display prefix in the tree only.
                    if element == "displayName" and data["type"] == "Debris":
                        data[element] = "Debris - " + data[element]
                    status = model.setData(index, data[element], role)
        # Update the model._descendantSpecies list - do this after the data has been updated
        colNum = model.getColumnNumber("taxonomyId")
        taxonId = child.data(colNum)
        if taxonId.value():
            model.append_descendant(taxonId)
        return model.createIndex(row, 0, child)
@pyqtProperty(int, notify=haulIdChanged)
def haulId(self):
self._initialize_tree()
return self._haul_id
@haulId.setter
def haulId(self, value):
self._haul_id = self._app.state_machine._haul["haul_id"]
self.haulIdChanged.emit()
@pyqtProperty(float, notify=totalWeightChanged)
def totalWeight(self):
"""
Method to return the total weight for the haul
:return:
"""
return self._total_weight
@totalWeight.setter
def totalWeight(self, value):
if not isinstance(value, float):
return
self._total_weight = value
self.totalWeightChanged.emit()
    @pyqtProperty(int, notify=speciesCountChanged)
    def speciesCount(self):
        """
        Number of taxa currently in the selected-species tree (shown in the
        upper-right corner of the screen).
        :return: int
        """
        return self._species_count
    @speciesCount.setter
    def speciesCount(self, value):
        """
        Set the species count and emit speciesCountChanged; None is ignored.
        :param value: int - value to set it to
        :return: None
        """
        if value is None:
            return
        self._species_count = value
        self.speciesCountChanged.emit()
    @pyqtProperty(QObject, notify=speciesModelChanged)
    def currentSpeciesModel(self):
        """
        The available-species model currently shown in the view — one of the
        full list, most-recent list, or debris models.
        :return: SpeciesListModel
        """
        return self._current_species_model
    @currentSpeciesModel.setter
    def currentSpeciesModel(self, model):
        """
        Swap the active available-species model and notify QML.
        :param model: SpeciesListModel
        :return: None
        """
        self._current_species_model = model
        self.speciesModelChanged.emit()
    @pyqtProperty(QVariant)
    def species(self):
        """
        Full listing of taxa loaded from CATCH_CONTENT_VW at startup.
        :return: list of dicts - one per taxon
        """
        return self._species
    @pyqtProperty(FramListModel, notify=speciesModelChanged)
    def FullAvailableSpeciesModel(self):
        """
        Model backing the tvAvailableSpecies TableView (full species list).
        :return: SpeciesListModel
        """
        return self.avFullModel
    # TODO (todd.hay) Implement NOTIFY signal per warning I'm receiving and discussion of it here:
    # http://stackoverflow.com/questions/6728615/warning-about-non-notifyable-properties-in-qml
    @pyqtProperty(FramListModel, notify=speciesModelChanged)
    def MostRecentAvailableSpeciesModel(self):
        """
        Model of the species encountered in recent operations.
        :return: SpeciesListModel
        """
        return self.avRecentModel
    @pyqtProperty(FramTreeModel, notify=speciesModelChanged)
    def SelectedSpeciesModel(self):
        """
        Tree model of the species/mixes/debris selected for the current haul.
        :return: SpeciesTreeModel (a FramTreeModel)
        """
        return self.seModel
    @pyqtProperty(FramListModel, notify=speciesModelChanged)
    def DebrisModel(self):
        """
        Model of the available debris items.
        :return: SpeciesListModel
        """
        return self.avDebrisModel
    @pyqtSlot(QModelIndex, result=QVariant)
    def getParent(self, idx):
        """
        Return the tree item that would act as parent for a new entry: the
        item at *idx* when it is a Mix/Submix, otherwise the root item.
        :param idx: QModelIndex - currently selected row in the tree
        :return: FramTreeItem
        """
        model = self.seModel
        typeCol = model.get_role_number("type")
        type = model.data(idx, typeCol).value()
        if type and (type == "Mix" or type == "Submix"):
            parent = model.item(idx).value()
        else:
            parent = model._rootItem
        return parent
    @pyqtSlot(QModelIndex, QVariant, result=bool)
    def checkTaxonId(self, idx, selection):
        """
        Determine if a species with the given taxonomy id already exists at the
        current level of the tvSelectedSpecies FramTreeModel. If so, don't add
        it, just highlight that row.

        NOTE(review): the duplicate check is not implemented yet — the loop
        only logs each selected row and the method always returns False, so no
        duplicates are currently detected.
        :param idx: QModelIndex - index of the selected row in tvSelectedSpecies
        :param selection: rows selected in the available-species view
        :return: bool - True if the taxon id already exists (currently always False)
        """
        sel_model = self.seModel
        root = sel_model._rootItem
        # rootIndex = model.createIndex(root.row, 0, root)
        taxonCol = sel_model.get_role_number("taxonomyId")
        typeCol = sel_model.get_role_number("type")
        type = sel_model.data(idx, typeCol).value()
        if type and (type == "Mix" or type == "Submix"):
            parent = sel_model.item(idx).value()
        else:
            parent = root
        logging.info('selection: ' + str(selection))
        for row in selection:  # self.currentSpeciesModel.selectionModel():
            logging.info("row: " + str(row))
        result = False
        return result
    @pyqtSlot(QJSValue, QModelIndex, str)
    def append_tree_item_with_sql(self, data, idx, parent_type):
        """
        User-driven insert: add a row to the selected-species tree AND insert
        a matching record into the CATCH table, writing the new CATCH_ID back
        into the tree row.  (initialize_tree uses append_tree_item instead,
        which skips the database insert.)
        :param data: QJSValue dict - role values for the new row
        :param idx: QModelIndex - currently selected item in tvSelectedSpecies
        :param parent_type: str - type of the selected item: Mix, Submix,
            Taxon, or Debris; new non-debris items nest under a Mix/Submix
        :return: None
        """
        if isinstance(data, QJSValue):
            data = data.toVariant()  # Convert from QJSValue to dict
        # Get references to key objects of interest
        model = self.seModel
        dataType = data["type"]
        # Insert a new row and get a handle to the newly inserted child + it's row position
        if (parent_type == "Mix" or parent_type == "Submix") and dataType != "Debris":  # Mix is the current type
            parent = model.getItem(idx)
            parentIdx = idx
        # elif parent_type == "Taxon" or parent_type == "Debris":  # Taxon or Debris is the current type
        #     parent = model._rootItem
        #     parentIdx = QModelIndex()
        else:  # Type is None - nothing is selected
            parent = model._rootItem
            parentIdx = QModelIndex()
        # insertRows > position, count, parent index
        status = model.insertRows(parent.childCount(), 1, parentIdx)
        # status = model.insertRows(parent.childCount(), 1, idx.parent())
        child = parent.child(parent.childCount()-1)
        row = child.row
        # Update the speciesCount - I call the method which then emits a signal
        if dataType == "Taxon":
            self.speciesCount += 1
        # Update the newly created child/row data with the data from tvAvailableSpecies model
        for element in data:
            if element in model.rootData:  # and data[element] is not None: # and data[element] != "":
                column = model.getColumnNumber(element)
                if column >= 0:
                    index = model.createIndex(row, column, child)
                    role = model.getRoleNumber(role_name=element)
                    # Debris rows get a display prefix in the tree only.
                    if element == "displayName" and data["type"] == "Debris":
                        data[element] = "Debris - " + data[element]
                    status = model.setData(index, data[element], role)
        # Update the model._descendantSpecies list - do this after the data has been updated
        colNum = model.getColumnNumber("taxonomyId")
        taxonId = child.data(colNum)
        if taxonId.value():
            model.append_descendant(taxonId)
        # Insert new record in the CATCH table for the given haul
        is_debris = "False"
        is_mix = "False"
        displayName = data["displayName"]
        catchContentId = None
        if data["type"] == "Debris":
            # Strip the tree-only prefix before persisting.
            displayName = displayName.replace("Debris - ", "")
            is_debris = "True"
            catchContentId = data["catchContentId"]
        elif data["type"] == "Mix" or data["type"] == "Submix":
            is_mix = "True"
        elif data["type"] == "Taxon":
            catchContentId = data["catchContentId"]
        # Determine if a PARENT_CATCH_ID exists for this record or not
        parentCatchId = None
        if parent.data(model.getColumnNumber("displayName")).value() != "displayName":
            parentCatchId = parent.data(model.getColumnNumber("catchId")).value()
        # TODO todd hay - remove MIX_NUMBER from CATCH table - do we need this anymore?
        # TODO todd hay - CATCH Table - Drop OPERATION_TYPE_ID
        sql = "INSERT INTO CATCH (PARENT_CATCH_ID, CATCH_CONTENT_ID, DISPLAY_NAME, IS_MIX, IS_DEBRIS, OPERATION_ID) " + \
            "VALUES(?, ?, ?, ?, ?, ?);"
        params = [parentCatchId, catchContentId, displayName, is_mix, is_debris, self._app.state_machine._haul["haul_id"]]
        # print('params: ' + str(params))
        result = self._db.execute(query=sql, parameters=params)
        if result:
            # Write the generated primary key back into the tree row.
            catchId = self._db.get_last_rowid()
            column = model.getColumnNumber("catchId")
            index = model.createIndex(row, column, child)
            role = model.getRoleNumber(role_name="catchId")
            status = model.setData(index, catchId, role)
    @pyqtSlot(QModelIndex)
    def remove_tree_item(self, index):
        """
        Remove an item from the selected-species tree, adjust the species
        count (recursing into mixes/submixes), clear the activeMix if it is
        being removed, and delete the item plus all of its descendant CATCH
        and SPECIMEN rows from the database via recursive CTEs.
        :param index: QModelIndex - the item to remove
        :return: None
        """
        if not isinstance(index, QModelIndex):
            return
        model = self.seModel
        # Get the existing catchId from the data - Do before deleting the actual row
        item = model.getItem(index)
        typeCol = model.getColumnNumber("type")
        catchId = item.data(model.getColumnNumber("catchId")).value()
        type = item.data(typeCol).value()
        if type == "Taxon":
            self.speciesCount -= 1
        elif type == "Mix":
            if isinstance(self._active_mix, QJSValue):
                self._active_mix = self._active_mix.toVariant()
            if catchId == self._active_mix["catchId"]:
                self.activeMix = {"displayName": None, "catchId": None}
            # recurse to check all children + subchildren
            self.speciesCount -= len([x for x in item.children if x.data(typeCol).value() == "Taxon"])
            submixes = [x for x in item.children if x.data(typeCol).value() == "Submix"]
            for submix in submixes:
                self.speciesCount -= len([x for x in submix.children if x.data(typeCol).value() == "Taxon"])
                # If the submix is the activeMix and we're removing the submix, then set the activeMix to None
                if submix.data(model.getColumnNumber('catchId')).value() == self._active_mix["catchId"]:
                    self.activeMix = {"displayName": None, "catchId": None}
        elif type == "Submix":
            if isinstance(self._active_mix, QJSValue):
                self._active_mix = self._active_mix.toVariant()
            if catchId == self._active_mix["catchId"]:
                self.activeMix = {"displayName": None, "catchId": None}
            # recurse to check all children
            self.speciesCount -= len([x for x in item.children if x.data(typeCol).value() == "Taxon"])
        # Remove the rows
        parentIdx = model.parent(index)
        status = model.removeRows(index.row(), 1, parentIdx)
        # Decrement the species count - this is shown in the upper right corner of the screen
        # self.speciesCount -= 1
        # Delete from the database: specimens first, then the catch subtree.
        if isinstance(catchId, int):
            catch_sql = """
                WITH RECURSIVE subcatch(n) AS (
                    SELECT CATCH_ID FROM CATCH WHERE CATCH_ID = ?
                    UNION
                    SELECT c.CATCH_ID FROM CATCH c, subcatch
                        WHERE c.PARENT_CATCH_ID = subcatch.n
                )
                DELETE FROM CATCH WHERE CATCH_ID in subcatch;
            """
            specimen_sql = """
                WITH RECURSIVE subcatch(n) AS (
                    SELECT CATCH_ID FROM CATCH WHERE CATCH_ID = ?
                    UNION
                    SELECT c.CATCH_ID FROM CATCH c, subcatch
                        WHERE c.PARENT_CATCH_ID = subcatch.n
                ),
                subspecimens(n) AS (
                    SELECT SPECIMEN_ID FROM SPECIMEN s INNER JOIN CATCH c
                        ON c.CATCH_ID = s.CATCH_ID WHERE c.CATCH_ID in subcatch
                    UNION
                    SELECT s.SPECIMEN_ID FROM SPECIMEN s, subspecimens
                        WHERE s.PARENT_SPECIMEN_ID = subspecimens.n
                )
                DELETE FROM SPECIMEN WHERE SPECIMEN_ID IN subspecimens;
            """
            params = [catchId, ]
            self._db.execute(query=specimen_sql, parameters=params)
            self._db.execute(query=catch_sql, parameters=params)
def _get_debris(self):
"""
Method to retrieve all of the debris items from the database. This is used to populate the list of
possibel debris in the ProcessCatchScreen
:return: list - containing the list of debris from CATCH_CONTENT_LU
"""
debris = []
sql = "SELECT * FROM CATCH_CONTENT_VW WHERE TYPE = 'Debris';"
for d in self._db.execute(sql):
new_debris = dict()
new_debris["displayName"] = d[2]
new_debris["weight"] = None
new_debris["count"] = None
new_debris["type"] = d[13]
new_debris["catchContentId"] = d[14]
debris.append(new_debris)
debris = sorted(debris, key=lambda x: x['displayName'].upper())
return debris
def _get_species(self):
"""
Method to retrieve all of the species from the database. This is used to populate the list of
possible species in the ProcessCatchScreen
:return: dictionary containing the species
"""
species = []
# Get all of the FRAM-specific protocols, tied to the TAXONOMY_ID - this is used to update the protocol
# display below as there might be non-FRAM PI's who have a sampling plan for a given TAXONOMY_ID
protocols = self._get_fram_protocols()
# TODO (todd.hay) Get the species-specific protocol information as well
sql = "SELECT * FROM CATCH_CONTENT_VW WHERE TYPE = 'Taxon';"
# sql = "SELECT CONTENTS_ID, SCIENTIFIC_NAME, COMMON_NAME_1, COMMON_NAME_2, COMMON_NAME_3, DISPLAY_NAME, " + \
# "HISTORICAL_DEPTH_MIN, HISTORICAL_DEPTH_MAX, HISTORICAL_LAT_MIN, HISTORICAL_LAT_MAX, IS_MOST_RECENT " + \
# "FROM CATCH_CONTENTS_LU c INNER JOIN TYPES_LU t ON c.CONTENT_TYPE_ID = t.TYPE_ID " + \
# "WHERE t.CATEGORY = 'Content' AND t.TYPE = 'Taxon';"
for s in self._db.execute(sql):
new_species = dict()
new_species["taxonomyId"] = s[0]
new_species["protocol"] = protocols[s[0]] if s[0] in protocols else s[1]
new_species["displayName"] = s[2]
new_species["scientificName"] = s[3]
new_species["commonName1"] = s[4] if s[4] else ""
new_species["commonName2"] = s[5] if s[5] else ""
new_species["commonName3"] = s[6] if s[6] else ""
new_species["weight"] = None
new_species["count"] = None
new_species["depthMin"] = s[7] if s[7] else None
new_species["depthMax"] = s[8] if s[8] else None
new_species["latMin"] = s[9] if s[9] else None
new_species["latMax"] = s[10] if s[10] else None
new_species["isMostRecent"] = s[11] if s[11] else "False"
new_species["isLastNOperations"] = s[12] if s[12] else ""
new_species["type"] = s[13] if s[13] else None
new_species["catchContentId"] = s[14] if s[14] else None
species.append(new_species)
species = sorted(species, key=lambda x: x['displayName'].upper())
return species
@staticmethod
def _set_selected_species_model():
"""
Method that defines the species already selected for the self._activeHaul
:return: FramTreeModel - the model used with the tvSelectedSpecies TreeView
"""
# TODO Need to add sampleType (i.e. fish, salmon, coral - to drive Fish Sampling Screen)
# headers = ["taxonomyId", "displayName", "scientificName",
# "protocol", "weight", "count", "depthMin", "depthMax", "latMin", "latMax",
# "isMostRecent", "isLastNOperations", "type", "sampleType", "catchContentId", "catchId"]
headers = ["taxonomyId", "scientificName", "commonName1", "commonName2", "commonName3", "displayName",
"protocol", "weight", "count", "depthMin", "depthMax", "latMin", "latMax",
"isMostRecent", "isLastNOperations", "type", "sampleType", "catchContentId", "catchId"]
data = []
species_model = SpeciesTreeModel(headers=headers, data=data)
return species_model
    @pyqtProperty(QVariant, notify=selectedIndexChanged)
    def selectedIndex(self):
        """
        The currently selected tree item, as last assigned via the setter.

        :return: the stored value; callers such as updateWeightCount treat it
            as a dict holding a "currentIndex" QModelIndex
        """
        return self._selected_index
    @selectedIndex.setter
    def selectedIndex(self, value):
        # QML hands the value over wrapped in a QJSValue; unwrap it to a plain
        # Python object before storing so dict-style access works downstream.
        if isinstance(value, QJSValue):
            value = value.toVariant()
        self._selected_index = value
        self.selectedIndexChanged.emit()
@pyqtSlot()
def updateWeightCount(self):
"""
Method called when returning from WeighBaskets to update the weights/num basket count of the
selected species
:return:
"""
# Get the update weight/count data
catch_id = self._app.state_machine.species["catch_id"]
results = self._get_basket_weights_counts(catch_id=catch_id)
# logging.info('selectedIndex: {0}'.format(self.selectedIndex))
try:
# Update the model
model = self.seModel
idx = self.selectedIndex["currentIndex"]
item = model.getItem(idx)
row = idx.row()
for x in list(results):
col = model.getColumnNumber(x)
if col != -1:
index = model.createIndex(row, col, item)
value = results[x]
role = model.getRoleNumber(role_name=x)
status = model.setData(index, value, role)
logging.info('{0} = {1}, row: {2}, col: {3}, role: {4}, status: {5}'.
format(x, value, row, col, role, status))
# logging.info('rootData: {0}'.format(model.rootData))
except Exception as ex:
pass
def _get_basket_weights_counts(self, catch_id):
"""
Method to get the total weight + number of baskets for the given catch_id. This is called
by initialize_tree when entering ProcessCatch and by TrawlBackdeckStateMachine when
returning to ProcessCatch from the WeighBaskets screen, so as to update the values for the
currently selected species
:param catch_id: int
:return: dict - contains the "weight" and "count"
"""
if not isinstance(catch_id, int):
return
try:
display_name = Catch.select().where(Catch.catch == catch_id).get().display_name
except DoesNotExist as ex:
display_name = ""
logging.info('Could not find the display name: ' + str(ex))
baskets_sql = """
WITH RECURSIVE subcatch(n) AS (
SELECT c.CATCH_ID FROM CATCH c
WHERE c.CATCH_ID = ?
UNION
SELECT c.CATCH_ID FROM CATCH c, subcatch
WHERE c.PARENT_CATCH_ID = subcatch.n AND c.DISPLAY_NAME = ?
)
select WEIGHT_KG, SAMPLE_COUNT_INT from CATCH c
WHERE c.CATCH_ID in subcatch AND c.RECEPTACLE_SEQ IS NOT NULL
"""
params = [catch_id, display_name]
total_weight = 0
num_baskets = 0
for basket in self._db.execute(query=baskets_sql, parameters=params):
total_weight += basket[0] if basket[0] else 0
num_baskets += 1 if basket[0] else 0
# logging.info('display name: ' + str(display_name) + ', weight: ' + str(total_weight) + ', count: ' + str(num_baskets))
return {"weight": total_weight, "count": num_baskets}
    @pyqtSlot(result=QVariant)
    def checkSpeciesForData(self):
        """
        Method to determine if catch / specimen data has been collected for the
        currently active species (self._app.state_machine.species).
        Walks the full catch subtree (recursive CTE) and counts baskets and
        top-level specimens.
        :return: QVariant - dict with "baskets" and "specimens" counts;
            both set to -1 on any failure
        """
        try:
            results = {"baskets": 0, "specimens": 0}
            catch_id = self._app.state_machine.species["catch_id"]
            # Count baskets: every CATCH row in the subtree with a receptacle
            baskets_sql = """
                WITH RECURSIVE subcatch(n) AS (
                    SELECT c.CATCH_ID FROM CATCH c
                        WHERE c.CATCH_ID = ?
                    UNION
                    SELECT c.CATCH_ID FROM CATCH c, subcatch
                        WHERE c.PARENT_CATCH_ID = subcatch.n
                )
                select count(*) from CATCH c WHERE c.CATCH_ID in subcatch
                    AND c.RECEPTACLE_SEQ IS NOT NULL
            """
            for basket in self._db.execute(query=baskets_sql, parameters=[catch_id,]):
                results["baskets"] = basket[0]
            # Count specimens attached anywhere in the subtree; the final
            # PARENT_SPECIMEN_ID IS NULL filter keeps only top-level specimens
            specimens_sql = """
                WITH RECURSIVE subcatch(n) AS (
                    SELECT c.CATCH_ID FROM CATCH c WHERE c.CATCH_ID = ?
                    UNION
                    SELECT c.CATCH_ID FROM CATCH c, subcatch
                        WHERE c.PARENT_CATCH_ID = subcatch.n
                ),
                subspecimens(n) AS (
                    SELECT SPECIMEN_ID FROM SPECIMEN s INNER JOIN CATCH c
                        ON c.CATCH_ID = s.CATCH_ID WHERE c.CATCH_ID in subcatch
                    UNION
                    SELECT s.SPECIMEN_ID FROM SPECIMEN s, subspecimens
                        WHERE s.PARENT_SPECIMEN_ID = subspecimens.n
                )
                SELECT count(*) FROM SPECIMEN WHERE SPECIMEN_ID IN subspecimens
                    AND PARENT_SPECIMEN_ID IS NULL;
            """
            for specimen in self._db.execute(query=specimens_sql, parameters=[catch_id,]):
                results["specimens"] = specimen[0]
        except Exception as ex:
            logging.info("Error getting basket and/or specimen counts: " + str(ex))
            return {"baskets": -1, "specimens": -1}
        return results
@pyqtSlot(result=QVariant)
def get_species_per_haul(self):
"""
Method to return all of the selected species for the self._haul
:return: list of dicts - containing all of the species for the given haul
"""
species = []
sql = "SELECT * FROM CATCH_VW WHERE HAUL_NUMBER = ?;"
sql = "SELECT c.CATCH_ID, c.PARENT_CATCH_ID, c.WEIGHT_KG, " + \
"c.SAMPLE_COUNT_INT, t.SCIENTIFIC_NAME, cc.DISPLAY_NAME " + \
"FROM CATCH c " + \
"INNER JOIN HAULS h ON c.OPERATION_ID = h.HAUL_ID " + \
"INNER JOIN CATCH_CONTENT_LU cc ON cc.CATCH_CONTENT_ID = c.CATCH_CONTENT_ID " + \
"INNER JOIN TAXONOMY_LU t ON cc.TAXONOMY_ID = t.TAXONOMY_ID " + \
"WHERE h.HAUL_ID = ?;"
params = [self._app.state_machine._haul["haul_id"], ]
for s in self._db.execute(query=sql, parameters=params):
new_species = {}
new_species["catch_partition_id"] = s[0]
new_species["parent_id"] = s[1] if s[1] else None
new_species["weight"] = s[2] if s[2] else None
new_species["count"] = s[3] if s[3] else None
new_species["scientific_name"] = s[4] if s[4] else None
new_species["display_name"] = s[5] if s[5] else None
species.append(new_species)
return species
def get_salmon_species(self):
"""
Method to return all of the salmon species. Used to drive the salmon-based FishSamplingScreen
selection in ProcessCatchScreen.qml
:return: list - all of the taxonomyId related to salmon species
"""
salmon = []
sql = "SELECT DISTINCT TAXONOMY_ID FROM SALMON_SPECIES_VW;"
for row in self._db.execute(query=sql):
salmon.append(row[0])
return salmon
def get_coral_species(self):
"""
Method to return all of the coral species. Used to drive the coral-based FishSamplingScreen
selection in ProcessCatchScreen.qml
:return: list - all of the taxonomyId related to salmon species
"""
corals = []
sql = "SELECT DISTINCT TAXONOMY_ID FROM CORAL_SPECIES_VW;"
for row in self._db.execute(query=sql):
corals.append(row[0])
return corals
def get_sponge_species(self):
"""
Method to return all of the sponge speccies. Used to drive the
sponge-based selection in ProcessCatchScreen.qml to push user over to the
SpecialActionsScreen.qml
:return:
"""
sponges = []
sql = "SELECT DISTINCT TAXONOMY_ID FROM SPONGE_SPECIES_VW;"
for row in self._db.execute(query=sql):
sponges.append(row[0])
return sponges
def get_rockfish_species(self):
"""
Method to return all of the rockfish species. Used to drive barcode collection
for Peter Sudmant (UC Berkeley) asking for muscle tissue for any rockfish
during 2019 survey season
:return:
"""
rockfish = []
sql = "SELECT DISTINCT TAXONOMY_ID FROM ROCKFISH_SPECIES_VW;"
for row in self._db.execute(query=sql):
rockfish.append(row[0])
return rockfish
@pyqtSlot(str, int, result=bool)
def checkSpeciesType(self, type, taxonId):
"""
Method to return the listing of the corals, as a pyqtProperty
:return:
"""
if type == "salmon":
return taxonId in self._salmon
elif type == "coral":
return taxonId in self._corals
elif type == "sponge":
return taxonId in self._sponges
elif type == "rockfish":
return taxonId in self._rockfish
    @pyqtSlot()
    def renameMixes(self):
        """
        Method called by ProcessCatchScreen.qml, in the removeSpecies function,
        when a mix or a submix is removed from the selected species TreeView.
        It renumbers the surviving "Mix #N" / "Submix #N" labels so they stay
        sequential, updating both the catch.display_name rows in the database
        and the on-screen model.  (The tree view itself handles row removal.)
        :return: None
        """
        try:
            logging.info(f"mixes: {self.seModel.mixCount}")
            # Resolve model column/role numbers once up front
            type_col = self.seModel.getColumnNumber("type")
            display_name_col = self.seModel.getColumnNumber("displayName")
            display_name_role = self.seModel.getRoleNumber(role_name="displayName")
            catch_id_col = self.seModel.getColumnNumber("catchId")
            catch_id_role = self.seModel.getRoleNumber(role_name="catchId")
            mixes = [x for x in self.seModel.rootItem.children
                     if x.data(column=type_col).value() == "Mix"]
            for mix_count, mix in enumerate(mixes):
                mix_display_name = mix.data(column=display_name_col).value()
                # strip("Mix #") removes the CHARACTERS M/i/x/space/# from both
                # ends (not the prefix); it works here only because the digits
                # that remain are never in that character set.
                if int(mix_display_name.strip("Mix #")) - 1 != mix_count:
                    catch_id = mix.data(column=catch_id_col).value()
                    value = f"Mix #{mix_count+1}"
                    # Persist the new label, then mirror it into the model
                    Catch.update(display_name = value).where(Catch.catch == catch_id).execute()
                    index = self.seModel.createIndex(mix.row, display_name_col, mix)
                    self.seModel.setData(index=index, value=value, role=display_name_role)
                    logging.info(f"mix to update, catch_id: {catch_id}, {mix_display_name} > {value}")
                # Renumber this mix's submixes the same way
                submixes = [x for x in mix.children
                            if x.data(column=type_col).value() == "Submix"]
                for submix_count, submix in enumerate(submixes):
                    sm_display_name = submix.data(column=display_name_col).value()
                    if int(sm_display_name.strip("Submix #")) - 1 != submix_count:
                        catch_id = submix.data(column=catch_id_col).value()
                        value = f"Submix #{submix_count+1}"
                        Catch.update(display_name=value).where(Catch.catch == catch_id).execute()
                        index = self.seModel.createIndex(submix.row, display_name_col, submix)
                        self.seModel.setData(index=index, value=value, role=display_name_role)
                        logging.info(f"submix to update, catch_id: {catch_id}, {sm_display_name} > {value}")
        except Exception as ex:
            logging.error(f"Error renaming the mixes: {ex}")
| [
"[email protected]"
]
| |
5bbd81b6220271c40f2bc0df4af86e81a6f67d38 | a8fa49c3a6c6d6a66a89089fdd013343f48b436e | /count.py | c58715b8be547e0c27acc75056cc92fa42edd5b9 | []
| no_license | avi527/Tuple | 007ec5b6e832c8fd94a418e7e28001d1d3347553 | 1fa58417a5a86bc541ae62bdcdacddc7f6592e1f | refs/heads/master | 2020-07-08T09:17:53.483953 | 2019-08-21T17:18:53 | 2019-08-21T17:18:53 | 203,630,240 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | # NOTE :- the count() method is used to return the number of elements with
#a specific value in a sequence
#program to count the number of times the letter 'a' appears in the specified string
tub='asasasnbcsdjbhjhfaaaaaaabjsdbfdhvfdjhb'
print(tub.count('a'))
| [
"[email protected]"
]
| |
2a5148f46a6509ada6a2311abb815eaa87a49e5a | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/tlm/data_gen/robust_gen/gen_runner2/pairwise_desc_neg_major.py | 5e2915d046f6499f00a526baceaf2ea5ad156de6 | []
| no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 678 | py | from functools import partial
from data_generator.job_runner import JobRunner
from epath import job_man_dir
from tlm.data_gen.adhoc_datagen import LeadingN
from tlm.data_gen.robust_gen.robust_generators import RobustPairwiseTrainGen2
from tlm.data_gen.run_robust_gen import RobustWorker
def main():
    """Kick off robust pairwise training-data generation (head/desc, neg-major enum)."""
    max_seq_length = 512
    # NOTE(review): LeadingN(max_seq_length, 1) presumably keeps only the leading
    # segment of each document at 512 tokens - confirm in tlm.data_gen.adhoc_datagen.
    encoder = LeadingN(max_seq_length, 1)
    # Freeze the generator configuration so JobRunner can build one RobustWorker per job
    worker_factory = partial(RobustWorker,
                             RobustPairwiseTrainGen2(encoder, max_seq_length, "desc", 1000, "neg_major_enum"))
    # 4 = highest job id handled by the runner
    runner = JobRunner(job_man_dir, 4, "robust_pairwise_head_desc_neg_major", worker_factory)
    runner.start()
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
bcfcfae6effa7e2b3cfddb5ad1e2af7d4f40caa6 | 09d564aaab98f72dce6585e78a0642c9fe3539f4 | /日常练习/python_exercise_20181124.py | 77a1eb50c86981a2e63439c1aafb739e42afc032 | []
| no_license | everydayxy/xy_py | 4b983b4bccc843602f1ea0b1d5ea9576119604bf | 08b314e7ecb10e13394aa93b92084c53596834f3 | refs/heads/master | 2020-04-03T08:52:44.729729 | 2019-09-20T15:05:35 | 2019-09-20T15:05:35 | 134,683,779 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 803 | py | # def aaa(n):
# count = len(str(n))
# w = 10 ** (count-1)
# for _ in range(count):
# print(n // w)
# n %= w
# w //= 10
#
# num = int(input('输入一个数字: '))
# aaa(num)
# #输入一个数字,打印最大值
# max1 = -100000000000000000000
# while True:
# try:
# num = int(input('请输入一个数字:'))
# if num > max1:
# max1 = num
# end = input('输入数字结束了吗??【y/n|Y/N】')
# if end == 'y' or end == 'Y':
# print('最大值为:', max1)
# break
# except ValueError:
# print('检测到非法字符,请重新输入')
# break
# Build one multiplication-table row per outer iteration: the row for value i
# holds the products 1*i .. i*i, each cell left-aligned in 4 characters.
for i in range(1, 10):
    s = ''.join('{}*{}={:<4}'.format(j, i, j * i) for j in range(1, i + 1))
print(s) | [
"[email protected]"
]
| |
f13d5c9080a5c0a35528af52c3526818137fe27e | 47386073517c1d5bd0d6e96ded48e0bbb9cdd7a4 | /src/study_cookbook/10模块和包/运行目录或压缩文件.py | 15c8839b803b0f8f296146c8eb820a6d8421bab1 | []
| no_license | halysl/python_module_study_code | f733eba00de75ebd1cdc9c1e9e36f3a7eee03c93 | 189fd3878b0abe68fd56e11357e88facdb4a186f | refs/heads/master | 2022-07-21T06:51:32.129654 | 2021-08-05T09:14:15 | 2021-08-05T09:14:15 | 148,780,484 | 1 | 0 | null | 2022-07-06T20:26:28 | 2018-09-14T11:39:21 | HTML | UTF-8 | Python | false | false | 277 | py | # -*- coding: utf-8 -*-
"""
myapplication/
spam.py
bar.py
grok.py
__main__.py
"""
"""
bash % python3 myapplication
"""
"""
bash % ls
spam.py bar.py grok.py __main__.py
bash % zip -r myapp.zip *.py
bash % python3 myapp.zip
... output from __main__.py ...
"""
| [
"[email protected]"
]
| |
32af9934c3684fece98b2a567b106cb16cc30b4c | 605d63d23bc2e07eb054979a14557d469787877e | /atest/testdata/variables/same_variable_file_names/different_variable_files/suite3/subsuite1/variable.py | 54f1c9224cb5c5536b9cbabadb8493758f6a6413 | [
"Apache-2.0",
"CC-BY-3.0"
]
| permissive | robotframework/robotframework | 407b0cdbe0d3bb088f9bfcf9ea7d16e22eee1ddf | cf896995f822f571c33dc5651d51365778b1cf40 | refs/heads/master | 2023-08-29T03:19:00.734810 | 2023-08-27T18:14:48 | 2023-08-28T18:14:11 | 21,273,155 | 8,635 | 2,623 | Apache-2.0 | 2023-09-05T04:58:08 | 2014-06-27T11:10:38 | Python | UTF-8 | Python | false | false | 38 | py | SUITE = SUITE_31 = "suite3.subsuite1"
| [
"[email protected]"
]
| |
b8b2d770fa56dc2737b20940196ba21100eeede9 | 85eff920f0f285abad84c2f6bcfd4f236f3976ab | /webservices/views/product/Discount.py | 2addd7a58ad5beee740e07d16fa28b10c0c2115c | []
| no_license | obxlifco/Web-Picking-App-GoGrocery | 8cf5f7924005a19764e5c4722a47bfd963965f2e | 6b084547bed2af43a67bada313d68e56f4228f96 | refs/heads/main | 2023-05-26T08:32:30.297317 | 2021-06-12T10:05:01 | 2021-06-12T10:05:01 | 315,206,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67,390 | py | from webservices.models import *
from django.http import Http404
from webservices.serializers import *
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
from rest_framework.parsers import JSONParser
from datetime import date,datetime
from rest_framework import generics
from itertools import chain
from django.core import serializers
from django.http import HttpResponse
from django.db.models import Q
from django.core.files.storage import FileSystemStorage
from webservices.views import loginview
from django.utils.crypto import get_random_string
import datetime
import json
import random
import os
import ast
import xlsxwriter
import xlrd
import sys
import traceback
from webservices.views.common import common
# class DiscountSet is used to insert Discount
class DiscountSet(generics.ListAPIView):
    """
    Inserts (or, when an 'id' is supplied, partially updates) a discount master
    record from request.data['value'], optionally writing freebie product
    mappings and single or multiple coupon codes.
    """

    def post(self, request, format=None):
        """
        Create a discount.  Branches on has_multiplecoupons ('n' = single
        coupon path, otherwise multi-coupon path) and, within the single
        path, on discount_master_type (non-zero = coupon-code based).
        :return: Response dict with status / api_status / message
        """
        company_db = loginview.db_active_connection(request)
        has_multy = request.data['value']
        # NOTE(review): `name` is no longer used - the slug-generation code it
        # fed was commented out (see repo history / common.create_discount_slug).
        name = has_multy['name']
        # Nested relation payloads are stripped before serializing the master row
        if 'DiscountMastersConditions' in has_multy.keys():
            has_multy.pop('DiscountMastersConditions')
        if 'DiscountMastersCoupons' in has_multy.keys():
            has_multy.pop('DiscountMastersCoupons')
        discount_master_type = has_multy['discount_master_type']
        # product_id_qty: comma-separated "product@qty" pairs for freebies
        product_id_qty = ""
        if 'product_id_qty' in has_multy.keys():
            product_id_qty = has_multy['product_id_qty']
        is_mul = has_multy['has_multiplecoupons']
        d1 = {'created': datetime.datetime.now(), 'modified': datetime.datetime.now(), 'used_coupon': 0}
        serializer_data = dict(has_multy, **d1)
        if is_mul == 'n':
            if discount_master_type != 0:
                # --- single coupon code: apply optional prefix/suffix, then
                # enforce uniqueness before saving ---
                coupon_code = has_multy['coupon_code']
                if has_multy['coupon_prefix'] is None:
                    has_multy['coupon_prefix'] = ""
                if has_multy['coupon_suffix'] is None:
                    has_multy['coupon_suffix'] = ""
                if has_multy['coupon_prefix'] is not None and has_multy['coupon_prefix'] != "":
                    # NOTE(review): str.strip(prefix) removes a CHARACTER SET,
                    # not the literal prefix - verify intent.
                    has_multy['coupon_code'] = str(has_multy['coupon_code']).strip(str(has_multy['coupon_prefix']))
                    coupon_code = str(has_multy['coupon_prefix']).strip() + str(coupon_code).strip()
                if has_multy['coupon_suffix'] is not None and has_multy['coupon_suffix'] != "":
                    has_multy['coupon_code'] = str(has_multy['coupon_code']).strip() + str(
                        has_multy['coupon_suffix']).strip()
                    coupon_code = str(coupon_code).strip() + str(has_multy['coupon_suffix']).strip()
                serializer_data['coupon_code'] = coupon_code
                cnt = EngageboostDiscountMasters.objects.filter(coupon_code=coupon_code).count()
                if cnt == 0:
                    # Upsert: when an 'id' is posted we partially update that row
                    if 'id' in has_multy.keys():
                        discount_id = has_multy['id']
                        creditObj = EngageboostDiscountMasters.objects.get(id=discount_id)
                        serializer_data.pop("id")
                        serializer = DiscountMasterSerializer(creditObj, data=serializer_data, partial=True)
                    else:
                        serializer = DiscountMasterSerializer(data=serializer_data, partial=True)
                    if serializer.is_valid():
                        serializer.save()
                        obj = EngageboostDiscountMasters.objects.latest('id')
                        last_id = obj.id
                        # Rebuild the freebie mappings from the "product@qty" list
                        if product_id_qty:
                            product_id_qtys = product_id_qty.split(",")
                            if len(product_id_qtys) > 0:
                                EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=last_id).delete()
                                for item in product_id_qtys:
                                    pro_qty = item.split("@")
                                    current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                                    EngageboostDiscountFreebieMappings.objects.create(discount_master_id=last_id,
                                                                                      product_id=pro_qty[0],
                                                                                      qty=pro_qty[1],
                                                                                      created=current_date,
                                                                                      modified=current_date)
                        data = {
                            'status': 1,
                            'api_status': {"id": last_id},
                            'message': 'Successfully Inserted',
                        }
                        return Response(data)
                    else:
                        data = {
                            'status': 0,
                            'api_status': serializer.errors,
                            'message': 'Data Not Found',
                        }
                        return Response(data)
                else:
                    data = {
                        'status': 0,
                        'message': 'Coupon code already exists',
                    }
                    return Response(data)
            else:
                # --- single path, non-coupon discount (discount_master_type == 0) ---
                if 'id' in has_multy.keys():
                    discount_id = has_multy['id']
                    creditObj = EngageboostDiscountMasters.objects.get(id=discount_id)
                    serializer_data.pop("id")
                    serializer = DiscountMasterSerializer(creditObj, data=serializer_data, partial=True)
                else:
                    serializer = DiscountMasterSerializer(data=serializer_data, partial=True)
                if serializer.is_valid():
                    serializer.save()
                    obj = EngageboostDiscountMasters.objects.latest('id')
                    last_id = obj.id
                    if product_id_qty:
                        product_id_qtys = product_id_qty.split(",")
                        if len(product_id_qtys) > 0:
                            EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=last_id).delete()
                            for item in product_id_qtys:
                                pro_qty = item.split("@")
                                current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                                EngageboostDiscountFreebieMappings.objects.create(discount_master_id=last_id,
                                                                                  product_id=pro_qty[0], qty=pro_qty[1],
                                                                                  created=current_date,
                                                                                  modified=current_date)
                    data = {
                        'status': 1,
                        'api_status': {"id": last_id},
                        'message': 'Successfully Inserted',
                    }
                    return Response(data)
                else:
                    data = {
                        'status': 0,
                        'api_status': serializer.errors,
                        'message': 'Data Not Found',
                    }
                    return Response(data)
        else:
            # --- multi-coupon path: save the master row, then attach either the
            # posted coupon list or freshly generated random codes ---
            d1 = request.data['value']
            # NOTE(review): d1 is immediately rebound here, so the
            # "'id' in d1" upsert check below can never see a posted id -
            # confirm whether has_multy was intended instead.
            d1 = {'created': datetime.datetime.now(), 'modified': datetime.datetime.now()}
            serializer_data = dict(has_multy, **d1)
            if 'id' in d1.keys():
                discount_id = d1['id']
                creditObj = EngageboostDiscountMasters.objects.get(id=discount_id)
                serializer_data.pop("id")
                serializer = DiscountMasterSerializer(creditObj, data=serializer_data, partial=True)
            else:
                serializer = DiscountMasterSerializer(data=serializer_data, partial=True)
            if serializer.is_valid():
                serializer.save()
                obj = EngageboostDiscountMasters.objects.latest('id')
                last_id = obj.id
                if product_id_qty:
                    product_id_qtys = product_id_qty.split(",")
                    if len(product_id_qtys) > 0:
                        EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=last_id).delete()
                        for item in product_id_qtys:
                            pro_qty = item.split("@")
                            current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                            EngageboostDiscountFreebieMappings.objects.create(discount_master_id=last_id,
                                                                              product_id=pro_qty[0], qty=pro_qty[1],
                                                                              created=current_date,
                                                                              modified=datetime.datetime.now())
                if 'multiple_coupons' in has_multy.keys():
                    if request.data['multiple_coupons']:
                        list_of_multiple_coupons = request.data['multiple_coupons']
                    else:
                        list_of_multiple_coupons = None
                else:
                    list_of_multiple_coupons = None
                if list_of_multiple_coupons:
                    # Caller supplied explicit coupon codes; skip duplicates
                    for coupon_code in list_of_multiple_coupons:
                        cnt = EngageboostDiscountMastersCoupons.objects.filter(coupon_code=coupon_code).count()
                        if cnt == 0:
                            User = EngageboostDiscountMastersCoupons.objects.create(website_id=has_multy['website_id'],
                                                                                    discount_master_id=last_id,
                                                                                    coupon_code=coupon_code,
                                                                                    created=datetime.datetime.now())
                            data = {
                                'status': 1,
                                'api_status': {"id": last_id},
                                'message': 'Successfully Inserted',
                            }
                        else:
                            data = {
                                'status': 0,
                                'message': 'Coupon code already exists',
                            }
                else:
                    # Generate number_of_coupon unique random 6-char codes
                    no_of_coupon = has_multy["number_of_coupon"]
                    flag = 0
                    sresult = 1
                    list_of_multiple_coupons = []
                    prefix = suffix = ""
                    if 'prefix' in has_multy.keys():
                        prefix = has_multy['prefix']
                    if 'suffix' in has_multy.keys():
                        suffix = has_multy['suffix']
                    while sresult != -1 and flag < no_of_coupon:
                        # NOTE(review): `string` does not appear in this file's
                        # visible imports - confirm it is imported elsewhere.
                        res = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6))
                        res = str(prefix) + str(res).lower() + str(suffix)
                        result = EngageboostDiscountMastersCoupons.objects.filter(isdeleted='n',
                                                                                  coupon_code=res).count()
                        if result > 0:
                            pass
                        else:
                            list_of_multiple_coupons.append(res)
                            sresult = int(flag)
                            flag += 1
                    for coupon_code in list_of_multiple_coupons:
                        User = EngageboostDiscountMastersCoupons.objects.create(website_id=has_multy['website_id'],
                                                                                discount_master_id=last_id,
                                                                                coupon_code=coupon_code,
                                                                                created=datetime.datetime.now())
                    data = {
                        'status': 1,
                        'api_status': {"id": last_id},
                        'message': 'Successfully Inserted',
                    }
                return Response(data)
            else:
                data = {
                    'status': 0,
                    'api_status': serializer.errors,
                    'message': 'Data Not Found',
                }
                return Response(data)
# class DiscountList is used to fetch list of all Discount
class DiscountList(generics.ListAPIView):
    """
    Fetch (GET) and update (PUT) a single discount master record, including
    its coupon codes, freebie mappings, and customer-group data.
    """

    def get_object(self, pk, request):
        """Return the EngageboostDiscountMasters row for pk or raise Http404."""
        company_db = loginview.db_active_connection(request)
        try:
            return EngageboostDiscountMasters.objects.using(company_db).get(pk=pk)
        except EngageboostDiscountMasters.DoesNotExist:
            raise Http404
    #///////////////////Fetch Single Row
    def get(self, request, pk, format=None):
        """
        Fetch one discount plus its coupon codes and the active customer groups.
        :return: Response dict with status / api_status / multiple_coupons /
            customer_group / message
        """
        company_db = loginview.db_active_connection(request)
        coupon_code=[]
        dis = self.get_object(pk,request)
        serializer = DiscountMasterSerializer(dis)
        couponcode1=EngageboostDiscountMastersCoupons.objects.using(company_db).all().filter(discount_master_id=pk)
        customergrp = EngageboostCustomerGroup.objects.using(company_db).all().filter(isdeleted='n',
                                                                                     isblocked='n').order_by('name')
        customer = CustomerGroupSerializer(customergrp, many=True)
        # Flatten the coupon rows into simple {coupon_code, is_used} dicts
        for coupon in couponcode1:
            coupon_array={'coupon_code':coupon.coupon_code,'is_used':coupon.is_used}
            coupon_code.append(coupon_array)
        if(serializer):
            data ={
                'status':1,
                'api_status':serializer.data,
                'multiple_coupons':coupon_code,
                'customer_group':customer.data,
                'message':'',
            }
        else:
            data ={
                'status':0,
                'api_status':serializer.errors,
                'message':'Data Not Found',
            }
        return Response(data)
    # Update Discount
    def put(self, request, pk, format=None,partial=True):
        """
        Update a discount.  Mirrors DiscountSet.post branching:
        has_multiplecoupons 'n' + non-zero discount_master_type = single coupon
        path; 'n' + type 0 = non-coupon path (also queues a product re-index);
        otherwise the multi-coupon path.
        :return: Response dict with status / api_status / message
        """
        company_db = loginview.db_active_connection(request)
        dis = self.get_object(pk,request)
        coupon_code = request.data['value']['coupon_code']
        # NOTE(review): `warehouse` is only referenced by code that is now
        # commented out (Elasticsearch bulk update) - kept for traceability.
        warehouse = request.data['value']['warehouse']
        has_multy = request.data['value']
        # Strip nested relation payloads before serializing the master row
        if 'DiscountMastersConditions' in has_multy.keys():
            has_multy.pop('DiscountMastersConditions')
        if 'DiscountMastersCoupons' in has_multy.keys():
            has_multy.pop('DiscountMastersCoupons')
        is_mul = has_multy['has_multiplecoupons']
        discount_master_type = has_multy['discount_master_type']
        product_id_qty = ""
        if 'product_id_qty' in has_multy.keys():
            product_id_qty = has_multy['product_id_qty']
        d1={'modified':datetime.datetime.now()}
        serializer_data=dict(has_multy,**d1)
        if is_mul == 'n':
            if discount_master_type != 0:
                # Single coupon: the code must be unique across OTHER discounts
                cnt=EngageboostDiscountMasters.objects.using(company_db).filter(coupon_code=coupon_code).filter(~Q(id=pk)).count()
                if cnt ==0:
                    serializer = DiscountMasterSerializer(dis,data=serializer_data,partial=True)
                    if serializer.is_valid():
                        latest = serializer.save()
                        # Rebuild freebie mappings from the "product@qty" list
                        if product_id_qty:
                            product_id_qtys = product_id_qty.split(",")
                            if len(product_id_qtys)>0:
                                EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=latest.id).delete()
                                for item in product_id_qtys:
                                    pro_qty = item.split("@")
                                    current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                                    EngageboostDiscountFreebieMappings.objects.create(discount_master_id=latest.id,product_id=pro_qty[0],qty=pro_qty[1],created=current_date,modified=current_date)
                        data = {
                            'status':1,
                            'api_status':'',
                            'message':'Successfully Updated',
                        }
                        return Response(data)
                    else:
                        data ={
                            'status':0,
                            'api_status':serializer.errors,
                            'message':'Data Not Found',
                        }
                        return Response(data)
                else:
                    data ={
                        'status':0,
                        'message':'Coupon code is already exists',
                    }
                    return Response(data)
            else:
                # Non-coupon discount: save, then queue an update so product
                # prices affected before AND after the edit get re-indexed
                serializer = DiscountMasterSerializer(dis,data=serializer_data,partial=True)
                if serializer.is_valid():
                    # Capture pre-update product ids / warehouses for the queue row
                    prev_products = list(EngageboostDiscountMastersConditions.objects.filter(discount_master_id = pk).values_list('all_product_id',flat=True))
                    prev_warehouses = EngageboostDiscountMasters.objects.filter(id=pk).first().warehouse_id
                    latest = serializer.save()
                    if product_id_qty:
                        product_id_qtys = product_id_qty.split(",")
                        if len(product_id_qtys)>0:
                            EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=latest.id).delete()
                            for item in product_id_qtys:
                                pro_qty = item.split("@")
                                current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                                EngageboostDiscountFreebieMappings.objects.create(discount_master_id=latest.id,product_id=pro_qty[0],qty=pro_qty[1],created=current_date,modified=current_date)
                    data ={
                        'status':1,
                        'api_status':'',
                        'message':'Successfully Updated',
                    }
                    # Union of pre- and post-update affected product id strings
                    objproduct_list = EngageboostDiscountMastersConditions.objects.filter(discount_master_id = pk).values_list('all_product_id',flat=True)
                    if(prev_products):
                        objproduct_list = list(objproduct_list)
                        objproduct_list.extend(prev_products)
                        objproduct_list = list(set(objproduct_list))
                    if objproduct_list:
                        # Each entry is a comma-separated id string; flatten to ints.
                        # (prooduct_id_list is currently unused beyond this point -
                        # the direct Elasticsearch bulk-update calls that consumed it
                        # were commented out in favor of the queue row below.)
                        prooduct_id_list = []
                        for prod in objproduct_list:
                            if prod:
                                prod = prod.split(',')
                                prooduct_id_list.extend(prod)
                        prooduct_id_list = list(map(int, prooduct_id_list))
                        # Defer the re-index to the background queue processor
                        EngageboostUpdateQueue.objects.create(discount_id=pk,
                                                              process_type='single',
                                                              operation_for='discount',
                                                              prev_warehouses=prev_warehouses)
                    return Response(data)
                else:
                    data ={
                        'status':0,
                        'api_status':serializer.errors,
                        'message':'Data Not Found',
                    }
                    return Response(data)
        else:
            # Multi-coupon path: save the master row, then upsert the posted codes
            d1= request.data['value']
            d1={'modified':datetime.datetime.now()}
            serializer_data=dict(has_multy,**d1)
            serializer = DiscountMasterSerializer(dis,data=serializer_data,partial=True)
            if serializer.is_valid():
                latest = serializer.save()
                if product_id_qty:
                    product_id_qtys = product_id_qty.split(",")
                    if len(product_id_qtys)>0:
                        EngageboostDiscountFreebieMappings.objects.filter(discount_master_id=latest.id).delete()
                        for item in product_id_qtys:
                            pro_qty = item.split("@")
                            current_date = datetime.datetime.now(datetime.timezone.utc).astimezone()
                            EngageboostDiscountFreebieMappings.objects.create(discount_master_id=latest.id,product_id=pro_qty[0],qty=pro_qty[1],created=current_date,modified=current_date)
                # NOTE(review): if request.data['multiple_coupons'] is empty,
                # `data` is never assigned before the return below - confirm
                # callers always post at least one coupon.
                for is_mul in d2:
                    cnt= EngageboostDiscountMastersCoupons.objects.using(company_db).filter(coupon_code=is_mul['coupon_code']).filter(~Q(discount_master_id=pk)).count()
                    if cnt ==0:
                        User = EngageboostDiscountMastersCoupons.objects.using(company_db).create(website_id=has_multy['website_id'],discount_master_id=pk,coupon_code=is_mul['coupon_code'],modified=datetime.datetime.now().date())
                        data ={
                            'status':1,
                            'api_status':'',
                            'message':'Successfully Updated',
                        }
                    else:
                        data ={
                            'status':0,
                            'api_status':'',
                            'message':'Coupon code is already exists',
                        }
                return Response(data)
            else:
                data ={
                    'status':0,
                    'api_status':serializer.errors,
                    'message':'Data Not Found',
                }
                return Response(data)
# Set discount conditions(Insert new records)
# Save discount data after save condition by cds on 11th Oct 2019
class DiscountConditions(generics.ListAPIView):
    """Replace a discount's condition rows and queue the affected products.

    POST payload:
        discount_master_id -- id of the discount whose conditions are replaced
        value              -- list of condition dicts to insert (the previous
                              condition rows are deleted first)

    The products matched by the old and the new condition sets are merged and
    recorded in EngageboostUpdateQueue so a background worker can re-sync
    channel pricing (direct Elastic updates are intentionally disabled here).
    """

    def _match_product_ids(self, conditions_data):
        """Return the product ids matched by serialized condition rows.

        Rows with condition '==' include products; any other condition
        excludes them.  Category conditions are expanded to the products in
        those categories.  Result is the include-set minus the exclude-set.
        """
        product_in_arr = []
        product_out_arr = []
        for cond in conditions_data:
            if cond["all_product_id"]:
                product_id_array = cond["all_product_id"].split(",")
                if cond["condition"] == "==":
                    product_in_arr = product_in_arr + product_id_array
                else:
                    product_out_arr = product_out_arr + product_id_array
            elif cond["all_category_id"]:
                category_id_array = cond["all_category_id"].split(",")
                find_category_products = EngageboostProductCategories.objects.filter(category_id__in=category_id_array).values_list('product_id', flat=True).distinct()
                find_category_products = list(find_category_products)
                if cond["condition"] == "==":
                    product_in_arr = product_in_arr + find_category_products
                else:
                    product_out_arr = product_out_arr + find_category_products
        return list(set(product_in_arr) - set(product_out_arr))

    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        discount_master_id = request.data['discount_master_id']
        warehouse = EngageboostDiscountMasters.objects.filter(id=discount_master_id).values_list('warehouse_id',
                                                                                                 flat=True)
        # Products matched by the conditions that exist BEFORE the replace.
        prev_conditions = EngageboostDiscountMastersConditions.objects.using(company_db).filter(discount_master_id=discount_master_id).all()
        prev_pro_ids = []
        if prev_conditions:
            prev_conditions_serializar = DiscountConditionsSerializer(prev_conditions, many=True)
            prev_pro_ids = self._match_product_ids(prev_conditions_serializar.data)
        # Replace the existing condition rows with the posted ones.
        EngageboostDiscountMastersConditions.objects.using(company_db).filter(discount_master_id=discount_master_id).delete()
        has_multy = request.data['value']
        # BUG FIX: ``serializer`` was undefined when no conditions were
        # posted, raising NameError at the status check below.
        serializer = None
        for data in has_multy:
            # Ids are assigned manually because rows live on a per-company
            # database.
            has_record = EngageboostDiscountMastersConditions.objects.using(company_db).last()
            if has_record:
                last_entry_of_table = EngageboostDiscountMastersConditions.objects.order_by('-id').latest('id')
                row_id = int(last_entry_of_table.id) + int(1)
            else:
                row_id = 1
            data = dict(data, **{"id": row_id})
            serializer = EngageboostDiscountMastersConditions.objects.using(company_db).create(**data)
        # Products matched by the conditions AFTER the replace.
        new_conditions = EngageboostDiscountMastersConditions.objects.using(company_db).filter(discount_master_id=discount_master_id).all()
        new_conditions_serializar = DiscountConditionsSerializer(new_conditions, many=True)
        new_pro_ids = self._match_product_ids(new_conditions_serializar.data)
        final_arr = list(set(prev_pro_ids + new_pro_ids))
        if final_arr:
            prooduct_id_list = []
            for prev_prov in final_arr:
                # BUG FIX: the original tested ``type(x) == 'str'`` (a type
                # object compared to a string literal, always False), so
                # comma-separated id strings were never split.
                if isinstance(prev_prov, str):
                    prooduct_id_list.extend(prev_prov.split(','))
                else:
                    prooduct_id_list.append(prev_prov)
            prooduct_id_list = list(map(int, prooduct_id_list))
            warehouse_lists = []
            if prooduct_id_list:
                # warehouse_id is stored as a comma-separated string per row.
                for warehouse_ids in warehouse:
                    warehouse_lists.extend(list(map(int, list(warehouse_ids.split(',')))))
            prev_pro_ids = list(map(str, prev_pro_ids))
            EngageboostUpdateQueue.objects.create(discount_id=discount_master_id,
                                                  process_type='single',
                                                  operation_for='discount',
                                                  prev_products=", ".join(prev_pro_ids))
        if serializer:
            data = {
                'status': 1,
                'api_status': '',
                'message': 'Successfully Inserted',
            }
        else:
            data = {
                'status': 0,
                'api_status': '',
                'message': 'Data Not Found',
            }
        return Response(data)
# return Response(datas)
# Set discount conditions Get single row and update
class DiscountConditionsSet(generics.ListAPIView):
    """Read-only endpoint returning the condition rows of one discount."""
    def get_object(self, pk,request):
        # Fetch one condition row by primary key on the company database,
        # or raise 404.  (Unused by get(); kept for API symmetry.)
        company_db = loginview.db_active_connection(request)
        try:
            return EngageboostDiscountMastersConditions.objects.using(company_db).get(pk=pk)
        except EngageboostDiscountMastersConditions.DoesNotExist:
            raise Http404
    def get(self, request, pk, format=None,many=True):
        """Return all condition rows for a discount.

        NOTE(review): ``pk`` is used as the discount_master_id here, not the
        condition row id -- confirm against callers.
        """
        company_db = loginview.db_active_connection(request)
        Conditions = EngageboostDiscountMastersConditions.objects.using(company_db).all().filter(discount_master_id=pk)
        serializer = DiscountConditionsSerializer(Conditions,many=True)
        # data1 = EngageboostCustomers.objects.using(company_db).all().filter(isdeleted='n',isblocked='n').order_by('-id')
        # serializer1 = CustomerSerializer(data1, many=True)
        # Channels = EngageboostChannels.objects.using(company_db).all().filter(isdeleted='n',isblocked='n').order_by('-id')
        # Channel = ChannelsSerializer(Channels, many=True)
        # Categories = EngageboostCategoryMasters.objects.using(company_db).all().filter(isdeleted='n',isblocked='n',parent_id=0).order_by('-id')
        # Category = CategoriesSerializer(Categories, many=True)
        # product = EngageboostProducts.objects.using(company_db).all().filter(isblocked='n',isdeleted='n')
        # product = BasicinfoSerializer(product,many=True)
        # NOTE(review): the serializer instance is always truthy, so the
        # else branch below is effectively unreachable.
        if(serializer):
            data ={
                'Rows':serializer.data,
                # 'customergrp':serializer1.data,
                # 'category':Category.data,
                # 'channel':Channel.data,
                # 'product':product.data
            }
        else:
            data ={
                'status':0,
                'api_status':serializer.errors,
                'message':'Data Not Found',
            }
        return Response(data)
    # def put(self, request, pk, format=None,many=True):
    #     Reviews = self.get_object(pk,request)
    #     has_multy=request.data['value']
    #     for data1 in has_multy:
    #         #print(data1)
    #         serializer = DiscountConditionsSerializer(Reviews,data=data1)
    #         if serializer.is_valid():
    #             serializer.save()
    #             data ={
    #                 'status':1,
    #                 'message':'Successfully Updated',
    #             }
    #         else:
    #             data ={
    #                 'status':0,
    #                 'message':'Data Not Found',
    #             }
    #     return Response(data)
# Fetch All CustomerGroup Record for page load web services
class CustomerGroupDiscount(generics.ListAPIView):
    """Paginated customer list plus the conditions of one discount.

    Used by the discount screen to pick customers; the response bundles the
    customer rows, the grid layout configuration and the discount's
    serialized condition rows.
    """
    def get_object(request, discount_master_id, format=None):
        # NOTE(review): first parameter acts as ``self``; this helper is not
        # called from get() below.
        company_db = loginview.db_active_connection(request)
        try:
            return EngageboostDiscountMastersConditions.objects.using(company_db).get(discount_master_id=discount_master_id)
        except EngageboostDiscountMastersConditions.DoesNotExist:
            raise Http404
    def get(self, request,discount_master_id, format=None,many=True):
        """Return paginated customers with grid layout and discount conditions.

        NOTE(review): search/order parameters are read from request.data on a
        GET request -- confirm the frontend actually sends a body here.
        """
        company_db = loginview.db_active_connection(request)
        # pk=request.data.get('pk')
        user = EngageboostDiscountMastersConditions.objects.using(company_db).all().filter(discount_master_id=discount_master_id)
        serializer = DiscountConditionsSerializer(user,many=True)
        #####################Query Generation#################################
        # Build the customer queryset from optional search/order parameters.
        if request.data.get('search') and request.data.get('order_by'):
            key=request.data.get('search')
            order_by=request.data.get('order_by')
            order_type=request.data.get('order_type')
            if(order_type=='+'):
                order=order_by
            else:
                order='-'+order_by
            result = EngageboostCustomers.objects.using(company_db).all().order_by(order).filter(Q(first_name__icontains=key)|Q(last_sku__icontains=key)|Q(email__icontains=key))
        elif request.data.get('search'):
            key=request.data.get('search')
            result = EngageboostCustomers.objects.using(company_db).all().order_by('-id').filter(Q(first_name__icontains=key)|Q(last_sku__icontains=key)|Q(email__icontains=key))
        elif request.data.get('order_by'):
            order_by=request.data.get('order_by')
            order_type=request.data.get('order_type')
            if(order_type=='+'):
                order=order_by
            else:
                order='-'+order_by
            result = EngageboostCustomers.objects.using(company_db).all().order_by(order)
        else:
            result = EngageboostCustomers.objects.using(company_db).all().order_by('-id')
        # NOTE(review): this excludes the customer whose pk equals the
        # DISCOUNT id -- looks like a copy/paste mistake; confirm intent.
        result=result.filter(~Q(pk=discount_master_id)).filter(isblocked='n',isdeleted='n')
        #print(request.data.get('search'))
        page = self.paginate_queryset(result)
        #####################Query Generation#################################
        #####################Layout#################################
        # Assemble the grid column layout for the Customers list screen.
        # NOTE(review): when page is None the method implicitly returns None.
        if page is not None:
            serializer_product = CustomerSerializer(page, many=True)
            module='Customers'
            screen_name='list'
            layout_fetch=EngageboostGridLayouts.objects.using(company_db).get(module=module,screen_name=screen_name)
            layout_header=layout_fetch.header_name.split("@@")
            layout_field=layout_fetch.field_name.split("@@")
            layout_check=EngageboostGridColumnLayouts.objects.using(company_db).filter(module=module,screen_name=screen_name).count()
            layout={}
            layout_arr=[]
            for header,field in zip(layout_header,layout_field):
                # Fields may be dotted ("parent.child"); split accordingly.
                ex_layout_field=field.split(".")
                field_name=ex_layout_field[0]
                if len(ex_layout_field)>1:
                    child_name=ex_layout_field[1]
                else:
                    child_name=''
                if(layout_check):
                    # A saved column layout hides columns not in its header list.
                    layout_column_fetch=EngageboostGridColumnLayouts.objects.using(company_db).get(module=module,screen_name=screen_name)
                    layout_column_header=layout_column_fetch.header_name
                    layout_column_field=layout_column_fetch.field_name
                    if header in layout_column_header:
                        status=1
                    else:
                        status=0
                else:
                    status=1
                layout={"title":header,"field":field_name,"child":child_name,"show":status}
                layout_arr.append(layout)
            #####################Layout#################################
            pre_data={}
            final_data=[]
            pre_data['result']=serializer_product.data
            pre_data['layout']=layout_arr
            pre_data['discount']=serializer.data
            final_data.append(pre_data)
            return self.get_paginated_response(final_data)
# Fetch All Category Record for page load web services
class CategoryLoed(APIView):
    """Return all active top-level categories for the discount setup screen."""

    def get(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        Categories = EngageboostCategoryMasters.objects.using(company_db).all().filter(isdeleted='n', isblocked='n', parent_id=0).order_by('-id')
        Category = CategoriesSerializer(Categories, many=True)
        # A serializer instance is always truthy, so the else branch is a
        # defensive fallback only.
        if Category:
            data = {
                'status': 1,
                'category': Category.data,
            }
        else:
            # BUG FIX: the original referenced an undefined name
            # ``serializer`` here, which would have raised NameError.
            data = {
                'status': 0,
                'api_status': '',
                'message': 'Data Not Found',
            }
        return Response(data)
# Select child categories for a parent category
class Getchild_category(APIView):
    """Return the immediate child categories of a given parent category."""

    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        parent_id = request.data['category_id']
        children = EngageboostCategoryMasters.objects.using(company_db).all().filter(parent_id=parent_id)
        # Only id and name are needed by the cascading dropdowns.
        payload = [{"id": child.id, "name": child.name} for child in children]
        return HttpResponse(json.dumps({"category": payload, "status": 1}), content_type='application/json')
class ProductLoad(generics.ListAPIView):
    """List up to 100 active products, optionally filtered by a search key.

    POST payload: ``search`` (substring of name/SKU/price, may be empty),
    ``order_by`` (field name) and ``order_type`` ('+' ascending, anything
    else descending).
    """

    def _rows(self, products):
        # Serialize only the fields the discount screens need.
        return [{'id': p.id, 'name': p.name, 'sku': p.sku, 'default_price': p.default_price} for p in products]

    def post(self, request, format=None, many=True):
        company_db = loginview.db_active_connection(request)
        order_by = request.data['order_by']
        # '+' sorts ascending; anything else descending.  BUG FIX: the
        # original raised NameError for order_type values other than '+'/'-'.
        order = order_by if request.data['order_type'] == '+' else '-' + order_by
        if request.data['search']:
            key = request.data['search']
            match = Q(name__icontains=key) | Q(sku__icontains=key) | Q(default_price__icontains=key)
            cnt = EngageboostProducts.objects.using(company_db).filter(match).filter(isblocked='n', isdeleted='n').count()
            if cnt != 0:
                result = EngageboostProducts.objects.using(company_db).all().order_by(order).filter(match).filter(isblocked='n', isdeleted='n')[:100]
                return Response({'product': self._rows(result)})
            # No match: the frontend expects an empty string, not a list.
            return Response({'product': ''})
        result = EngageboostProducts.objects.using(company_db).all().order_by(order).filter(isblocked='n', isdeleted='n')[:100]
        return Response({'product': self._rows(result)})
class ProductLoadPaging(generics.ListAPIView):
    # """ List all products from web services """
    def post(self, request, format=None,many=True):
        """Paginated product list of cross-sell candidates for one product.

        POST payload: ``search``, ``order_type``/``order_by`` and
        ``product_id``.  Returns products in the same (last) category as
        ``product_id`` that are not already linked as cross-sell products.
        """
        company_db = loginview.db_active_connection(request)
        #####################Query Generation#################################
        #print('Chkardahr Sahoo')
        key = ''
        if request.data['search']:
            key = request.data['search']
        order_type = request.data['order_type']
        order_by = request.data['order_by']
        product_id = request.data['product_id']
        # NOTE(review): ``order`` stays undefined if order_type is neither
        # '+' nor '-' and would raise NameError at order_by() below.
        if(order_type=='+'):
            order = order_by
        elif(order_type=='-'):
            order = '-'+order_by
        # NOTE(review): parentProduct/ownchildProduct are computed but unused.
        parentProduct = EngageboostCossSellProducts.objects.values('product_id').filter(~Q(product_id=product_id))
        # print("Parents",parentProduct)
        ownchildProduct = EngageboostCossSellProducts.objects.values('cross_product_id').filter(~Q(product_id=product_id))
        # print("Childs",ownchildProduct,ownchildProduct.query)
        proObj = EngageboostProducts.objects.using(company_db).filter(isblocked='n',isdeleted='n').filter(~Q(id=product_id)).values('id')
        product_ids = []
        # Keep only products with no cross-sell link to any other product.
        if proObj.count()>0:
            result = proObj.all()
            for item in result:
                check = EngageboostCossSellProducts.objects.filter(Q(product_id=item['id'])|Q(cross_product_id=item['id'])).filter(~Q(product_id=product_id))
                if check.count()==0:
                    product_ids.append(item['id'])
            proObj = EngageboostProducts.objects.using(company_db).filter(id__in=product_ids)
        # Restrict to products sharing the source product's last category.
        # NOTE(review): catProduct may be None when product_id has no
        # category row -- the next line would then raise AttributeError.
        catProduct = EngageboostProductCategories.objects.filter(isblocked='n',isdeleted='n',product_id=product_id).last()
        sameCatProduct = EngageboostProductCategories.objects.filter(isblocked='n',isdeleted='n',category_id=catProduct.category_id).values('product_id')
        proObj = proObj.filter(id__in=sameCatProduct)
        if key != '':
            proObj = proObj.filter(Q(name__icontains=key)|Q(sku__icontains=key)|Q(default_price__icontains=key))
        # print(proObj.query)
        cnt = proObj.count()
        # NOTE(review): when cnt != 0 but page is None the method implicitly
        # returns None.
        if cnt !=0:
            result = proObj.all().order_by(order)[:100]
            page = self.paginate_queryset(result)
            arr=[]
            if page is not None:
                serializer_product = EngageboostProductsSerializer(page, many=True)
                serializer_product = serializer_product.data
                for product in serializer_product:
                    data ={
                        'id':product['id'],
                        'name':product['name'],
                        'sku':product['sku'],
                        'default_price':product['default_price']
                    }
                    arr.append(data)
                return self.get_paginated_response(arr)
        else:
            data2 = {
                "result":[]
            }
            return Response(data2)
# Customer group web services for Discount Setup Load
class CustomerLoed(APIView):
    """Return all active customer groups for the discount setup screen."""

    def get(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        customergrp = EngageboostCustomerGroup.objects.using(company_db).all().filter(
            isdeleted='n', isblocked='n').order_by('name')
        customer = CustomerGroupSerializer(customergrp, many=True)
        # A serializer instance is always truthy, so the else branch is a
        # defensive fallback only.
        if customer:
            data = {
                'status': 1,
                'customer': customer.data,
            }
        else:
            # BUG FIX: the original referenced an undefined name
            # ``serializer`` here, which would have raised NameError.
            data = {
                'status': 0,
                'api_status': '',
                'message': 'Data Not Found',
            }
        return Response(data)
class CustomerType(generics.ListAPIView):
    """List up to 100 active customer groups, optionally filtered by name.

    POST payload: ``search`` (name substring, may be empty), ``order_by``
    and ``order_type`` ('+' ascending, anything else descending).
    """

    def _rows(self, groups):
        # Serialize only the fields the discount screens need.
        return [{'id': g.id, 'name': g.name} for g in groups]

    def post(self, request, format=None, many=True):
        company_db = loginview.db_active_connection(request)
        order_by = request.data['order_by']
        # BUG FIX: the original raised NameError for order_type values other
        # than '+'/'-'.
        order = order_by if request.data['order_type'] == '+' else '-' + order_by
        if request.data['search']:
            key = request.data['search']
            cnt = EngageboostCustomerGroup.objects.using(company_db).filter(Q(name__icontains=key)).filter(isblocked='n', isdeleted='n').count()
            if cnt != 0:
                result = EngageboostCustomerGroup.objects.using(company_db).all().order_by(order).filter(Q(name__icontains=key)).filter(isblocked='n', isdeleted='n')[:100]
                return Response({'customergrp': self._rows(result)})
            # No match: the frontend expects an empty string, not a list.
            return Response({'customergrp': ''})
        result = EngageboostCustomerGroup.objects.using(company_db).all().order_by(order).filter(isblocked='n', isdeleted='n')[:100]
        return Response({'customergrp': self._rows(result)})
class DiscountCustomer(generics.ListAPIView):
    """List up to 100 active customers, optionally filtered by name/email.

    POST payload: ``search`` (first-name or email substring, may be empty),
    ``order_by`` and ``order_type`` ('+' ascending, anything else
    descending).
    """

    def _rows(self, customers):
        # Serialize only the fields the discount screens need.
        return [{'id': c.id, 'first_name': c.first_name, 'last_name': c.last_name, 'email': c.email} for c in customers]

    def post(self, request, format=None, many=True):
        company_db = loginview.db_active_connection(request)
        order_by = request.data['order_by']
        # BUG FIX: the original raised NameError for order_type values other
        # than '+'/'-'.
        order = order_by if request.data['order_type'] == '+' else '-' + order_by
        if request.data['search']:
            key = request.data['search']
            match = Q(first_name__icontains=key) | Q(email__icontains=key)
            cnt = EngageboostCustomers.objects.using(company_db).filter(match).filter(isblocked='n', isdeleted='n').count()
            if cnt != 0:
                result = EngageboostCustomers.objects.using(company_db).all().order_by(order).filter(match).filter(isblocked='n', isdeleted='n')[:100]
                return Response({'customer': self._rows(result)})
            # No match: the frontend expects an empty string, not a list.
            return Response({'customer': ''})
        result = EngageboostCustomers.objects.using(company_db).all().order_by(order).filter(isblocked='n', isdeleted='n')[:100]
        return Response({'customer': self._rows(result)})
class CategoriesListDiscount(generics.ListAPIView):
    """Resolve the category-hierarchy path for a saved discount condition.

    GET ``pk`` is a condition row id.  For every category id stored in the
    row's ``all_category_id`` the response reports up to four ancestor
    levels (category_1 = top-most) so the edit screen can pre-select its
    cascading category dropdowns.  Also returns all active top-level
    categories.
    """
    def get(self, request, pk, format=None):
        company_db = loginview.db_active_connection(request)
        Categories = EngageboostCategoryMasters.objects.using(company_db).all().filter(isdeleted='n',isblocked='n',parent_id=0).order_by('-id')
        Category = CategoriesSerializer(Categories, many=True)
        child_id=EngageboostDiscountMastersConditions.objects.using(company_db).get(id=pk)
        if str(child_id.all_category_id)!='None':
            all_categories=child_id.all_category_id.split(',')
            all_categories = [int(numeric_string) for numeric_string in all_categories]
            all_categories.sort()
            arr2=[]
            for child in all_categories:
                # BUG FIX: initialise all four levels each iteration; the
                # original raised NameError (or leaked the previous
                # iteration's values) when a stored category id was deleted
                # or blocked.
                category_1 = category_2 = category_3 = category_4 = 0
                child_id1 = EngageboostCategoryMasters.objects.using(company_db).filter(id=child,isdeleted='n',isblocked='n')
                if child_id1.count() > 0:
                    child_id1 = child_id1.first()
                    # Walk up the parent chain, at most four levels deep.
                    if child_id1.parent_id != 0:
                        child_count1=EngageboostCategoryMasters.objects.using(company_db).filter(id=child_id1.parent_id,isdeleted='n',isblocked='n').count()
                        if child_count1 > 0:
                            child_id2=EngageboostCategoryMasters.objects.using(company_db).get(id=child_id1.parent_id,isdeleted='n',isblocked='n')
                            if child_id2.parent_id!=0:
                                child_count2=EngageboostCategoryMasters.objects.using(company_db).filter(id=child_id2.parent_id,isdeleted='n',isblocked='n').count()
                                if child_count2 >0:
                                    child_id3=EngageboostCategoryMasters.objects.using(company_db).get(id=child_id2.parent_id,isdeleted='n',isblocked='n')
                                    if child_id3.parent_id!=0:
                                        child_count2=EngageboostCategoryMasters.objects.using(company_db).filter(id=child_id3.parent_id,isdeleted='n',isblocked='n').count()
                                        if child_count2 >0:
                                            child_id4=EngageboostCategoryMasters.objects.using(company_db).get(id=child_id3.parent_id,isdeleted='n',isblocked='n')
                                            category_1=child_id4.id
                                            category_2=child_id3.id
                                            category_3=child_id2.id
                                            category_4=child_id1.id
                                    else:
                                        category_1=child_id3.id
                                        category_2=child_id2.id
                                        category_3=child_id1.id
                                        category_4=0
                            else:
                                category_1=child_id2.id
                                category_2=child_id1.id
                                category_3=0
                                category_4=0
                    else:
                        category_1=child_id1.id
                        category_2=0
                        category_3=0
                        category_4=0
                data_parent={"category_1":category_1,"category_2":category_2,"category_3":category_3,"category_4":category_4}
                arr2.append(data_parent)
            return HttpResponse(json.dumps({"parent_child":arr2,'category':Category.data}), content_type='application/json')
        else:
            # No categories stored on this condition: return a single
            # all-zero placeholder so the dropdowns reset.
            data_parent=[{"category_1":0,"category_2":0,"category_3":0,"category_4":0}]
            return HttpResponse(json.dumps({"parent_child":data_parent,'category':Category.data}), content_type='application/json')
class CatrgoryConditionsSet(generics.ListAPIView):
    """Return every active category (all levels) for discount coupons."""

    def get(self, request, format=None, many=True):
        company_db = loginview.db_active_connection(request)
        Categories = EngageboostCategoryMasters.objects.using(company_db).all().filter(isdeleted='n', isblocked='n')
        Category = CategoriesSerializer(Categories, many=True)
        # A serializer instance is always truthy, so the else branch is a
        # defensive fallback only.
        if Category:
            data = {
                'category': Category.data,
            }
        else:
            # BUG FIX: the original referenced an undefined name
            # ``serializer`` here, which would have raised NameError.
            data = {
                'status': 0,
                'api_status': '',
                'message': 'Data Not Found',
            }
        return Response(data)
class ImportFileDiscounts(generics.ListAPIView):
    """Accept an uploaded discount spreadsheet and return its header columns.

    The file is stored under importfile/discounts/ with a randomized name;
    the response carries the stored filename, the sheet's header row (as a
    {caption: caption} dict for the mapping screen) and the parent-category
    list for the given website.
    """

    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        product_path = 'discounts'
        filepath = 'importfile'
        post_data = request.data
        if 'import_file' not in request.FILES:
            # BUG FIX: the original fell through to the response with
            # ``new_file_name`` undefined (NameError); report cleanly.
            return Response({"status": 0, "message": "No import file uploaded"})
        # Randomize the stored name to avoid collisions between uploads.
        rand = str(random.randint(1, 99999))
        file1 = request.FILES['import_file']
        file_name = file1.name
        ext = file_name.split('.')[-1]
        time_stamp = str(int(datetime.datetime.now().timestamp()))
        new_file_name = 'DiscountImport_' + rand + time_stamp
        fs = FileSystemStorage()
        filename = fs.save(filepath + '/' + product_path + '/' + new_file_name + '.' + ext, file1)
        uploaded_file_url = fs.url(filename)
        csvReader = xlrd.open_workbook(settings.BASE_DIR + uploaded_file_url)
        sheet = csvReader.sheet_by_name('Sheet1')
        headers = [str(cell.value) for cell in sheet.row(0)]
        # The mapping screen expects a {caption: caption} dict, not a list.
        headers = {k: k for k in headers}
        #********* List Of Parent Category *********#
        category_lists = []
        category_cond = EngageboostCategoryMasters.objects.using(company_db).all().filter(website_id=post_data['website_id'], parent_id=0, isblocked="n", isdeleted="n").order_by('name')
        if category_cond:
            category_list = CategoriesSerializer(category_cond, many=True)
            category_lists = category_list.data
        datas = {"category_list": category_lists, "filename": new_file_name + '.' + ext, "xls_header": headers}
        return Response(datas)
class SaveFileDiscounts(generics.ListAPIView):
    """Parse an uploaded discount spreadsheet into EngageboostTempDiscountMasters.

    POST payload: ``filename`` (file previously stored by ImportFileDiscounts)
    and ``website_id``.  Every data row of the first sheet is mapped from its
    header captions to model fields, normalised (discount/coupon types,
    status, customer group, Excel serial dates) and inserted into the temp
    table for the preview/confirm step.  The uploaded file is deleted
    afterwards.
    """
    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        product_path = 'discounts'
        module_id = 1
        temp_model = 'TempDiscount'
        model = 'Discount'
        filepath = 'importfile'
        datas = []
        custom_field_datas=[]
        post_data = request.data
        # map_fields = post_data["map_fields"]
        # Read xls Data
        fs=FileSystemStorage()
        filename = filepath+'/'+product_path+'/'+post_data["filename"]
        uploaded_file_url = fs.url(filename)
        BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        if os.path.exists(BASE_DIR):
            csvReader = xlrd.open_workbook(settings.BASE_DIR+uploaded_file_url)
            sheet = csvReader.sheet_by_index(0)
            # Number of rows including the header row.
            length=len(sheet.col_values(0))
            xls_column_header_info = []
            xls_column_info={}
            row_no_in_xls= sheet.ncols
            # max_column = sheet.ncols
            # First pass: record each header caption with its column index.
            for x in range(length):
                if x==0:
                    for i in range(row_no_in_xls):
                        d11 ={"column_name":sheet.col_values(i)[x],"column_number":i}; xls_column_info=dict(xls_column_info,**d11)
                        xls_column_header_info.append(xls_column_info)
                else:
                    pass
            # Second pass: convert every data row into a temp-table record.
            for x in range(length):
                if x==0:
                    pass
                else:
                    # Ids are assigned manually (per-company database).
                    has_record = EngageboostTempDiscountMasters.objects.last()
                    if has_record:
                        last_entry_of_table = EngageboostTempDiscountMasters.objects.order_by('-id').latest('id')
                        row_id = int(last_entry_of_table.id)+int(1)
                    else:
                        row_id = 1
                    serializer_data={}
                    CF_serializer_data={}
                    custom_field_list=[]
                    d2 = {}
                    d1 = {"id":row_id,"website_id":post_data['website_id'],"file_name":post_data['filename']};
                    serializer_data=dict(serializer_data,**d1)
                    try:
                        # Map each cell to a model field via its header caption.
                        for xls_column_header in xls_column_header_info:
                            coupon_type = ""
                            disc_type = ""
                            column_name = str(xls_column_header["column_name"])
                            column_name = column_name.strip()
                            column_number = xls_column_header["column_number"]
                            field_value = sheet.col_values(column_number)[x] if sheet.col_values(column_number)[x] else None
                            if column_name=="Discount Name":
                                keyword = "name"
                            if column_name=="Discount Description":
                                keyword = "description"
                            if column_name=="Discount Type":
                                keyword = "discount_type"
                            if column_name=="Apply Per/Fixed":
                                keyword = "disc_type"
                                disc_type = field_value
                            if column_name=="Coupon Code":
                                keyword = "coupon_code"
                            if column_name=="Coupon Type":
                                keyword = "coupon_type"
                            if column_name=="Generate Options":
                                keyword = "has_multiplecoupons"
                            if column_name=="Number of Coupons":
                                keyword = "used_coupon"
                            if column_name=="Coupon Prefix":
                                keyword = "coupon_prefix"
                            if column_name=="Coupon Suffix":
                                keyword = "coupon_suffix"
                            if column_name=="Discount Amount":
                                keyword = "amount"
                            if column_name=="Discount Starts":
                                keyword = "disc_start_date"
                            if column_name=="Discount End":
                                keyword = "disc_end_date"
                            if column_name=="Max. no of items":
                                keyword = "no_of_quantity_per"
                            if column_name=="Max Discount Amount":
                                keyword = "up_to_discount"
                            if column_name=="Offer Type":
                                keyword = "offer_type"
                            if column_name=="Customer Group":
                                keyword = "customer_group"
                            if column_name=="Status":
                                keyword = "isblocked"
                            if column_name=="SKU Equals To":
                                keyword = "sku_equals"
                            if column_name=="SKU Not Equals To":
                                keyword = "sku_not_equals"
                            if column_name=="Category Equals To":
                                keyword = "category_equals"
                            if column_name=="Category Not Equals To":
                                keyword = "category_not_equals"
                            if column_name=="Amount Equals To":
                                keyword = "amount_equals"
                            if column_name=="Amount Equals To>":
                                keyword = "amount_equals_greater"
                            if column_name=="Amount Equals To<":
                                keyword = "amount_equals_less"
                            if column_name=="Free Item sku":
                                keyword = "free_item_sku"
                            if column_name=="Free item Quantity":
                                keyword = "free_item_quantity"
                            if column_name=="Weekly Equals To":
                                keyword = "weekly_equals"
                            if column_name=="Weekly Not Equals To":
                                keyword = "weekly_not_equals"
                            if column_name=="Customer Equals To":
                                keyword = "customer_equals"
                            if column_name=="Customer Not Equals To":
                                keyword = "customer_not_equals"
                            if column_name=="Free Shipping":
                                keyword = "free_shipping"
                            # NOTE(review): if a header matches none of the
                            # captions above, ``keyword`` keeps its previous
                            # value (or is undefined for the first column) --
                            # confirm all sheet headers are known.
                            d2.update({keyword:field_value})
                        # Normalise caption values to the numeric/flag codes
                        # the discount tables use.
                        if d2['discount_type'].lower()=="product" and d2['disc_type'].lower()=="p":
                            d2.update({"disc_type":1})
                        elif d2['discount_type'].lower()=="product" and d2['disc_type'].lower()=="f":
                            d2.update({"disc_type":2})
                        # NOTE(review): this branch repeats the first
                        # condition (product + "p") and is unreachable;
                        # "fh" -> 4 may have been intended.
                        elif d2['discount_type'].lower()=="product" and d2['disc_type'].lower()=="p":
                            d2.update({"disc_type":4})
                        elif d2['discount_type'].lower()=="coupon" and d2['disc_type'].lower()=="p":
                            d2.update({"disc_type":6})
                        elif d2['discount_type'].lower()=="coupon" and d2['disc_type'].lower()=="f":
                            d2.update({"disc_type":3})
                        elif d2['discount_type'].lower()=="coupon" and d2['disc_type'].lower()=="fh":
                            d2.update({"disc_type":7})
                        if d2['discount_type'].lower()=="product":
                            d2.update({"discount_type":"p"})
                        elif d2['discount_type'].lower()=="coupon":
                            d2.update({"discount_type":"c"})
                        if d2['discount_type'].lower()=="coupon" and d2['coupon_type'].lower()=='single use':
                            d2.update({"coupon_type":1})
                        elif d2['discount_type'].lower()=="coupon" and d2['coupon_type'].lower()=='multiple use':
                            d2.update({"coupon_type":2})
                        if d2['has_multiplecoupons'].lower()=="single code":
                            d2.update({"has_multiplecoupons":'n'})
                        elif d2['has_multiplecoupons'].lower()=="multiple code":
                            d2.update({"has_multiplecoupons":'y'})
                        if d2['coupon_type'].lower()=="single use":
                            d2.update({"coupon_type":1})
                        elif d2['coupon_type'].lower()=="multiple use":
                            d2.update({"coupon_type":2})
                        if d2['isblocked'].lower()=="active":
                            d2.update({"isblocked":'n'})
                        elif d2['isblocked'].lower()=="inactive":
                            d2.update({"isblocked":'y'})
                        # Resolve the customer group caption to its id.
                        if d2['customer_group']!="" and d2['customer_group']!=None:
                            obj = EngageboostCustomerGroup.objects.filter(name=d2['customer_group'])
                            if obj.count()>0:
                                custgrp=obj.last()
                                d2.update({"customer_group":custgrp.id})
                            else:
                                d2.update({"customer_group":None})
                        # Excel stores dates as serial numbers; convert to
                        # ISO timestamps.
                        workbook_datemode = csvReader.datemode
                        y, M, d, h, m, s = xlrd.xldate_as_tuple(d2['disc_start_date'], workbook_datemode)
                        d2['disc_start_date'] = ("{0}-{1}-{2}".format(y, M, d, h, m, s))
                        y, M, d, h, m, s = xlrd.xldate_as_tuple(d2['disc_end_date'], workbook_datemode)
                        d2['disc_end_date'] = ("{0}-{1}-{2}".format(y, M, d, h, m, s))
                        d2['disc_start_date'] = datetime.datetime.strptime(d2['disc_start_date'],'%Y-%m-%d').strftime('%Y-%m-%dT%H:%M:%SZ')
                        d2['disc_end_date'] = datetime.datetime.strptime(d2['disc_end_date'],'%Y-%m-%d').strftime('%Y-%m-%dT%H:%M:%SZ')
                        serializer_data=dict(serializer_data,**d2)
                    # Missing expected column: skip normalisation for the row.
                    except KeyError: no=""
                    current_time = datetime.datetime.now(datetime.timezone.utc).astimezone()
                    d1={"created":current_time,"modified":current_time};
                    serializer_data=dict(serializer_data,**d1)
                    try:
                        save_temp_product = EngageboostTempDiscountMasters.objects.using(company_db).create(**serializer_data)
                        data_status = {"status":1,"filename":post_data["filename"]}
                    except Exception as e :
                        data_status = {"status":0,"filename":post_data["filename"],'errors':str(e) }
            # Remove the uploaded file once all rows are staged.
            os.remove(settings.BASE_DIR+uploaded_file_url)
        else:
            data_status = {"status":0,"filename":post_data["filename"],'errors':"File Not Exists" }
        return Response(data_status)
class PreviewSaveFileDiscounts(generics.ListAPIView):
    """Return the staged temp-table rows of an uploaded discount file.

    POST payload: ``model`` (only "discount" is handled), ``website_id`` and
    ``filename``.  Each row is annotated with ``error``/``error_message``;
    row-level validation (check_exported_data) is currently disabled, so
    every row is reported as SUCCESS.
    """

    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        post_data = request.data
        fetch_all_data = []
        data = {}
        if post_data["model"] == "discount":
            # Rows staged by SaveFileDiscounts for this website/file.
            fetch_all_data_cond = EngageboostTempDiscountMasters.objects.using(company_db).all().filter(website_id=post_data['website_id'], file_name=post_data['filename'])
            if fetch_all_data_cond:
                fetch_all_datas = TempDiscountsSerializer(fetch_all_data_cond, many=True)
                for fad in fetch_all_datas.data:
                    # NOTE(review): validation is a stub -- ``error`` is
                    # always empty, so the failure branch never runs.
                    error = []
                    if error:
                        fad["error"] = 1
                        fad["error_message"] = error
                    else:
                        error.append("SUCCESS")
                        fad["error"] = 0
                        fad["error_message"] = error
                fetch_all_data = fetch_all_datas.data
            data = {"preview_data": fetch_all_data, "filename": post_data['filename']}
        return Response(data)
class SaveAllImportedDiscounts(generics.ListAPIView):
    """POST: promote selected temp discount rows into real discount masters.

    Body: ``selected_ids`` (comma-separated temp row ids) and ``filename``
    (the import file whose temp rows are purged at the end, regardless of
    per-row success).
    """
    def post(self, request, format=None):
        company_db = loginview.db_active_connection(request)
        datas = []
        fetch_temp_datas = []
        post_data = request.data
        selectedIds = post_data["selected_ids"].split(',')
        # Collect the serialized temp rows for every requested id.
        for i in selectedIds:
            fetch_temp_data_cond = EngageboostTempDiscountMasters.objects.using(company_db).filter(id=int(i)).first()
            if fetch_temp_data_cond:
                fetch_temp_data = TempDiscountsSerializer(fetch_temp_data_cond, partial=True)
                fetch_temp_datas.append(fetch_temp_data.data)
        # BUGFIX: `responseDatas` was only assigned inside the loop below, so
        # an empty selection raised UnboundLocalError on return; default to a
        # success payload with no rows.
        responseDatas = {"status": 1, "api_response": datas, "message": 'Discounts Saved'}
        for fetchtempdatas in fetch_temp_datas:
            serializer_data = {}
            serializer_data = dict(serializer_data, **fetchtempdatas)
            # Map the import file's discount_type code onto the master flag.
            # BUGFIX: default to None instead of leaking the previous row's
            # value (or raising UnboundLocalError on the first row) when the
            # code is neither "c" nor "p".
            discount_master_type = None
            if fetchtempdatas['discount_type'] == "c":
                discount_master_type = 1
            elif fetchtempdatas['discount_type'] == "p":
                discount_master_type = 0
            current_time = datetime.datetime.now(datetime.timezone.utc).astimezone()
            d1 = {"discount_master_type": discount_master_type, "website_id": fetchtempdatas["website_id"], "created": current_time, "modified": current_time}
            serializer_data = dict(serializer_data, **d1)
            datas.append(serializer_data)
            serializer = DiscountMasterSerializer(data=serializer_data, partial=True)
            if serializer.is_valid():
                serializer.save()
                responseDatas = {"status": 1, "api_response": datas, "message": 'Discounts Saved'}
            else:
                data = {'status': 0, 'api_status': serializer.errors, 'message': 'Error Occured'}
                datas.append(data)
                responseDatas = {"status": 0, "api_response": datas, "message": 'Error Occured in Discounts'}
        # Temp rows for this import file are always purged once processed.
        EngageboostTempDiscountMasters.objects.using(company_db).filter(file_name=post_data['filename']).delete()
        return Response(responseDatas)
# Set discount condition(Insert new records)
class DiscountCouponCondition(generics.ListAPIView):
    """POST: replace the condition rows of a coupon-type discount.

    Expects ``discount_master_id`` and ``value`` (a list of condition
    dicts) in the request body.  Existing conditions for the discount are
    deleted, then each posted condition is validated per its ``fields``
    code and saved.  Responds with status 1 on full success, 0 otherwise.
    """
    def post(self, request, format=None):
        datas = []
        company_db = loginview.db_active_connection(request)
        discount_master_id = request.data['discount_master_id']
        disc_cnt = EngageboostDiscountMasters.objects.using(company_db).filter(id=discount_master_id, discount_type="c").count()
        # BUGFIX: `error` and `serializer` were previously only bound inside
        # the branches below, raising NameError when the discount id was
        # invalid (error.append at the bottom) or when no serializer was ever
        # built (serializer.errors in the response).
        error = []
        serializer = None
        if disc_cnt > 0:
            # Replace any existing condition rows for this discount.
            cnt = EngageboostDiscountMastersConditions.objects.using(company_db).filter(discount_master_id=discount_master_id).count()
            if cnt > 0:
                EngageboostDiscountMastersConditions.objects.using(company_db).filter(discount_master_id=discount_master_id).delete()
            has_multy = request.data['value']
            datas = []
            for data in has_multy:
                # Recognised condition codes: -1 order amount, -2 customer,
                # -3 week day, -4 free shipping, 9175 SKU, 0 category.
                if data['fields'] == "-1" or data['fields'] == "-4" or data['fields'] == "-2" or data['fields'] == "-3" or data['fields'] == "9175" or data['fields'] == "0":
                    # Hand-rolled id allocation (max id + 1); racy under
                    # concurrent writes, kept for behavioural parity.
                    has_record = EngageboostDiscountMastersConditions.objects.using(company_db).last()
                    if has_record:
                        # BUGFIX: route the max-id lookup through company_db as
                        # well (it previously hit the default database).
                        last_entry_of_table = EngageboostDiscountMastersConditions.objects.using(company_db).order_by('-id').latest('id')
                        row_id = int(last_entry_of_table.id) + int(1)
                    else:
                        row_id = 1
                    d1 = {"id": row_id}
                    # Customer
                    if data['fields'] == "-2":
                        if not 'all_customer_id' in data or data['all_customer_id'] == "" or data['all_customer_id'] == None:
                            data.update({"error": "Select customers"})
                            error.append(data)
                    # Week Days
                    elif data['fields'] == "-3":
                        if not 'all_day_id' in data or data['all_day_id'] == "" or data['all_day_id'] == None:
                            data.update({"error": "Select Days"})
                            error.append(data)
                    # Free Shipping or Order Amount
                    elif data['fields'] == "-4" or data['fields'] == "-1":
                        if not 'value' in data or data['value'] == "" or data['value'] == None:
                            data.update({"error": "Enter value"})
                            error.append(data)
                    # SKU
                    elif data['fields'] == "9175":
                        if not 'all_product_id' in data or data['all_product_id'] == "" or data['all_product_id'] == None:
                            data.update({"error": "Select Products"})
                            error.append(data)
                    # Category
                    elif data['fields'] == "0":
                        if not 'all_category_id' in data or data['all_category_id'] == "" or data['all_category_id'] == None:
                            data.update({"error": "Select Categories"})
                            error.append(data)
                    # NOTE(review): rows flagged above are still serialized and
                    # saved below — preserved from the original behaviour.
                    data = dict(data, **d1)
                    now = datetime.datetime.now()
                    current_time = now.strftime("%Y-%m-%d")
                    d3 = {"created": current_time, "modified": current_time}
                    data = dict(data, **d3)
                    serializer = DiscountConditionsSerializer(data=data, partial=True)
                    if serializer.is_valid():
                        serializer.save()
                    else:
                        data.update({"error": serializer.errors})
                        error.append(data)
                else:
                    data.update({"error": "Invalid discount type"})
                    error.append(data)
        else:
            error.append("Invalid discount")
        if len(error) > 0:
            context = {
                'status': 0,
                # BUGFIX: fall back to the collected error list when no
                # serializer was ever constructed.
                'api_status': serializer.errors if serializer is not None else error,
                'message': 'Something went wrong',
            }
        else:
            context = {
                'status': 1,
                'api_status': '',
                'message': 'Successfully Inserted',
            }
        return Response(context)
# Set discount condition(Insert new records)
class DiscountProductFree(generics.ListAPIView):
    """POST: create/update a "free product" discount master.

    Saves the posted payload through DiscountMasterSerializer, then pushes
    the affected product ids to Elasticsearch so cached channel prices are
    refreshed.
    """
    def post(self, request, format=None):
        data=request.data
        company_db = loginview.db_active_connection(request)
        now = datetime.datetime.now()
        current_time = datetime.datetime.now(datetime.timezone.utc).astimezone()
        d3 = {"created":current_time,"modified":current_time}
        data = dict(data,**d3)
        serializer = DiscountMasterSerializer(data=data,partial=True)
        if serializer.is_valid():
            # NOTE(review): DRF serializers do not normally expose `.id`
            # before save; this only works if DiscountMasterSerializer defines
            # it — confirm. Also these condition queries skip
            # `.using(company_db)` and hit the default database — verify.
            prev_products = list(EngageboostDiscountMastersConditions.objects.filter(discount_master_id = serializer.id).values_list('all_product_id',flat=True))
            serializer.save()
            if(serializer.id):
                # Union of product ids attached before and after the save, so
                # removed products are reindexed too.
                objproduct_list = EngageboostDiscountMastersConditions.objects.filter(discount_master_id = serializer.id).values_list('all_product_id',flat=True)
                if(prev_products):
                    objproduct_list = list(objproduct_list)
                    objproduct_list.extend(prev_products)
                    objproduct_list = list(set(objproduct_list))
                if objproduct_list :
                    for elastic_product_id in objproduct_list:
                        if(elastic_product_id != "" and elastic_product_id is not None):
                            # `all_product_id` may hold a single id or a
                            # comma-separated list of ids.
                            # NOTE(review): the bare `except` retries as a
                            # single id, which re-raises on genuinely bad
                            # data; it also hides unrelated errors — confirm.
                            try:
                                if("," in elastic_product_id):
                                    prod_lst = elastic_product_id.split(",")
                                    elastic = common.update_bulk_elastic('EngageboostProducts',prod_lst,'channel_currency_product_price','update')
                                else:
                                    elastic = common.update_bulk_elastic('EngageboostProducts',[int(elastic_product_id)],'channel_currency_product_price','update')
                            except:
                                elastic = common.update_bulk_elastic('EngageboostProducts',[int(elastic_product_id)],'channel_currency_product_price','update')
            context ={
                'status':1,
                'api_status':'',
                'message':'Successfully Inserted',
            }
        else:
            context ={
                'status':0,
                'api_status':serializer.errors,
                'message':'Something went wrong',
            }
        return Response(context)
class CouponExport(generics.ListAPIView):
    """POST: export all coupons of a promotion to an .xlsx file.

    Writes ID / Coupon Code / Is Used columns for the given
    ``discount_master_id`` and responds with the media-relative path of the
    generated file, or an error payload.
    """
    def post(self, request, *args, **kwargs):
        company_db = loginview.db_active_connection(request)
        try:
            discount_master_id = request.data['discount_master_id']
            ## ************ Check file dir exist or not. If dir not exist then create
            file_dir = settings.MEDIA_ROOT+'/exportfile/'
            export_dir = settings.MEDIA_URL+'exportfile/'
            if not os.path.exists(file_dir):
                os.makedirs(file_dir)
            ## ************ Create file name
            file_name = "coupon_export_"+get_random_string(length=5)
            ## Create file full path
            file_path = file_dir+file_name+'.xlsx'
            export_file_path = export_dir+file_name+'.xlsx'
            # Strip the leading slash so the client can prefix its own host.
            export_file_path = export_file_path[1:]
            workbook = xlsxwriter.Workbook(file_path)
            worksheet = workbook.add_worksheet()
            bold = workbook.add_format({'bold': True})
            row = 1
            worksheet.write(0,0,'ID',bold)
            worksheet.write(0,1,'Coupon Code',bold)
            worksheet.write(0,2,'Is Used',bold)
            is_used = ""
            result = EngageboostDiscountMastersCoupons.objects.using(company_db).filter(isdeleted='n',discount_master_id=discount_master_id).order_by('id')
            result_count = result.count()
            if result_count>0:
                result_data = DiscountMasterCouponSerializer(result, many=True)
                result_data = result_data.data
                for resultdata in result_data:
                    if resultdata['is_used']=="y":
                        is_used = "Yes"
                    else:
                        is_used = "No"
                    # NOTE(review): the trailing 0 sits where xlsxwriter
                    # expects an optional cell format; being falsy it acts
                    # like "no format" — confirm against the xlsxwriter API.
                    worksheet.write(row,0,resultdata['id'],0)
                    worksheet.write(row,1,resultdata['coupon_code'],0)
                    worksheet.write(row,2,is_used,0)
                    row = row + 1
                workbook.close()
                data ={'status':1,"file_path":export_file_path}
            else:
                # BUGFIX: close (and flush) the workbook on the empty branch
                # too — it was previously left open, leaking the file handle.
                workbook.close()
                data ={'status':0,"message":"No coupon found for this promotion"}
        except Exception as error:
            trace_back = sys.exc_info()[2]
            line = trace_back.tb_lineno
            data = {"status":0,"api_status":traceback.format_exc(),"error_line":line,"error_message":str(error),"message": str(error)}
        return Response(data)
# @csrf_exempt
# @permission_classes((AllowAny,))
def check_exported_data(serializer_data, request):
    """Validate one imported discount row and annotate it.

    Parses the ISO date strings in-place into datetimes, runs the field
    validations below in order, and returns the row dict merged with
    ``status`` / ``err_flag`` / ``error_text`` keys.
    """
    company_db = loginview.db_active_connection(request)
    verdict = {'status': '', 'err_flag': '', 'error_text': []}
    problems = verdict['error_text']
    now = datetime.datetime.now()
    for date_key in ('disc_start_date', 'disc_end_date'):
        serializer_data[date_key] = datetime.datetime.strptime(
            serializer_data[date_key], '%Y-%m-%dT%H:%M:%SZ')
    duplicate_coupons = EngageboostDiscountMasters.objects.using(company_db).filter(
        coupon_code=serializer_data['coupon_code'], isblocked="n", isdeleted="n").count()
    is_coupon = serializer_data['discount_type'] == 'coupon'
    # Each rule: (row is invalid?, message) — evaluated in the same order as
    # the original sequential checks, so error_text ordering is preserved.
    rules = (
        (is_coupon and serializer_data['coupon_code'] == ""
            and serializer_data['has_multiplecoupons'] == "Single Code",
         "Coupon code is missing"),
        (is_coupon and serializer_data['has_multiplecoupons'] == "Multiple Code"
            and serializer_data['used_coupon'] <= 0,
         "Number of coupons should not blank"),
        (serializer_data['discount_type'] == 'product'
            and serializer_data['offer_type'] == "",
         "Offer type should not blank"),
        (serializer_data['coupon_type'] == "" and is_coupon,
         "Coupon type does not exist"),
        (serializer_data['name'] == "", "Discount name is missing"),
        (serializer_data['amount'] == "", "Discount amount should not blank"),
        (serializer_data['disc_start_date'] == "", "Start date should not blank"),
        (serializer_data['disc_start_date'] < now,
         "Start date should not less than current date"),
        (serializer_data['disc_end_date'] == "", "End date should not blank"),
        (serializer_data['disc_end_date'] < now,
         "End date should not less than current date"),
        (serializer_data['disc_end_date'] <= serializer_data['disc_start_date'],
         "End date should be greater than Start date"),
        (serializer_data['offer_type'] == "", "Offer type is missing"),
        (serializer_data['customer_group'] == "", "Customer group is missing"),
        (serializer_data['isblocked'] == "", "Status is missing"),
        (serializer_data['disc_type'] == "", "Apply per should not blank"),
        (duplicate_coupons > 0, "Coupon code exist"),
    )
    for failed, message in rules:
        if failed:
            problems.append(message)
    verdict['err_flag'] = 1 if problems else 0
    return dict(serializer_data, **verdict)
"[email protected]"
]
| |
10a39d56ddb328c6d322c5856d31a5d373aa2ed8 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_ConstantTrend_Seasonal_DayOfWeek_LSTM.py | d3d6f857ae0191e4610ab1abbda44e021c1aa868 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 175 | py | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['ConstantTrend'] , ['Seasonal_DayOfWeek'] , ['LSTM'] ); | [
"[email protected]"
]
| |
263bdb79183e6a8852db732e9fe310df072166bd | b2301365d220ff0295b8beddbed38b0581f9610d | /Django/fs_books_prj/apps/books/migrations/0001_initial.py | 1850bb0485a27325ab15bd0c528dafe99ff4abef | []
| no_license | JoA-MoS/Python | db246a5ff2201c6ef1dfb9d9b0fd8a37e1d7c46d | 4547c2667f3eaf0a001532bb2b103aab3c344fbe | refs/heads/master | 2021-08-16T11:18:20.420868 | 2017-07-21T05:52:18 | 2017-07-21T05:52:18 | 96,125,892 | 0 | 0 | null | 2021-06-10T18:40:09 | 2017-07-03T15:34:52 | Python | UTF-8 | Python | false | false | 676 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-19 19:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema migration for the books app: creates the Book table
    # with title/author/category char columns plus the implicit auto PK.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('author', models.CharField(max_length=255)),
                ('category', models.CharField(max_length=100)),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
9346e299b29cdb9fc9e6cd49bfae383ada7dd18e | 957ab2916bb75edc78b9d7598b4f890b80687ea8 | /core_library/game/file_processing.py | 54792eabf69578013dbe142f86df137299ab7598 | []
| no_license | doug3230/Slayer | 9e91f5db01702c206c3d52460bfb880691d3eb6a | 1dd7e72779e060c397f1e68b829004e147e15f84 | refs/heads/master | 2016-09-06T03:32:53.914322 | 2014-11-01T07:27:12 | 2014-11-01T07:27:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,956 | py | '''
Created on Oct 25, 2014
@author: Richard
'''
import pygame, customization
from pygame.freetype import Font, SysFont
def path_to_file(dir_name, file_name):
    """Join *file_name* onto *dir_name* with a forward slash.

    Falls back to the bare file name when the directory is empty/None.
    """
    return "{0}/{1}".format(dir_name, file_name) if dir_name else file_name
def path_to_image(file_name):
    # Resolve file_name relative to the configured image directory.
    return path_to_file(customization.file_settings.FILE_IMAGE_DIRECTORY, file_name)
def path_to_music(file_name):
    # Resolve file_name relative to the configured music directory.
    return path_to_file(customization.file_settings.FILE_MUSIC_DIRECTORY, file_name)
def path_to_level(file_name):
    # Resolve file_name relative to the configured level directory.
    return path_to_file(customization.file_settings.FILE_LEVEL_DIRECTORY, file_name)
def path_to_font(file_name):
    # Resolve file_name relative to the configured font directory.
    return path_to_file(customization.file_settings.FILE_FONT_DIRECTORY, file_name)
def load_music(file_name, path_included = False):
    """Load a music track into the pygame mixer.

    When *path_included* is False, the name is resolved via the configured
    music directory first.
    """
    source = file_name if path_included else path_to_music(file_name)
    pygame.mixer.music.load(source)
    return
def play_music(loop = True):
    """Start playback of the loaded track.

    loops=-1 repeats indefinitely; loops=0 (the pygame default) plays once.
    """
    pygame.mixer.music.play(-1 if loop else 0)
    return
def stop_music():
    # Halt any currently playing mixer music.
    pygame.mixer.music.stop()
    return
def load_image(file_name, path_included = False):
    """Load an image surface and convert it to the display's pixel format.

    When *path_included* is False, the name is resolved via the configured
    image directory first.
    """
    source = file_name if path_included else path_to_image(file_name)
    return pygame.image.load(source).convert()
def resize_image(image, new_width, new_height):
    # Scale to integer pixel dimensions, then convert for fast blitting.
    image = pygame.transform.scale(image, (int(new_width), int(new_height)))
    return image.convert()
def load_font(file_name, size, bold = False, italic = False, path_included = False):
    """Load a freetype font file.

    When *path_included* is False, the name is resolved via the configured
    font directory first.
    """
    source = file_name if path_included else path_to_font(file_name)
    return Font(source, size, bold, italic)
def load_system_font(file_name, size, bold = False, italic = False):
    # Look up an installed system font by name via pygame.freetype.SysFont.
    font = SysFont(file_name, size, bold, italic)
    return font
| [
"[email protected]"
]
| |
683c8168877dd2b6b8a0c8527e1f7de813a4bdfd | 4c6e0771833c087876b91962ca0f7c2ef821daa4 | /numscrypt/random.py | 1eecb29cb5e905085a0f3250913c2166291b3135 | [
"Apache-2.0"
]
| permissive | fangbei/Numscrypt | fb8a57d57ee1fad39ed9789f4e6241ae152ca563 | cf92b8b8edc57b08d24e8db482b5ea9ee8f494cd | refs/heads/master | 2021-01-17T05:18:43.257510 | 2016-03-30T10:27:13 | 2016-03-30T10:27:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | import numscrypt as ns
def rand (*dims):
    # Fill a float64 ndarray of the given shape with uniform [0, 1) samples.
    # NOTE(review): uses the JS globals `Math` and `.length` — this module
    # appears to target Transcrypt (Python-to-JavaScript); it would raise
    # NameError under CPython. Confirm the intended runtime.
    result = ns.empty (dims, 'float64')
    for i in range (result.data.length):
        result.data [i] = Math.random ()
    return result
| [
"[email protected]"
]
| |
e63f9b6c0744c053c48c90909cbf38b01602bcd6 | fadff32aecd82fb9a0925895b437b4c6d994f50b | /microcollections/collections.py | 74946405d62be4c5ee41479296eb1c1de8cbf2dd | []
| no_license | zbyte64/micromodels-collections | 638e6551d7c7dc9092818a65695023eaa8c51bd5 | a7e7a0715f18ade96745321892a0dbafd9e04dac | refs/heads/master | 2021-01-13T02:18:44.343841 | 2014-02-20T00:57:03 | 2014-02-20T00:57:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,149 | py | # -*- coding: utf-8 -*-
import micromodels
class NotSet:
    # Sentinel used to distinguish "argument omitted" from an explicit None.
    pass
class CollectionQuery(object):
    """Lazy, caching query over a collection's data store.

    Raw results, hydrated instances and scalar answers (count/keys/exists)
    are memoised in ``self._cache``; ``clone`` produces a new query with
    merged filter params and a fresh cache.
    """
    def __init__(self, collection, params):
        self.collection = collection
        self.params = params
        self._cache = dict()

    @property
    def model(self):
        return self.collection.model

    @property
    def data_store(self):
        return self.collection.data_store

    def get(self, **params):
        """Return the single matching instance (memoised per query)."""
        if params:
            return self.clone(**params).get()
        if 'get' not in self._cache:
            result = self.data_store.get(self.collection, self.params)
            self._cache['get'] = \
                self.data_store.load_instance(self.collection, result)
        return self._cache['get']

    def __iter__(self):
        # Instances are materialised lazily: already-hydrated objects are
        # replayed from the cache first, then the remaining raw results
        # (an enumerate() generator, consumable once) are hydrated on demand.
        self._cache.setdefault('objects', dict())
        if 'results' not in self._cache:
            self.find()
        #yield cached objects
        index = 0
        while index in self._cache['objects']:
            yield self._cache['objects'][index]
            index += 1
        #yield objects not yet loaded
        for index, result in self._cache['results']:
            if index not in self._cache['objects']: # redundant
                self._cache['objects'][index] = \
                    self.data_store.load_instance(self.collection, result)
            yield self._cache['objects'][index]

    def __getitem__(self, index):
        #TODO communicate to backend so that we don't fetch more then we need
        self._cache.setdefault('objects', dict())
        if 'results' not in self._cache:
            self.find()
        if isinstance(index, slice):
            # NOTE(review): assumes slice.start and slice.stop are both ints;
            # open-ended slices like q[:5] or q[3:] (start/stop None) raise
            # TypeError here, and slice.step is ignored — confirm callers.
            def sliced_gen():
                for y, obj in enumerate(iter(self)):
                    if y >= index.start and y < index.stop:
                        yield obj
            return sliced_gen()
        else:
            if index in self._cache['objects']:
                return self._cache['objects'][index]
            else:
                for y, obj in enumerate(iter(self)):
                    if y == index:
                        return obj
                raise KeyError('Not found: %s' % index)

    def __len__(self):
        return self.count()

    def find(self, **params):
        """Iterate instances matching params; primes the results cache."""
        if params:
            return self.clone(**params).find()
        if not self.params:
            return self.all()
        if 'results' not in self._cache:
            results = self.data_store.find(self.collection, self.params)
            self._cache['results'] = enumerate(results)
        return iter(self)

    def first(self, **params):
        """Return the first match or None.

        NOTE(review): uses the Python 2 iterator protocol (``.next()``);
        under Python 3 this raises AttributeError — module appears py2-era.
        """
        if params:
            return self.clone(**params).first()
        if not self.params:
            return self.all().next()
        if 'results' not in self._cache:
            results = self.data_store.find(self.collection, self.params)
            self._cache['results'] = enumerate(results)
        try:
            return iter(self).next()
        except StopIteration:
            return None

    def all(self):
        """Iterate every instance in the collection (ignores no params)."""
        if self.params:
            return self.find()
        if 'results' not in self._cache:
            results = self.data_store.all(self.collection)
            self._cache['results'] = enumerate(results)
        return iter(self)

    def delete(self):
        # Bulk delete at the data-store level; bypasses per-instance hooks.
        return self.data_store.delete(self.collection, self.params)

    def count(self):
        if 'count' not in self._cache:
            self._cache['count'] = \
                self.data_store.count(self.collection, self.params)
        return self._cache['count']

    def keys(self):
        if 'keys' not in self._cache:
            self._cache['keys'] = \
                self.data_store.keys(self.collection, self.params)
        return self._cache['keys']

    def exists(self, **params):
        if params:
            return self.clone(**params).exists()
        if 'exists' not in self._cache:
            self._cache['exists'] = \
                self.data_store.exists(self.collection, self.params)
        return self._cache['exists']

    def clone(self, **params):
        """Return a new query with params merged in and an empty cache."""
        new_params = dict(self.params)
        new_params.update(params)
        return type(self)(self.collection, new_params)
class CRUDHooks(object):
    """No-op lifecycle hooks for collection CRUD operations.

    Subclasses override individual hooks to intercept model registration,
    instance initialisation, save, removal and bulk deletion.  Every
    default implementation simply returns its argument unchanged.
    """

    def modelRegistered(self, model):
        """Called once when a model class is attached to a collection."""
        return model

    def afterInitialize(self, instance):
        """Called after a new (unsaved) instance has been constructed."""
        return instance

    def beforeCreate(self, params):
        """Called with the raw params before an instance is created."""
        return params

    def afterCreate(self, instance):
        """Called after a brand-new instance has been persisted."""
        return instance

    def beforeSave(self, instance):
        """Called just before an instance is written to the data store."""
        return instance

    def afterSave(self, instance):
        """Called just after an instance has been written."""
        return instance

    def beforeRemove(self, instance):
        """Called just before a single instance is deleted."""
        return instance

    def afterRemove(self, instance):
        """Called just after a single instance has been deleted."""
        return instance

    #CONSIDER: ids or params?
    def afterDelete(self):
        """Called after a bulk delete; receives no arguments."""
        return
class BaseCollection(CRUDHooks):
    """Abstract collection API over a data store.

    Exposes both a dictionary-like interface (keyed by object id) and a
    query interface (get/first/find/exists) built on CollectionQuery.
    Subclasses supply ``data_store``, ``model`` and id handling.
    """
    model = None
    # Attribute/key on instances that stores the primary key, or None.
    object_id_field = None
    # Optional callable producing new ids when an instance has none.
    id_generator = None
    params = dict()

    def get_query(self, **params):
        # Collection-level params always override call-site params.
        if self.params:
            params.update(self.params)
        return CollectionQuery(self, params)

    def get_loader(self):
        # Callable used by the data store to hydrate raw rows.
        return self.model

    def get_object_id(self, instance):
        # Dict-style lookup; falls back to id_generator (called if callable).
        object_id = instance.get(self.object_id_field, self.id_generator)
        if callable(object_id):
            object_id = object_id()
        return object_id

    def get_serializable(self, instance):
        '''
        Returns an object representation that can be easily serialized
        '''
        return instance

    ## Dictionary like methods ##
    def __setitem__(self, key, instance):
        # Stamp the key onto the instance (mapping or attribute style),
        # then persist it under that key.
        if self.object_id_field:
            if hasattr(instance, '__setitem__'):
                instance[self.object_id_field] = key
            elif hasattr(instance, self.object_id_field):
                setattr(instance, self.object_id_field, key)
        return self.save(instance, key)

    def __getitem__(self, key):
        return self.get(pk=key)

    def __delitem__(self, key):
        return self.find(pk=key).delete()

    def __contains__(self, key):
        return self.exists(pk=key)

    def __len__(self):
        return self.count()

    def keys(self):
        return self.get_query().keys()

    def values(self):
        return self.all()

    def items(self):
        #TODO make efficient
        for key in self.keys():
            yield (key, self.get(key))

    def extend(self, items):
        for item in items:
            self.save(item)

    def update(self, items):
        for key, value in items.items():
            self[key] = value

    def pop(self, key, default=NotSet):
        # NOTE: `default == NotSet` works because comparing the class object
        # falls back to identity, but `is` would state the intent directly.
        try:
            instance = self[key]
        except (KeyError, IndexError):
            if default == NotSet:
                raise
            return default
        instance.remove()
        return instance

    def has_key(self, key):
        return key in self

    def clear(self):
        self.delete()

    def setdefault(self, key, value):
        # NOTE(review): unlike dict.setdefault, this returns None instead of
        # the stored value — confirm callers do not rely on the return.
        if key in self:
            return
        self[key] = value

    def copy(self):
        return dict(self.items())

    def get(self, pk=NotSet, _default=None, **params):
        '''
        Returns a single object matching the query params,
        or ``_default`` if no object matches.
        '''
        if pk is not NotSet:
            params['pk'] = pk
        try:
            return self.get_query(**params).get()
        except (KeyError, IndexError):
            return _default

    ## Query Methods ##
    def first(self, **params):
        '''
        Returns a single object matching the query params
        Returns None if no object matches
        '''
        return self.get_query(**params).first()

    def find(self, **params):
        '''
        Returns a query object that iterates over instances matching the query params
        '''
        return self.get_query(**params)

    def exists(self, **params):
        '''
        Returns a boolean on whether objects match the query params
        '''
        return self.get_query(**params).exists()

    def new(self, **params):
        '''
        Instantiates and returns a new instance
        '''
        instance = self.model(**params)
        return self.afterInitialize(instance)

    def create(self, **params):
        '''
        Saves a new instance
        '''
        instance = self.new(**params)
        return self.save(instance)

    def save(self, instance, key=None):
        return self.data_store.save(self, instance, key)

    def remove(self, instance):
        return self.data_store.remove(self, instance)

    def all(self):
        return self.get_query()

    def delete(self):
        return self.get_query().delete()

    def count(self):
        return self.get_query().count()

    def __iter__(self):
        return self.get_query().__iter__()
class RawCollection(BaseCollection):
    '''
    A collection that returns dictionaries and responds like a dictionary
    '''
    object_id_field = 'id'

    def __init__(self, data_store=None, model=dict, name=None,
                 object_id_field=None, id_generator=None, params=None):
        if data_store is None:
            from .datastores import MemoryDataStore
            data_store = MemoryDataStore
        # NOTE(review): this branch is a no-op; it looks like the intent was
        # `data_store = data_store()` to instantiate a data-store class —
        # confirm against the datastores module.
        if callable(data_store):
            data_store = data_store
        self.data_store = data_store
        self.model = model
        self.name = name
        self.params = params or dict()
        if object_id_field:
            self.object_id_field = object_id_field
        if id_generator:
            self.id_generator = id_generator
        super(RawCollection, self).__init__()

    ## Hooks ##
    def beforeSave(self, instance):
        #set the id field if we have one (mapping or attribute style)
        if self.object_id_field:
            key = self.get_object_id(instance)
            if hasattr(instance, '__setitem__'):
                instance[self.object_id_field] = key
            else:
                setattr(instance, self.object_id_field, key)
        return super(RawCollection, self).beforeSave(instance)
class Collection(RawCollection):
    '''
    A collection bound to a schema and returns model instances
    '''
    def __init__(self, model, data_store=None, name=None,
                 object_id_field=None, id_generator=None, params=None):
        # Default the collection name to the model class name.
        if name is None:
            name = model.__name__
        super(Collection, self).__init__(model=model, data_store=data_store,
            name=name, object_id_field=object_id_field,
            id_generator=id_generator, params=params,)

    def prepare_model(self, model):
        '''
        Legacy hook, you shouldn't modify the model, but if you do return a new
        class
        '''
        return self.modelRegistered(model)

    def get_loader(self):
        '''
        Returns a callable that returns an instantiated model instance
        '''
        # Prepared model is computed once and cached on the collection.
        if not hasattr(self, '_prepped_model'):
            self._prepped_model = self.prepare_model(self.model)
        return self._prepped_model

    def get_object_id(self, instance):
        # Attribute-style lookup (models, not dicts); falls back to
        # id_generator, which is invoked when callable.
        object_id = getattr(instance, self.object_id_field, self.id_generator)
        if callable(object_id):
            object_id = object_id()
        return object_id

    def get_serializable(self, instance):
        return instance.to_dict(serial=True)

    def modelRegistered(self, model):
        # Bind the collection to the model and graft `save`/`remove`
        # convenience methods onto it unless it already defines them.
        model._collection = self
        if not hasattr(model, 'remove'):
            def remove(instance):
                return self.remove(instance)
            model.remove = remove
        if not hasattr(model, 'save'):
            def save(instance):
                return self.save(instance)
            model.save = save
        return super(Collection, self).modelRegistered(model)
class PolymorphicLoader(object):
    """Callable factory that instantiates the concrete model for a record.

    The object-type field embedded in the raw values decides which
    registered model class of the polymorphic collection gets constructed.
    """
    def __init__(self, poly_collection):
        self.collection = poly_collection

    def __call__(self, **values):
        type_name = self.collection.get_object_type_from_values(values)
        model_cls = self.collection.get_model(type_name)
        return model_cls(**values)
class PolymorphicCollection(Collection):
    '''
    A collection representing mixed objects
    '''
    # Field on stored records naming the concrete type ("module.Class").
    object_type_field = '_object_type'
    # Field listing the concrete type plus all micromodels ancestor types.
    object_types_field = '_object_types'

    def __init__(self, model, *args, **kwargs):
        self.prepped_base_model = self.prepare_model(model)
        # object_type => model
        self.descendent_registry = dict()
        # model => (object_type, object_types)
        self.reverse_descendent_registry = dict()
        super(PolymorphicCollection, self).__init__(model, *args, **kwargs)

    def get_loader(self):
        # Hydration goes through the polymorphic loader so each raw record
        # instantiates its registered concrete class.
        return PolymorphicLoader(self)

    def get_model(self, object_type):
        """Return the registered model for object_type, or the base model."""
        if object_type not in self.descendent_registry:
            self.load_model(object_type)
        return self.descendent_registry.get(object_type,
            self.prepped_base_model)

    def load_model(self, object_type):
        #import and add model here (extension point; intentionally a no-op)
        pass

    def extract_object_type(self, cls):
        # BUGFIX: the '%' formatting operator was missing, so this called the
        # string literal and raised TypeError.
        return '%s.%s' % (cls.__module__, cls.__name__)

    def register_model(self, model):
        '''
        Registers a new model to belong in the collection
        '''
        # Silently ignore classes outside this collection's hierarchy.
        if not issubclass(model, (self.model, self.prepped_base_model)):
            return
        model = self.prepare_model(model)
        object_type = self.extract_object_type(model)
        object_types = [object_type]
        # Walk the inheritance tree collecting every micromodels ancestor
        # type name (deduplicated, concrete type first).
        def collect_parents(bases):
            for entry in bases:
                if isinstance(entry, tuple):
                    collect_parents(entry)
                elif issubclass(entry, micromodels.Model):
                    parent_type = self.extract_object_type(entry)
                    if parent_type not in object_types:
                        object_types.append(parent_type)
        collect_parents(model.__bases__)
        self.descendent_registry[object_type] = model
        self.reverse_descendent_registry[model] = (object_type, object_types)

    def get_object_type(self, instance):
        '''
        Return a string representing the model instance type
        '''
        model = type(instance)
        if model in self.reverse_descendent_registry:
            return self.reverse_descendent_registry[model][0]
        # Unregistered instance: read the field off the instance, or derive
        # it from the class itself.
        object_type = getattr(instance, self.object_type_field, None)
        if object_type is None:
            object_type = self.extract_object_type(type(instance))
        if callable(object_type):
            object_type = object_type()
        return object_type

    def get_object_types(self, instance):
        '''
        Return a list of strings representing the various inherritted types of
        the model instance
        '''
        model = type(instance)
        if model in self.reverse_descendent_registry:
            return self.reverse_descendent_registry[model][1]
        object_types = getattr(instance, self.object_types_field, None)
        if callable(object_types):
            object_types = object_types()
        return object_types

    def get_object_type_from_values(self, values):
        # Raw-record lookup used by PolymorphicLoader during hydration.
        return values.get(self.object_type_field, None)

    def afterInitialize(self, instance):
        # Stamp the type fields onto every new instance so they round-trip
        # through the data store.
        object_type = self.get_object_type(instance)
        if object_type:
            instance.add_field(self.object_type_field, object_type,
                micromodels.CharField())
        else:
            assert False, 'Why is object type None?'
        object_types = self.get_object_types(instance)
        if object_types:
            assert len(set(object_types)) == len(object_types), 'Duplicate object types detected'
            instance.add_field(self.object_types_field, object_types,
                micromodels.FieldCollectionField(micromodels.CharField()))
        else:
            assert False, 'Why is object types None?'
        return super(PolymorphicCollection, self).afterInitialize(instance)

    def findType(self, cls, **params):
        # Query by ancestor type: matches concrete instances of cls and of
        # any registered subclass (via the object_types list).
        object_type = self.extract_object_type(cls)
        params[self.object_types_field] = object_type
        return self.find(**params)
| [
"[email protected]"
]
| |
dca261af830e2fc00b8bbeb22fa8e92af90f3b9d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_oppressors.py | 794179fbab874c62e6c90e5f1fc1331b1f5c3cc8 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
#calss header
class _OPPRESSORS():
def __init__(self,):
self.name = "OPPRESSORS"
self.definitions = oppressor
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['oppressor']
| [
"[email protected]"
]
| |
3bf1dfaa5339532ee42b32558cc1c9e8bb8157b2 | 9142f290c2ca89e53ced306804fece05043c3aa0 | /py/trash/bk0/908_predict_1026-1.py | fc46045263cfc0793b058c3a73dbecb9567b93fc | [
"MIT"
]
| permissive | norihitoishida/PLAsTiCC-2018 | aea5ecff5c06c46e3097673228054726fb1cc972 | f7f3e86fd3b01145de6f96ebe9a7bdad49439956 | refs/heads/master | 2021-10-09T00:19:27.389096 | 2018-12-19T06:29:55 | 2018-12-19T06:29:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,969 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 26 15:46:49 2018
@author: kazuki.onodera
"""
import numpy as np
import pandas as pd
import os, gc
from glob import glob
from tqdm import tqdm
import sys
sys.path.append(f'/home/{os.environ.get("USER")}/PythonLibrary')
import lgbextension as ex
import lightgbm as lgb
from multiprocessing import cpu_count
import utils
utils.start(__file__)
#==============================================================================
SUBMIT_FILE_PATH = '../output/1026-1.csv.gz'
COMMENT = 'f001~011'
EXE_SUBMIT = True
DROP = ['f001_hostgal_specz']
SEED = np.random.randint(9999)
np.random.seed(SEED)
print('SEED:', SEED)
NFOLD = 5
LOOP = 5
param = {
'objective': 'multiclass',
'num_class': 14,
'metric': 'multi_logloss',
'learning_rate': 0.01,
'max_depth': 6,
'num_leaves': 63,
'max_bin': 255,
'min_child_weight': 10,
'min_data_in_leaf': 150,
'reg_lambda': 0.5, # L2 regularization term on weights.
'reg_alpha': 0.5, # L1 regularization term on weights.
'colsample_bytree': 0.5,
'subsample': 0.5,
# 'nthread': 32,
'nthread': cpu_count(),
'bagging_freq': 1,
'verbose':-1,
}
# =============================================================================
# load
# =============================================================================
# Load every per-feature-group train pickle and join them column-wise.
files_tr = sorted(glob('../data/train_f*.pkl'))
[print(f) for f in files_tr]  # side-effect comprehension: just lists the inputs

X = pd.concat([
                pd.read_pickle(f) for f in tqdm(files_tr, mininterval=60)
               ], axis=1)
y = utils.load_target().target

X.drop(DROP, axis=1, inplace=True)

# Map the raw class labels to contiguous 0..13 ids (and keep the reverse map).
target_dict = {}
target_dict_r = {}
for i,e in enumerate(y.sort_values().unique()):
    target_dict[e] = i
    target_dict_r[i] = e

y = y.replace(target_dict)

# Joining many feature files must not introduce duplicated column names.
if X.columns.duplicated().sum()>0:
    raise Exception(f'duplicated!: { X.columns[X.columns.duplicated()] }')
print('no dup :) ')
print(f'X.shape {X.shape}')

gc.collect()

COL = X.columns.tolist()
# =============================================================================
# cv
# =============================================================================
# Repeated LightGBM CV: collects fold models, the mean best round count,
# and the competition weighted-logloss for each repetition.
dtrain = lgb.Dataset(X, y, #categorical_feature=CAT,
                     free_raw_data=False)
gc.collect()

model_all = []
nround_mean = 0
wloss_list = []
for i in range(LOOP):
    gc.collect()
    param['seed'] = np.random.randint(9999)  # vary only the bagging seed per repetition
    ret, models = lgb.cv(param, dtrain, 99999, nfold=NFOLD,
                         feval=utils.lgb_multi_weighted_logloss,
                         early_stopping_rounds=100, verbose_eval=50,
                         seed=SEED)
    model_all += models
    nround_mean += len(ret['multi_logloss-mean'])
    wloss_list.append( ret['wloss-mean'][-1] )

# Final training uses 1.3x the average early-stopped round count
# (full data gets more rounds than a CV fold).
nround_mean = int((nround_mean/LOOP) * 1.3)

result = f"CV wloss: {np.mean(wloss_list)} + {np.std(wloss_list)}"
print(result)

# Feature importance, normalized and combined across split/gain.
imp = ex.getImp(model_all)
imp['split'] /= imp['split'].max()
imp['gain'] /= imp['gain'].max()
imp['total'] = imp['split'] + imp['gain']

imp.sort_values('total', ascending=False, inplace=True)
imp.reset_index(drop=True, inplace=True)

imp.to_csv(f'LOG/imp_{__file__}.csv', index=False)

png = f'LOG/imp_{__file__}.png'
utils.savefig_imp(imp, png, x='total', title=f'{__file__}')
utils.send_line(result, png)

# Keep only features that contributed any gain for the final fit.
COL = imp[imp.gain>0].feature.tolist()
# =============================================================================
# model
# =============================================================================
# Train LOOP full-data models on the selected feature subset; predictions
# from all of them are averaged later.
dtrain = lgb.Dataset(X[COL], y, #categorical_feature=CAT,
                     free_raw_data=False)
gc.collect()

np.random.seed(SEED)

model_all = []
for i in range(LOOP):
    print('building', i)
    gc.collect()
    param['seed'] = np.random.randint(9999)  # different seed per bagged model
    model = lgb.train(param, dtrain, num_boost_round=nround_mean, valid_sets=None,
                      valid_names=None, fobj=None, feval=None, init_model=None,
                      feature_name='auto', categorical_feature='auto',
                      early_stopping_rounds=None, evals_result=None,
                      verbose_eval=True, learning_rates=None,
                      keep_training_booster=False, callbacks=None)
    model_all.append(model)

del dtrain, X; gc.collect()  # free the train matrix before loading test data
# =============================================================================
# test
# =============================================================================
# Load the test features, average the predictions of all bagged models,
# and assemble the submission frame (including the synthetic class_99).
files_te = sorted(glob('../data/test_f*.pkl'))

X_test = pd.concat([
                pd.read_pickle(f) for f in tqdm(files_te, mininterval=60)
               ], axis=1)[COL]

for i,model in enumerate(tqdm(model_all)):
    y_pred = model.predict(X_test)
    if i==0:
        y_pred_all = y_pred
    else:
        y_pred_all += y_pred

y_pred_all /= len(model_all)  # simple average over the LOOP models

sub = pd.read_csv('../input/sample_submission.csv.zip')
df = pd.DataFrame(y_pred_all, columns=sub.columns[1:-1])

# Compute preds_99 as the proba of class not being any of the others
# preds_99 = 0.1 gives 1.769
preds_99 = np.ones(df.shape[0])
for i in range(df.shape[1]):
    preds_99 *= (1 - df.iloc[:, i])
df['class_99'] = preds_99

sub = pd.concat([sub[['object_id']], df], axis=1)
sub.to_csv(SUBMIT_FILE_PATH, index=False, compression='gzip')

# Histogram of the predicted class probabilities, for a quick sanity check.
sub.iloc[:, 1:].hist(bins=30, figsize=(16, 12))
png = f'LOG/sub_{__file__}.png'
utils.savefig_sub(sub, png)
utils.send_line('DONE!', png)
# =============================================================================
# submission
# =============================================================================
# Optionally upload the submission, then run the standard end-of-script hooks.
if EXE_SUBMIT:
    print('submit')
    utils.submit(SUBMIT_FILE_PATH, COMMENT)

#==============================================================================
utils.end(__file__)
utils.stop_instance()
| [
"[email protected]"
]
| |
00d487751a336a68638c30b21a13815a2a96c309 | 07ecc53b5be6b1a34914a0e02265e847f3ac1a65 | /Python/Greedy Algorithm/984_Medium_不含AAA或BBB的字符串.py | 22600af30df05dac1d9060917965bf75ff165bad | []
| no_license | JasmineRain/Algorithm | 764473109ad12c051f5337ed6f22b517ed9bff30 | 84d7e11c1a01b1994e04a3ab446f0a35eb3d362a | refs/heads/master | 2023-03-14T00:39:51.767074 | 2021-03-09T12:41:44 | 2021-03-09T12:41:44 | 289,603,630 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,275 | py | class Solution:
def strWithout3a3b(self, a: int, b: int) -> str:
ans = [""] * (a + b)
index = 0
round = 1
ca = a
cb = b
if a >= b:
while ca > 0:
ans[index] = "a"
ca -= 1
index += 3
if index >= (a + b):
index = round
round += 1
while cb > 0:
ans[index] = "b"
cb -= 1
index += 3
if index >= (a + b):
index = round
round += 1
return "".join(ans)
else:
while cb > 0:
ans[index] = "b"
cb -= 1
index += 3
if index >= (a + b):
index = round
round += 1
while ca > 0:
ans[index] = "a"
ca -= 1
index += 3
if index >= (a + b):
index = round
round += 1
return "".join(ans)
if __name__ == "__main__":
S = Solution()
print(S.strWithout3a3b(a=1, b=2))
print(S.strWithout3a3b(a=4, b=1))
print(S.strWithout3a3b(a=1, b=3))
| [
"[email protected]"
]
| |
c3f8c9326a98788d32a1dd6f6cb2abac77136527 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /1301-1400/1342-Number of Steps to Reduce a Number to Zero/1342-Number of Steps to Reduce a Number to Zero.py | ff2024f01916125f617a9207453ff82c87002008 | [
"MIT"
]
| permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 239 | py | class Solution:
def numberOfSteps (self, num: int) -> int:
step = 0
while num != 0:
if num & 1:
num -= 1
else:
num >>= 1
step += 1
return step
| [
"[email protected]"
]
| |
6a1b9b2699b5d40ab586304e0361f170ab18ac56 | ed8cdcce521b8cab33c66f716c0886e17f035d21 | /.history/public/publicfunction_20191221202740.py | 99378bfc5a5a438a74f015f442e55bdf06ce5ce6 | []
| no_license | deancsdfy/AndroidPerformanceTool_windows | 8ac35729bc651c3af551f090d6788b6ee3f17eb5 | c4906aa9347e8e5eca68dbb7cf2d66a327c70d1f | refs/heads/master | 2020-11-27T20:38:55.014228 | 2020-01-09T15:55:52 | 2020-01-09T15:55:52 | 229,593,460 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,646 | py | #coding=utf-8
import os,platform
import subprocess
import re
# Cached adb serial number of the target device (set lazily; empty = unknown).
serialno_num = ''

# Pick the text-search tool for the host OS: Windows ships findstr,
# everything else has grep.
system = platform.system()
# NOTE: the original used `if system is "Windows"`, which compares object
# identity rather than string value and is not guaranteed to be true even
# when platform.system() returns "Windows"; use equality instead.
if system == "Windows":
    find_util = "findstr"
else:
    find_util = "grep"
#判断是否设置环境变量ANDROID_HOME
# if "ANDROID_HOME" in os.environ:
# if system == "Windows":
# command = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", "adb.exe")
# else:
# command = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", "adb")
# else:
# raise EnvironmentError(
# "Adb not found in $ANDROID_HOME path: %s." %os.environ["ANDROID_HOME"])
#获取手机
def get_devices():
    """Return the serial numbers of attached adb devices in the 'device' state.

    Parses `adb devices` output, skipping the header line.

    NOTE(review): on Python 3 readlines() yields bytes, so the
    `'device' in line.strip()` test would raise TypeError -- this function
    appears written for Python 2; confirm the target interpreter.
    NOTE(review): the loop `break`s at the first non-matching line instead
    of skipping it, so devices listed after e.g. an 'offline' entry are lost.
    """
    devices=[]
    result = subprocess.Popen("adb devices", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.readlines()
    for line in result[1:]:  # [0] is the "List of devices attached" header
        if 'device' in line.strip():
            devices.append(line.split()[0])  # first column is the serial number
        else:
            break
    return devices
#adb命令
def adb(args):
    """Run a raw adb command and return the pipe opened by os.popen.

    `args` is appended verbatim after "adb "; the caller reads the
    command's stdout from the returned file-like object.
    """
    command = "adb " + str(args)
    return os.popen(command)
#adb shell命令
def shell(args):
    """Run an `adb shell` command and return the spawned Popen.

    stdout/stderr are captured via pipes; callers read proc.stdout.

    Fix: the command string used to be wrapped in an extra pair of literal
    single quotes ('adb shell "..."'), which made the OS shell look for a
    single command literally named `adb shell "..."` -- the call could
    never succeed.  Build the command without the stray quoting, as the
    (previously commented-out) original intent shows.
    """
    cmd = 'adb shell "%s"' % str(args)
    print(cmd)
    return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def get_current_packagename():
    """Print the package name of the foreground activity on the device.

    Requires a connected adb device; uses `dumpsys activity top` and a
    regex over "package/activity" tokens.
    NOTE(review): the computed value is only printed -- the `return` is
    commented out, so callers get None.
    """
    # Regex that matches "package/activity" tokens in dumpsys output.
    pattern = re.compile(r"[a-zA-Z0-9\.]+/.[a-zA-Z0-9\.]+")
    # package = shell('adb shell "dumpsys activity top| grep ACTIVITY"').stdout.read()
    package = shell('dumpsys activity top| grep ACTIVITY').stdout.read()
    # Take index -1 because on some models (e.g. Xiaomi Mi 8) dumpsys also
    # reports extra system processes/packages; the last match is the
    # foreground one.
    print(pattern.findall(package.decode())[-1].split('/')[0])
    # return pattern.findall(package.decode())[-1].split('/')[0]
def get_current_activity():
    """Intended to print the foreground Activity name (currently a no-op).

    NOTE(review): the dumpsys call and the parsing are commented out
    because newer adb/dumpsys output no longer exposes the activity this
    way, so calling this function currently does nothing beyond compiling
    the regex.
    """
    # Regex that matches "package/activity" tokens in dumpsys output.
    pattern = re.compile(r"[a-zA-Z0-9\.]+/.[a-zA-Z0-9\.]+")
    # With the newer adb command-line output, the activity can no longer be
    # read this way:
    # package = shell('dumpsys activity top| grep ACTIVITY').stdout.read()
    # print(pattern.findall(package.decode())[-1].split('/')[1])
    # return pattern.findall(package.decode())[-1].split('/')[1]
if __name__ == "__main__":
get_current_activity()
get_current_packagename() | [
"[email protected]"
]
| |
3abf4e446f31266763a2fe710d62337e05dd91a8 | c21faf85627b1cfd96494aac73cc40e5f11ebb46 | /results/test_330.py | 8be1918b363f41265b49b7e3247f2554e04979da | []
| no_license | ekkya/Cyclomatic-Complexity | d02c61e009087e7d51738e60605875741532b878 | 172db2efdd974f5abad964e335552aec974b47cb | refs/heads/master | 2021-08-28T17:13:14.718314 | 2017-12-12T22:04:13 | 2017-12-12T22:04:13 | 112,042,202 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 73,382 | py | """Get the number of each character in any given text.
Inputs:
A txt file -- You will be asked for an input file. Simply input the name
of the txt file in which you have the desired text.
"""
import pprint
import collections
def main():
    """Prompt for a text file and pretty-print its per-character counts.

    Characters are upper-cased before counting, so 'a' and 'A' share a
    bucket.
    """
    path = input('File Name: ')
    with open(path, 'r') as handle:
        tally = collections.Counter(handle.read().upper())
    print(pprint.pformat(tally))
if __name__ == "__main__":
main()# Script Name : pscheck.py
# Author : Craig Richards
# Created : 19th December 2011
# Last Modified : 17th June 2013
# Version : 1.1
# Modifications : 1.1 - 17/06/13 - CR - Changed to functions, and check os before running the program
# Description : Process check on Nix boxes, diplsay formatted output from ps command
import commands, os, string
def ps():
    """Prompt for a program name and print details of its process (Python 2).

    Greps `ps -f` output for the name and prints path/owner/pid/ppid/start
    time by fixed column position.
    NOTE(review): the bare `except` hides every failure (no match, index
    errors, etc.) behind one generic message.
    """
    program = raw_input("Enter the name of the program to check: ")
    try:
        #perform a ps command and assign results to a list
        output = commands.getoutput("ps -f|grep " + program)
        proginfo = string.split(output)
        #display results
        print "\n\
Full path:\t\t", proginfo[5], "\n\
Owner:\t\t\t", proginfo[0], "\n\
Process ID:\t\t", proginfo[1], "\n\
Parent process ID:\t", proginfo[2], "\n\
Time started:\t\t", proginfo[4]
    except:
        print "There was a problem with the program."
def main():
    """Run the process check on POSIX systems; refuse on Windows (Python 2)."""
    if os.name == "posix": # Unix/Linux/MacOS/BSD/etc
        ps() # Call the function
    elif os.name in ("nt", "dos", "ce"): # if the OS is windows
        print "You need to be on Linux or Unix to run this"
if __name__ == '__main__':
main()from bs4 import BeautifulSoup
import datetime
import mechanize
import urllib2
# NOTE(review): this script brute-forces the date-of-birth field of a public
# exam-result form for one fixed registration number, using Python 2-era
# libraries (mechanize, urllib2).
# Create a Browser
b = mechanize.Browser()
# Disable loading robots.txt
b.set_handle_robots(False)
b.addheaders = [('User-agent',
                 'Mozilla/4.0 (compatible; MSIE 5.0; Windows 98;)')]
# Navigate
b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
# Choose a form
b.select_form(nr=0)
# Fill it out
b['regno'] = '37000304'

# Candidate date-of-birth range, tried one day at a time.
currentdate = datetime.date(1997,3,10)
enddate = datetime.date(1998,4,1)
while currentdate <= enddate:
    ct=0
    #print currentdate
    yyyymmdd = currentdate.strftime("%Y/%m/%d")
    # Rearrange YYYY/MM/DD into the DD/MM/YYYY format the form expects.
    ddmmyyyy = yyyymmdd[8:] + "/" + yyyymmdd[5:7] + "/" +yyyymmdd[:4]
    print(ddmmyyyy)
    b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
    b.select_form(nr=0)
    b['regno'] = '37000304'
    b['dob'] = ddmmyyyy
    fd = b.submit()
    #print(fd.read())
    soup = BeautifulSoup(fd.read(),'html.parser')
    # Count <table> elements in the response page; six tables is taken to
    # mean "wrong date of birth" -- presumably determined empirically from
    # the 2015 page layout; TODO confirm.
    for writ in soup.find_all('table'):
        ct = ct + 1;
        #print (ct)
    if ct == 6:
        print("---fail---")
    else:
        print("--true--")
        break;
    currentdate += datetime.timedelta(days=1)
#print fd.read()# Script Name : new_script.py
# Author : Craig Richards
# Created : 20th November 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will create a new basic template for a new script
import os # Load the library module
import sys # Load the library module
import datetime # Load the library module
# Usage text shown for missing/invalid arguments (Python 2 script).
text = '''You need to pass an argument for the new script you want to create, followed by the script name. You can use
-python : Python Script
-bash : Bash Script
-ksh : Korn Shell Script
-sql  : SQL Script'''

# Require at least: <option> <script-name>.
if len(sys.argv) < 3:
    print text
    sys.exit()

# Any help flag prints the usage text and exits.
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:
    print text
    sys.exit()
else:
    # Map the chosen option to a template config file and file extension.
    if '-python' in sys.argv[1]:
        config_file = "python.cfg"
        extension = ".py"
    elif '-bash' in sys.argv[1]:
        config_file = "bash.cfg"
        extension = ".bash"
    elif '-ksh' in sys.argv[1]:
        config_file = "ksh.cfg"
        extension = ".ksh"
    elif '-sql' in sys.argv[1]:
        config_file = "sql.cfg"
        extension = ".sql"
    else:
        print 'Unknown option - ' + text
        sys.exit()

# Paths come from the my_config / scripts environment variables.
confdir = os.getenv("my_config")
scripts = os.getenv("scripts")
dev_dir = "Development"
newfile = sys.argv[2]
output_file = (newfile + extension)
outputdir = os.path.join(scripts,dev_dir)
script = os.path.join(outputdir, output_file)
input_file = os.path.join(confdir,config_file)
# Placeholders substituted while copying the template.
old_text = " Script Name : "
new_text = (" Script Name : " + output_file)
if not(os.path.exists(outputdir)):
    os.mkdir(outputdir)
newscript = open(script, 'w')
input = open(input_file, 'r')  # NOTE(review): shadows the builtin input()
today = datetime.date.today()
old_date = " Created :"
new_date = (" Created : " + today.strftime("%d %B %Y"))
# Copy the template line by line, stamping in the name and today's date.
for line in input:
    line = line.replace(old_text, new_text)
    line = line.replace(old_date, new_date)
    newscript.write(line)
# Script Name : osinfo.py
# Authors : {'geekcomputers': 'Craig Richards', 'dmahugh': 'Doug Mahugh','rutvik1010':'Rutvik Narayana Nadimpally','y12uc231': 'Satyapriya Krishna', 'minto4644':'Mohit Kumar'}
# Created : 5th April 2012
# Last Modified : July 19 2016
# Version : 1.0
# Modification 1 : Changed the profile to list again. Order is important. Everytime we run script we don't want to see different ordering.
# Modification 2 : Fixed the AttributeError checking for all properties. Using hasttr().
# Modification 3 : Removed ': ' from properties inside profile.
# Description : Displays some information about the OS you are running this script on
import platform as pl
# platform-module attribute names to report, in a fixed display order.
profile = [
    'architecture',
    'linux_distribution',
    'mac_ver',
    'machine',
    'node',
    'platform',
    'processor',
    'python_build',
    'python_compiler',
    'python_version',
    'release',
    'system',
    'uname',
    'version',
]


class bcolors:
    """ANSI escape sequences used to style the terminal output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'      # reset
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


# Print each property the running platform module actually provides
# (hasattr guards names removed in newer Pythons, e.g. linux_distribution).
for key in profile:
    if hasattr(pl,key):
        print(key + bcolors.BOLD + ": "+ str(getattr(pl,key)())+ bcolors.ENDC)
import csv
import glob
import os
import pdb
import pandas as pd
def main():
    """Merge each sub-folder's CSV files into a single results.csv.

    For every immediate sub-directory of the current directory (except
    .git), read all *.csv files it contains, concatenate them row-wise and
    write the result back into the folder as results.csv.

    Fixes: the original walked the entire tree just to read the top-level
    directory list, and called pd.concat([]) -- which raises ValueError --
    for any folder containing no CSV files; such folders are now skipped.
    """
    folders = next(os.walk("."))[1]  # immediate sub-directories only
    for folder in folders:
        if folder == ".git":
            continue
        csv_paths = glob.glob(folder + "/*.csv")
        if not csv_paths:
            continue  # nothing to merge in this folder
        frames = [pd.read_csv(path, index_col=None, header=0) for path in csv_paths]
        merged = pd.concat(frames)
        merged.to_csv(folder + "/results.csv")
main()# Script Name : logs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 14 February 2016
# Version : 1.2
#
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - Tidy up comments and syntax
#
# Description : This script will search for all *.log files in the given directory, zip them using the program you specify and then date stamp them
import os # Load the Library Module
from time import strftime # Load just the strftime Module from Time
logsdir = "c:\puttylogs" # Set the Variable logsdir
zip_program = "zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1 = files + "." + strftime("%Y-%m-%d") + ".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
os.remove(files) # Remove the original log files# Script Name : check_for_sqlite_files.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Remove unecessary line and variable on Line 21
# Description : Scans directories to check if there are any sqlite files in there
import os
def isSQLite3(filename):
    """Return True when *filename* exists and starts with a SQLite 3 header.

    A valid SQLite database file is at least 100 bytes (the header size)
    and begins with the 16-byte magic string "SQLite format 3\\000".

    Fixes: the original opened the file without binding the handle and then
    called fd.close() on an undefined name (NameError), leaked the handle,
    and compared the bytes header against a str literal, which can never
    match on Python 3.
    """
    from os.path import isfile, getsize

    if not isfile(filename):
        return False
    if getsize(filename) < 100:  # SQLite database file header is 100 bytes
        return False
    with open(filename, 'rb') as fd:  # with-block closes the handle
        header = fd.read(16)
    return header == b'SQLite format 3\x00'
# Walk the current tree and log which files are/aren't SQLite databases
# (Python 2 print statements).
log=open('sqlite_audit.txt','w')
for r,d,f in os.walk(r'.'):
    for files in f:
        # NOTE(review): the bare name `files` is passed to isSQLite3(),
        # not os.path.join(r, files), so files outside the current
        # directory are misreported as missing.
        if isSQLite3(files):
            print files
            print "[+] '%s' **** is a SQLITE database file **** " % os.path.join(r,files)
            log.write("[+] '%s' **** is a SQLITE database file **** " % files+'\n')
        else:
            # NOTE(review): two negative lines are written per file (full
            # path and bare name) -- presumably one is leftover; verify.
            log.write("[-] '%s' is NOT a sqlite database file" % os.path.join(r,files)+'\n')
            log.write("[-] '%s' is NOT a sqlite database file" % files+'\n')
# Script Name : create_dir_if_not_there.py
# Author : Craig Richards
# Created : 09th January 2012
# Last Modified : 22nd October 2015
# Version : 1.0.1
# Modifications : Added exceptions
# : 1.0.1 Tidy up comments and syntax
#
# Description : Checks to see if a directory exists in the users home directory, if not then create it
import os # Import the OS module
try:
home = os.path.expanduser("~") # Set the variable home by expanding the users set home directory
print home # Print the location
if not os.path.exists(home + '/testdir'):
os.makedirs(home + '/testdir') # If not create the directory, inside their home directory
except Exception, e:
print e# Script Name : move_files_over_x_days.py
# Author : Craig Richards
# Created : 8th December 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will move all the files from the src directory that are over 240 days old to the destination directory.
import shutil
import sys
import time
import os
src = 'u:\\test'   # Set the source directory
dst = 'c:\\test'   # Set the destination directory
now = time.time()  # Current time, for the age comparison

# Move regular files last modified more than 240 days ago from src to dst.
for name in os.listdir(src):          # listdir() yields bare names, not paths
    path = os.path.join(src, name)    # qualify with src: the original stat'ed
                                      # bare names relative to the CWD, so it
                                      # only worked when run from inside src
    if os.path.isfile(path) and os.stat(path).st_mtime < now - 240 * 86400:
        shutil.move(path, dst)
# Script Name : sqlite_table_check.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Checks the main SQLITE database to ensure all the tables should exist
import sqlite3
import sys
import os
# Paths come from the dropbox / my_config environment variables.
dropbox = os.getenv("dropbox")
config = os.getenv("my_config")
dbfile = ("Databases\jarvis.db")            # DB to audit, relative to dropbox
listfile = ("sqlite_master_table.lst")      # expected table names, one per line
master_db = os.path.join(dropbox, dbfile)
config_file = os.path.join(config, listfile)
tablelist = open(config_file,'r');          # NOTE(review): handle never closed

# Report the SQLite library version backing the database.
conn = sqlite3.connect(master_db)
cursor = conn.cursor()
cursor.execute('SELECT SQLITE_VERSION()')
data = cursor.fetchone()

# NOTE(review): the expected literal "(u'3.6.21',)" is the Python 2 repr of
# the fetched row; on Python 3 str(data) has no u-prefix so this branch can
# never match -- confirm intended interpreter/version.
if str(data) == "(u'3.6.21',)":
    print ("\nCurrently " + master_db + " is on SQLite version: %s" % data + " - OK -\n")
else:
    print ("\nDB On different version than master version - !!!!! \n")
conn.close()

print ("\nCheckling " + master_db + " against " + config_file + "\n")

# Check each expected table exists in sqlite_master.
# NOTE(review): a new connection is opened per table and never closed;
# one connection outside the loop would do.
for table in tablelist.readlines():
    conn = sqlite3.connect(master_db)
    cursor = conn.cursor()
    cursor.execute("select count(*) from sqlite_master where name = ?",(table.strip(), ))
    res = cursor.fetchone()

    if (res[0]):
        print ('[+] Table : ' + table.strip() + ' exists [+]')
    else:
        print ('[-] Table : ' + table.strip() + ' does not exist [-]')
# Script Name : puttylogs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 29th February 2012
# Version : 1.2
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - 29-02-12 - CR - Added shutil module and added one line to move the zipped up logs to the zipped_logs directory
# Description : Zip up all the logs in the given directory
import os # Load the Library Module
import shutil # Load the Library Module - 1.2
from time import strftime # Load just the strftime Module from Time
logsdir="c:\logs\puttylogs" # Set the Variable logsdir
zipdir="c:\logs\puttylogs\zipped_logs" # Set the Variable zipdir - 1.2
zip_program="zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1=files+"."+strftime("%Y-%m-%d")+".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
shutil.move(files1, zipdir) # Move the zipped log files to the zipped_logs directory - 1.2
os.remove(files) # Remove the original log files
# Script Name : daily_checks.py
# Author : Craig Richards
# Created : 07th December 2011
# Last Modified : 01st May 2013
# Version : 1.5
#
# Modifications : 1.1 Removed the static lines for the putty sessions, it now reads a file, loops through and makes the connections.
# : 1.2 Added a variable filename=sys.argv[0] , as when you use __file__ it errors when creating an exe with py2exe.
# : 1.3 Changed the server_list.txt file name and moved the file to the config directory.
# : 1.4 Changed some settings due to getting a new pc
# : 1.5 Tidy comments and syntax
#
# Description : This simple script loads everything I need to carry out the daily checks for our systems.
import platform # Load Modules
import os
import subprocess
import sys
from time import strftime # Load just the strftime Module from Time
def clear_screen():
    """Clear the terminal, using the command appropriate for this OS."""
    if os.name in ("nt", "dos", "ce"):  # DOS/Windows
        os.system('CLS')
    elif os.name == "posix":            # Unix/Linux/MacOS/BSD/etc
        os.system('clear')
def print_docs(): # Function to print the daily checks automatically
    """Open Word, print the daily-checks document, then exit Word.

    The /mFilePrintDefault and /mFileExit switches run Word's built-in
    print/exit macros; communicate() blocks until Word has finished.
    """
    print ("Printing Daily Check Sheets:")
    subprocess.Popen(["C:\\Program Files (x86)\Microsoft Office\Office14\winword.exe", "P:\\\\Documentation\\Daily Docs\\Back office Daily Checks.doc", "/mFilePrintDefault", "/mFileExit"]).communicate()
def putty_sessions(conffilename=None): # Function to load the putty sessions I need
    """Open a PuTTY session for every server named in the config file.

    conffilename: path to a file with one saved-session name per line.
        Optional for backward compatibility -- when omitted it is rebuilt
        from the same my_config environment variable and file name main()
        uses.  (Previously this function read `conffilename` as a global
        that was only ever defined as a local inside main(), so calling it
        raised NameError.)
    """
    if conffilename is None:
        conffilename = os.path.join(os.getenv("my_config"), 'daily_checks_servers.conf')
    for server in open(conffilename):               # one saved session per line
        subprocess.Popen(('putty -load '+server))   # Open the PuTTY sessions - 1.1
def rdp_sessions():
    """Launch the saved Remote Desktop session for the euroclear box."""
    print ("Loading RDP Sessions:")
    subprocess.Popen("mstsc eclr.rdp") # Open up a terminal session connection and load the euroclear session
def euroclear_docs():
    """Open the Euroclear password document in Internet Explorer.

    The two adjacent string literals concatenate into a single command line:
    the IE executable path followed by the file:// URL of the document.
    """
    subprocess.Popen('"C:\\Program Files\\Internet Explorer\\iexplore.exe"' '"file://fs1\pub_b\Pub_Admin\Documentation\Settlements_Files\PWD\Eclr.doc"')
# End of the functions
# Start of the Main Program
def main():
    """Run all the morning checks: greeting, printouts, and sessions."""
    filename = sys.argv[0] # Create the variable filename
    confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable - 1.3
    conffile = ('daily_checks_servers.conf') # Set the variable conffile - 1.3
    # NOTE(review): conffilename is local to main() and is not passed to
    # putty_sessions(), which needs it.
    conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together - 1.3
    clear_screen() # Call the clear screen function

    # Welcome banner: user, script name, timestamp, host, and working dir.
    print ("Good Morning " + os.getenv('USERNAME') + ", "+
    filename, "ran at", strftime("%Y-%m-%d %H:%M:%S"), "on",platform.node(), "run from",os.getcwd())

    print_docs() # Call the print_docs function
    putty_sessions() # Call the putty_session function
    rdp_sessions() # Call the rdp_sessions function
    euroclear_docs() # Call the euroclear_docs function

if __name__ == "__main__":
    main()
import serial
import sys
#A serial port-scanner for linux and windows platforms
#Author: Julio César Echeverri Marulanda
#e-mail: [email protected]
#blog: blogdelingeniero1.wordpress.com
#You should have installed the PySerial module to use this method.
#You can install pyserial with the following line: pip install pyserial
def ListAvailablePorts():
    """Return the names of the serial ports available on this machine.

    Probes candidate ports on Windows (numeric ids 0..254) and Linux
    (/dev/ttyUSB0..254) by trying to open each with PySerial; ports that
    open are closed again and their names collected.  Returns an empty
    list when none are available, and prints a notice on unsupported
    platforms.

    Fix: the platform test used `platform == 'linux'`, which misses
    Python 2 where sys.platform is 'linux2'; startswith covers both.
    """
    AvailablePorts = []
    platform = sys.platform
    if platform == 'win32':
        for i in range(255):
            try:
                ser = serial.Serial(i,9600)
            except serial.serialutil.SerialException:
                pass  # port does not exist or is in use
            else:
                AvailablePorts.append(ser.portstr)
                ser.close()
    elif platform.startswith('linux'):  # 'linux2' on Python 2, 'linux' on Python 3
        for i in range(0,255):
            try:
                ser = serial.Serial('/dev/ttyUSB'+str(i))
            except serial.serialutil.SerialException:
                pass  # port does not exist or is in use
            else:
                AvailablePorts.append('/dev/ttyUSB'+str(i))
                ser.close()
    else:
        print('''This method was developed only for linux and windows
          the current platform isn't recognised''')
    return AvailablePorts
# EXAMPLE OF HOW IT WORKS
# if an Arduino is connected to the computer, the port will be show in the terminal
# print ListAvailablePorts()# Script Name : nslookup_check.py
# Author : Craig Richards
# Created : 5th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This very simple script opens the file server_list.txt and the does an nslookup for each one to check the DNS entry
import subprocess # Import the subprocess module
for server in open('server_list.txt'): # Open the file and read each line
subprocess.Popen(('nslookup ' + server)) # Run the nslookup command for each server in the listimport pprint
info = '''SCENE I. Yorkshire. Gaultree Forest.
Enter the ARCHBISHOP OF YORK, MOWBRAY, LORD HASTINGS, and others
ARCHBISHOP OF YORK
What is this forest call'd?
HASTINGS
'Tis Gaultree Forest, an't shall please your grace.
ARCHBISHOP OF YORK
Here stand, my lords; and send discoverers forth
To know the numbers of our enemies.
HASTINGS
We have sent forth already.
ARCHBISHOP OF YORK
'Tis well done.
My friends and brethren in these great affairs,
I must acquaint you that I have received
New-dated letters from Northumberland;
Their cold intent, tenor and substance, thus:
Here doth he wish his person, with such powers
As might hold sortance with his quality,
The which he could not levy; whereupon
He is retired, to ripe his growing fortunes,
To Scotland: and concludes in hearty prayers
That your attempts may overlive the hazard
And fearful melting of their opposite.
MOWBRAY
Thus do the hopes we have in him touch ground
And dash themselves to pieces.
Enter a Messenger
HASTINGS
Now, what news?
Messenger
West of this forest, scarcely off a mile,
In goodly form comes on the enemy;
And, by the ground they hide, I judge their number
Upon or near the rate of thirty thousand.
MOWBRAY
The just proportion that we gave them out
Let us sway on and face them in the field.
ARCHBISHOP OF YORK
What well-appointed leader fronts us here?
Enter WESTMORELAND
MOWBRAY
I think it is my Lord of Westmoreland.
WESTMORELAND
Health and fair greeting from our general,
The prince, Lord John and Duke of Lancaster.
ARCHBISHOP OF YORK
Say on, my Lord of Westmoreland, in peace:
What doth concern your coming?
WESTMORELAND
Then, my lord,
Unto your grace do I in chief address
The substance of my speech. If that rebellion
Came like itself, in base and abject routs,
Led on by bloody youth, guarded with rags,
And countenanced by boys and beggary,
I say, if damn'd commotion so appear'd,
In his true, native and most proper shape,
You, reverend father, and these noble lords
Had not been here, to dress the ugly form
Of base and bloody insurrection
With your fair honours. You, lord archbishop,
Whose see is by a civil peace maintained,
Whose beard the silver hand of peace hath touch'd,
Whose learning and good letters peace hath tutor'd,
Whose white investments figure innocence,
The dove and very blessed spirit of peace,
Wherefore do you so ill translate ourself
Out of the speech of peace that bears such grace,
Into the harsh and boisterous tongue of war;
Turning your books to graves, your ink to blood,
Your pens to lances and your tongue divine
To a trumpet and a point of war?
ARCHBISHOP OF YORK
Wherefore do I this? so the question stands.
Briefly to this end: we are all diseased,
And with our surfeiting and wanton hours
Have brought ourselves into a burning fever,
And we must bleed for it; of which disease
Our late king, Richard, being infected, died.
But, my most noble Lord of Westmoreland,
I take not on me here as a physician,
Nor do I as an enemy to peace
Troop in the throngs of military men;
But rather show awhile like fearful war,
To diet rank minds sick of happiness
And purge the obstructions which begin to stop
Our very veins of life. Hear me more plainly.
I have in equal balance justly weigh'd
What wrongs our arms may do, what wrongs we suffer,
And find our griefs heavier than our offences.
We see which way the stream of time doth run,
And are enforced from our most quiet there
By the rough torrent of occasion;
And have the summary of all our griefs,
When time shall serve, to show in articles;
Which long ere this we offer'd to the king,
And might by no suit gain our audience:
When we are wrong'd and would unfold our griefs,
We are denied access unto his person
Even by those men that most have done us wrong.
The dangers of the days but newly gone,
Whose memory is written on the earth
With yet appearing blood, and the examples
Of every minute's instance, present now,
Hath put us in these ill-beseeming arms,
Not to break peace or any branch of it,
But to establish here a peace indeed,
Concurring both in name and quality.
WESTMORELAND
When ever yet was your appeal denied?
Wherein have you been galled by the king?
What peer hath been suborn'd to grate on you,
That you should seal this lawless bloody book
Of forged rebellion with a seal divine
And consecrate commotion's bitter edge?
ARCHBISHOP OF YORK
My brother general, the commonwealth,
To brother born an household cruelty,
I make my quarrel in particular.
WESTMORELAND
There is no need of any such redress;
Or if there were, it not belongs to you.
MOWBRAY
Why not to him in part, and to us all
That feel the bruises of the days before,
And suffer the condition of these times
To lay a heavy and unequal hand
Upon our honours?
WESTMORELAND
O, my good Lord Mowbray,
Construe the times to their necessities,
And you shall say indeed, it is the time,
And not the king, that doth you injuries.
Yet for your part, it not appears to me
Either from the king or in the present time
That you should have an inch of any ground
To build a grief on: were you not restored
To all the Duke of Norfolk's signories,
Your noble and right well remember'd father's?
MOWBRAY
What thing, in honour, had my father lost,
That need to be revived and breathed in me?
The king that loved him, as the state stood then,
Was force perforce compell'd to banish him:
And then that Harry Bolingbroke and he,
Being mounted and both roused in their seats,
Their neighing coursers daring of the spur,
Their armed staves in charge, their beavers down,
Their eyes of fire sparking through sights of steel
And the loud trumpet blowing them together,
Then, then, when there was nothing could have stay'd
My father from the breast of Bolingbroke,
O when the king did throw his warder down,
His own life hung upon the staff he threw;
Then threw he down himself and all their lives
That by indictment and by dint of sword
Have since miscarried under Bolingbroke.
WESTMORELAND
You speak, Lord Mowbray, now you know not what.
The Earl of Hereford was reputed then
In England the most valiant gentlemen:
Who knows on whom fortune would then have smiled?
But if your father had been victor there,
He ne'er had borne it out of Coventry:
For all the country in a general voice
Cried hate upon him; and all their prayers and love
Were set on Hereford, whom they doted on
And bless'd and graced indeed, more than the king.
But this is mere digression from my purpose.
Here come I from our princely general
To know your griefs; to tell you from his grace
That he will give you audience; and wherein
It shall appear that your demands are just,
You shall enjoy them, every thing set off
That might so much as think you enemies.
MOWBRAY
But he hath forced us to compel this offer;
And it proceeds from policy, not love.
WESTMORELAND
Mowbray, you overween to take it so;
This offer comes from mercy, not from fear:
For, lo! within a ken our army lies,
Upon mine honour, all too confident
To give admittance to a thought of fear.
Our battle is more full of names than yours,
Our men more perfect in the use of arms,
Our armour all as strong, our cause the best;
Then reason will our heart should be as good
Say you not then our offer is compell'd.
MOWBRAY
Well, by my will we shall admit no parley.
WESTMORELAND
That argues but the shame of your offence:
A rotten case abides no handling.
HASTINGS
Hath the Prince John a full commission,
In very ample virtue of his father,
To hear and absolutely to determine
Of what conditions we shall stand upon?
WESTMORELAND
That is intended in the general's name:
I muse you make so slight a question.
ARCHBISHOP OF YORK
Then take, my Lord of Westmoreland, this schedule,
For this contains our general grievances:
Each several article herein redress'd,
All members of our cause, both here and hence,
That are insinew'd to this action,
Acquitted by a true substantial form
And present execution of our wills
To us and to our purposes confined,
We come within our awful banks again
And knit our powers to the arm of peace.
WESTMORELAND
This will I show the general. Please you, lords,
In sight of both our battles we may meet;
And either end in peace, which God so frame!
Or to the place of difference call the swords
Which must decide it.
ARCHBISHOP OF YORK
My lord, we will do so.
Exit WESTMORELAND
MOWBRAY
There is a thing within my bosom tells me
That no conditions of our peace can stand.
HASTINGS
Fear you not that: if we can make our peace
Upon such large terms and so absolute
As our conditions shall consist upon,
Our peace shall stand as firm as rocky mountains.
MOWBRAY
Yea, but our valuation shall be such
That every slight and false-derived cause,
Yea, every idle, nice and wanton reason
Shall to the king taste of this action;
That, were our royal faiths martyrs in love,
We shall be winnow'd with so rough a wind
That even our corn shall seem as light as chaff
And good from bad find no partition.
ARCHBISHOP OF YORK
No, no, my lord. Note this; the king is weary
Of dainty and such picking grievances:
For he hath found to end one doubt by death
Revives two greater in the heirs of life,
And therefore will he wipe his tables clean
And keep no tell-tale to his memory
That may repeat and history his loss
To new remembrance; for full well he knows
He cannot so precisely weed this land
As his misdoubts present occasion:
His foes are so enrooted with his friends
That, plucking to unfix an enemy,
He doth unfasten so and shake a friend:
So that this land, like an offensive wife
That hath enraged him on to offer strokes,
As he is striking, holds his infant up
And hangs resolved correction in the arm
That was uprear'd to execution.
HASTINGS
Besides, the king hath wasted all his rods
On late offenders, that he now doth lack
The very instruments of chastisement:
So that his power, like to a fangless lion,
May offer, but not hold.
ARCHBISHOP OF YORK
'Tis very true:
And therefore be assured, my good lord marshal,
If we do now make our atonement well,
Our peace will, like a broken limb united,
Grow stronger for the breaking.
MOWBRAY
Be it so.
Here is return'd my Lord of Westmoreland.
Re-enter WESTMORELAND
WESTMORELAND
The prince is here at hand: pleaseth your lordship
To meet his grace just distance 'tween our armies.
MOWBRAY
Your grace of York, in God's name then, set forward.
ARCHBISHOP OF YORK
Before, and greet his grace: my lord, we come.
Exeunt'''
# Build a character-frequency table of the play text held in `info`
# (assigned earlier in the file) and pretty-print the resulting dict.
count = {}
for character in info.upper():  # fold to upper case so counting is case-insensitive
    count[character] = count.get(character, 0) + 1
value = pprint.pformat(count)  # `pprint` is imported earlier in the file
print(value)# Script Name : get_info_remoute_srv.py
# Author : Pavel Sirotkin
# Created : 3th April 2016
# Last Modified : -
# Version : 1.0.0
# Modifications :
# Description : this will get info about remoute server on linux through ssh connection. Connect these servers must be through keys
import subprocess
HOSTS = ('proxy1', 'proxy')        # remote hosts reachable over key-based ssh
COMMANDS = ('uname -a', 'uptime')  # commands to run on every host
for host in HOSTS:
    result = []
    for command in COMMANDS:
        # shell=False: the argv list is passed to ssh verbatim, no shell parsing.
        ssh = subprocess.Popen(["ssh", "%s" % host, command],
                               shell=False,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        result.append(ssh.stdout.readlines())  # collect the remote stdout lines
    print('--------------- ' + host + ' --------------- ')
    for res in result:
        if not res:  # empty stdout: show the ssh error output instead
            print(ssh.stderr.readlines())
            break
        else:
            print(res)# Script Name : portscanner.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Port Scanner, you just pass the host and the ports
import optparse # Import the module
from socket import * # Import the module
from threading import * # Import the module
screenLock = Semaphore(value=1) # Prevent other threads from preceeding
def connScan(tgtHost, tgtPort):  # Start of the function
    """Attempt a TCP connect to tgtHost:tgtPort and report open/closed.

    The module-level screenLock semaphore serialises the printed output
    when several scanner threads finish at the same time.
    """
    connSkt = None  # so the finally block can tell whether socket() succeeded
    try:
        connSkt = socket(AF_INET, SOCK_STREAM)  # plain IPv4 TCP socket
        connSkt.connect((tgtHost, tgtPort))
        connSkt.send('')
        results = connSkt.recv(100)  # grab any banner the service sends back
        screenLock.acquire()  # Acquire the lock
        print('[+] %d/tcp open' % tgtPort)
        print('[+] ' + str(results))
    except Exception:  # refused / timed out -> report the port as closed
        screenLock.acquire()
        print('[-] %d/tcp closed ' % tgtPort)
    finally:
        screenLock.release()
        if connSkt is not None:
            # Guard the close: previously connSkt was referenced here even
            # when socket() itself had raised, causing an UnboundLocalError.
            connSkt.close()
def portScan(tgtHost, tgtPorts):  # Start of the function
    """Resolve tgtHost and scan each port in tgtPorts on its own thread."""
    try:
        tgtIP = gethostbyname(tgtHost)  # resolve the host name to an IP address
    except Exception:
        print("[-] Cannot resolve '%s': Unknown host" % tgtHost)
        return
    try:
        tgtName = gethostbyaddr(tgtIP)  # reverse lookup for a friendlier banner
        print('\n[+] Scan Results for: ' + tgtName[0])
    except Exception:
        print('\n[+] Scan Results for: ' + tgtIP)
    setdefaulttimeout(1)  # keep each per-port probe fast
    for tgtPort in tgtPorts:  # one scanner thread per port
        t = Thread(target=connScan, args=(tgtHost, int(tgtPort)))
        t.start()
def main():
    """Parse -H/-p options and kick off the port scan."""
    parser = optparse.OptionParser('usage %prog -H' + ' <target host> -p <target port>')
    parser.add_option('-H', dest='tgtHost', type='string', help='specify target host')
    parser.add_option('-p', dest='tgtPort', type='string', help='specify target port[s] seperated by a comma')
    (options, args) = parser.parse_args()
    tgtHost = options.tgtHost
    tgtPorts = str(options.tgtPort).split(',')
    # Logical `or` with `is None` instead of the original bitwise
    # `(x == None) | (y == None)`, which relied on bool coercion.
    if tgtHost is None or tgtPorts[0] is None:
        print(parser.usage)
        exit(0)
    portScan(tgtHost, tgtPorts)
if __name__ == '__main__':
    main()# Script Name : work_connect.py
# Author : Craig Richards
# Created : 11th May 2012
# Last Modified : 31st October 2012
# Version : 1.1
# Modifications : 1.1 - CR - Added some extra code, to check an argument is passed to the script first of all, then check it's a valid input
# Description : This simple script loads everything I need to connect to work etc
import subprocess  # Load the Library Module
import sys  # Load the Library Module
import os  # Load the Library Module
import time  # Load the Library Module
dropbox = os.getenv("dropbox")  # Dropbox root, taken from the OS environment
rdpfile = ("remote\\workpc.rdp")  # RDP session file, relative to the dropbox root
conffilename = os.path.join(dropbox, rdpfile)  # Full path to the RDP session file
remote = (r"c:\windows\system32\mstsc.exe ")  # Path to the Windows RDP client
text = '''You need to pass an argument
-c Followed by login password to connect
-d to disconnect'''  # Help text shown for a missing or invalid option - 1.2
if len(sys.argv) < 2:  # At least one option must be passed to the script - 1.2
    print text  # If not, print the help text above - 1.2
    sys.exit()  # and exit the program - 1.2
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:  # Help Menu if called
    print text  # Print the help text - 1.2
    sys.exit(0)  # Exit the program
else:
    if sys.argv[1].lower().startswith('-c'):  # -c <password>: connect to work
        passwd = sys.argv[2]  # Login password is the second argument
        # Bring up the Checkpoint VPN, then a PuTTY session manager, then
        # (after giving the VPN time to establish) the RDP session.
        subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe connect -u username -p "+passwd))
        subprocess.Popen((r"c:\geektools\puttycm.exe"))
        time.sleep(15)  # Sleep 15s so the checkpoint client can connect before mstsc opens
        subprocess.Popen([remote, conffilename])
    elif sys.argv[1].lower().startswith('-d'):  # -d: disconnect the checkpoint session
        subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe disconnect "))
    else:
        print 'Unknown option - ' + text # If any other option is passed, then print Unknown option and the text from above - 1.2# Script Name : testlines.py
# Author : Craig Richards
# Created : 08th December 2011
# Last Modified :
# Version : 1.0
# Modifications : beven nyamande
# Description : This very simple script open a file and prints out 100 lines of whatever is set for the line variableest you want to print\n" # This sets the variable for the text that you want to print
def write_to_file(filename, txt):
    """Write *txt* to *filename*, truncating any existing content."""
    with open(filename, 'w') as file_object:
        file_object.write(txt)  # return value (char count) was bound to an unused local
if __name__ == '__main__':
    write_to_file('test.txt', 'I am beven')
# Script Name : ping_subnet.py
# Author : Craig Richards
# Created : 12th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : After supplying the first 3 octets it will scan the final range for available addresses
import os  # Load the Library Module
import subprocess  # Load the Library Module
import sys  # Load the Library Module
filename = sys.argv[0]  # Script name, used in the usage messages
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:  # Help Menu if called
    print '''
You need to supply the first octets of the address Usage : ''' + filename + ''' 111.111.111 '''
    sys.exit(0)
else:
    if (len(sys.argv) < 2):  # No subnet supplied: explain usage and stop
        sys.exit (' You need to supply the first octets of the address Usage : ' + filename + ' 111.111.111')
subnet = sys.argv[1]  # First three octets, e.g. "192.168.1"
if os.name == "posix":  # Linux/Unix ping syntax (-c = echo-request count)
    myping = "ping -c 2 "
elif os.name in ("nt", "dos", "ce"):  # Windows ping syntax (-n = echo-request count)
    myping = "ping -n 2 "
f = open('ping_'+subnet+'.log', 'w')  # Log file that receives the scan results
for ip in range(2,255):  # Final-octet range to probe (skips .0, .1 and .255)
    # shell=True so the assembled ping command line runs as one string
    ret = subprocess.call(myping + str(subnet)+"."+str(ip) , shell=True,stdout=f,stderr=subprocess.STDOUT)
    if ret == 0:  # Exit status 0 means the host replied
        f.write (subnet+"."+str(ip) + " is alive" + "\n")
    else:
        f.write (subnet+"."+str(ip) + " did not respond" + "\n")# Script Name : ping_servers.py
# Author : Craig Richards
# Created : 9th May 2012
# Last Modified : 14th May 2012
# Version : 1.1
# Modifications : 1.1 - 14th May 2012 - CR Changed it to use the config directory to store the server files
# Description : This script will, depending on the arguments supplied will ping the servers associated with that application group.
import os  # Load the Library Module
import subprocess  # Load the Library Module
import sys  # Load the Library Module
# Define the script name BEFORE it is used: previously this assignment sat
# after the usage message below that references it, causing a NameError
# whenever the script was run with fewer than three arguments.
filename = sys.argv[0]
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:  # Help Menu if called
    print('''
You need to supply the application group for the servers you want to ping, i.e.
dms
swaps
Followed by the site i.e.
155
bromley''')
    sys.exit(0)
else:
    if (len(sys.argv) < 3):  # Need both an application group and a site
        sys.exit('\nYou need to supply the app group. Usage : ' + filename + ' followed by the application group i.e. \n \t dms or \n \t swaps \n then the site i.e. \n \t 155 or \n \t bromley')
appgroup = sys.argv[1]  # Application group, e.g. dms or swaps
site = sys.argv[2]  # Site, e.g. 155 or bromley
if os.name == "posix":  # Linux ping syntax
    myping = "ping -c 2 "
elif os.name in ("nt", "dos", "ce"):  # Windows ping syntax
    myping = "ping -n 2 "
if 'dms' in sys.argv:  # Normalise the app-group argument
    appgroup = 'dms'
elif 'swaps' in sys.argv:
    appgroup = 'swaps'
if '155' in sys.argv:  # Normalise the site argument
    site = '155'
elif 'bromley' in sys.argv:
    site = 'bromley'
logdir = os.getenv("logs")  # Log directory from the OS environment
logfile = 'ping_' + appgroup + '_' + site + '.log'  # Per-group/site log name
logfilename = os.path.join(logdir, logfile)
confdir = os.getenv("my_config")  # Config directory from the OS environment - 1.2
conffile = (appgroup + '_servers_' + site + '.txt')  # Server list for this group/site - 1.2
conffilename = os.path.join(confdir, conffile)
f = open(logfilename, "w")  # Open the log that receives the ping results
for server in open(conffilename):  # One server name per line - 1.2
    ret = subprocess.call(myping + server, shell=True, stdout=f, stderr=subprocess.STDOUT)
    if ret == 0:  # Exit status 0 means the server replied
        f.write(server.strip() + " is alive" + "\n")
    else:
        f.write(server.strip() + " did not respond" + "\n")
print("\n\tYou can see the results in the logfile : " + logfilename)# Script Name : backup_automater_services.py
# Author : Craig Richards
# Created : 24th October 2012
# Last Modified : 13th February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up the comments and syntax
# Description : This will go through and backup all my automator services workflows
import datetime  # Load the library module
import os  # Load the library module
import shutil  # Load the library module
today = datetime.date.today()  # Get Today's date
todaystr = today.isoformat()  # ISO date string used in the backup directory name
confdir = os.getenv("my_config")  # Config directory from the OS environment
dropbox = os.getenv("dropbox")  # Dropbox root from the OS environment
conffile = ('services.conf')  # Config file listing the workflows to back up
conffilename = os.path.join(confdir, conffile)  # Full path to the config file
sourcedir = os.path.expanduser('~/Library/Services/')  # Where the Automator services live
destdir = os.path.join(dropbox, "My_backups" + "/" +
                       "Automater_services" + todaystr + "/")  # Dated destination
                       # backup directory inside Dropbox
for file_name in open(conffilename):  # One workflow name per config line
    fname = file_name.strip()  # Drop the trailing newline (and skip blank lines)
    if fname:
        sourcefile = os.path.join(sourcedir, fname)  # Source workflow to back up
        destfile = os.path.join(destdir, fname)  # Matching destination path
        shutil.copytree(sourcefile, destfile)  # Each workflow is a directory bundle# Script Name : powerup_checks.py
# Author : Craig Richards
# Created : 25th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Creates an output file by pulling all the servers for the given site from SQLITE database, then goes through the list pinging the servers to see if they are up on the network
import sys # Load the Library Module
import sqlite3 # Load the Library Module
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
dropbox = os.getenv("dropbox")  # Dropbox root from the OS environment
config = os.getenv("my_config")  # Config directory from the OS environment
dbfile = ("Databases/jarvis.db")  # Database path relative to the dropbox root
master_db = os.path.join(dropbox, dbfile)  # Full path to the SQLite database
listfile = ("startup_list.txt")  # File that will hold the servers
serverfile = os.path.join(config, listfile)  # Full path to the server list
outputfile = ('server_startup_' + strftime("%Y-%m-%d-%H-%M") + '.log')  # Timestamped log name
# Below is the help text
text = '''
You need to pass an argument, the options the script expects is
-site1 For the Servers relating to site1
-site2 For the Servers located in site2'''
def windows():  # Run when the detected OS is Windows
    """Ping every server in serverfile (Windows ping syntax) and log up/down."""
    # with-statements guarantee both handles are closed; previously the log
    # was opened and never closed.
    with open(outputfile, 'a') as f, open(serverfile, 'r') as servers:
        for server in servers:
            # -n 3 = three echo requests; the ping output itself is discarded
            ret = subprocess.call("ping -n 3 %s" % server.strip(), stdout=open('NUL', 'w'), stderr=subprocess.STDOUT)
            if ret == 0:  # Exit status 0 means the server replied
                f.write("%s: is alive" % server.strip().ljust(15) + "\n")
            else:
                f.write("%s: did not respond" % server.strip().ljust(15) + "\n")
def linux():  # Run when the detected OS is Unix-like
    """Ping every server in serverfile (Linux ping syntax) and log up/down."""
    # Write to the shared `outputfile` so the closing message in main()
    # names the file actually written (this function previously built its
    # own date-only log name, diverging from what main() reported).
    with open(outputfile, 'a') as f, open(serverfile, 'r') as servers:
        for server in servers:
            ret = subprocess.call("ping -c 3 %s" % server, shell=True, stdout=open('/dev/null', 'w'), stderr=subprocess.STDOUT)
            if ret == 0:  # Exit status 0 means the server replied
                f.write("%s: is alive" % server.strip().ljust(15) + "\n")
            else:
                f.write("%s: did not respond" % server.strip().ljust(15) + "\n")
def get_servers(query):  # Function to get the servers from the database
    """Write the host names for *query* from the SQLite DB to serverfile, echoing each."""
    conn = sqlite3.connect(master_db)  # Connect to the database
    cursor = conn.cursor()
    # Parameterised query: the location value is bound, not interpolated.
    cursor.execute('select hostname from tp_servers where location =?', (query,))
    print('\nDisplaying Servers for : ' + query + '\n')
    # Open the output file once; previously it was reopened for every row
    # and the handle closed only after the loop.
    with open(serverfile, 'a') as f:
        for row in cursor:  # iterate the cursor instead of fetchone()/None checks
            f.write("%s\n" % str(row[0]))
            print(row[0])  # Echo each server to the screen
    conn.close()  # release the database connection
def main():  # Main Function
    """Build the server list for the requested site, then ping every entry."""
    if os.path.exists(serverfile):  # Start from a clean server list
        os.remove(serverfile)
    if len(sys.argv) < 2:  # An argument is mandatory
        print(text)  # Show the help text
        sys.exit()
    if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:
        print(text)  # Help was requested explicitly
        sys.exit(0)
    else:
        if sys.argv[1].lower().startswith('-site1'):  # -site1 selects site1 servers
            query = 'site1'
        elif sys.argv[1].lower().startswith('-site2'):  # -site2 selects site2 servers
            query = 'site2'
        else:
            print('\n[-] Unknown option [-] ' + text)  # Unrecognised option
            sys.exit(0)
    get_servers(query)  # Pull the server list from the database
    if os.name == "posix":  # Pick the ping syntax matching this OS
        linux()
    elif os.name in ("nt", "dos", "ce"):
        windows()
    print('\n[+] Check the log file ' + outputfile + ' [+]\n')  # Point at the results
if __name__ == '__main__':
    main()  # Call the main function# Script Name : password_cracker.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Old school password cracker using python
from sys import platform as _platform
# Check the current operating system to import the correct version of crypt
if _platform == "linux" or _platform == "linux2":
import crypt # Import the module
elif _platform == "darwin":
# Mac OS X
import crypt
elif _platform == "win32":
# Windows
try:
import fcrypt # Try importing the fcrypt module
except ImportError:
print 'Please install fcrypt if you are on Windows'
def testPass(cryptPass):  # Start the function
    """Try every word in dictionary.txt against the crypt()ed password hash."""
    salt = cryptPass[0:2]  # classic DES crypt: the first two characters are the salt
    # with-statement closes the dictionary even on the early return;
    # iterating the file directly avoids loading it all via readlines().
    with open('dictionary.txt', 'r') as dictFile:
        for word in dictFile:
            word = word.strip('\n')
            cryptWord = crypt.crypt(word, salt)  # hash the candidate with the same salt
            if (cryptWord == cryptPass):
                print("[+] Found Password: " + word + "\n")
                return
    print("[-] Password Not Found.\n")
    return
def main():
    """Attempt to crack every user:hash entry found in passwords.txt."""
    with open('passwords.txt') as passFile:  # closed deterministically
        for line in passFile:
            if ":" in line:  # only lines shaped like user:hash
                user = line.split(':')[0]
                cryptPass = line.split(':')[1].strip(' ')  # the hash is the second field
                print("[*] Cracking Password For: " + user)
                testPass(cryptPass)  # try the dictionary against this hash
if __name__ == "__main__":
    main()# Script Name : check_file.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications : with statement added to ensure correct file closure
# Description : Check a file exists and that we can read the file
from __future__ import print_function
import sys # Import the Modules
import os # Import the Modules
# Prints usage if not appropriate length of arguments are provided
def usage():
    """Print the command-line usage string and terminate the program."""
    message = '[-] Usage: python check_file.py <filename1> [filename2] ... [filenameN]'
    print(message)
    exit(0)
# Readfile Functions which open the file that is passed to the script
def readfile(filename):
    """Print the entire contents of *filename*."""
    # The with-statement guarantees the handle is closed in all circumstances.
    with open(filename, 'r') as handle:
        contents = handle.read()
    print(contents)
def main():
    """Validate each file named on the command line, then print the readable ones."""
    if len(sys.argv) >= 2:  # Check the arguments passed to the script
        candidates = sys.argv[1:]
    else:
        usage()  # No file names supplied: show usage (which exits)
        return
    filenames = []
    # Filter into a NEW list: the original removed elements from the list it
    # was iterating, which silently skipped the element after every removal.
    for filename in candidates:
        if not os.path.isfile(filename):  # Check the file exists
            print('[-] ' + filename + ' does not exist.')
            continue
        if not os.access(filename, os.R_OK):  # Check the file is readable
            print('[-] ' + filename + ' access denied')
            continue
        filenames.append(filename)
    # Read the content of each file that passed both checks
    for filename in filenames:
        print('[+] Reading from : ' + filename)
        readfile(filename)
if __name__ == '__main__':
    main()
# Script Name : nmap_scan.py
# Author : Craig Richards
# Created : 24th May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This scans my scripts directory and gives a count of the different types of scripts, you need nmap installed to run this
import nmap # Import the module
import optparse # Import the module
def nmapScan(tgtHost, tgtPort):  # This function performs the actual scan
    """Scan one host/port with python-nmap and print the reported port state."""
    nmScan = nmap.PortScanner()
    nmScan.scan(tgtHost, tgtPort)
    state = nmScan[tgtHost]['tcp'][int(tgtPort)]['state']  # e.g. 'open' / 'closed'
    print("[*] " + tgtHost + " tcp/" + tgtPort + " " + state)
def main():  # Main Program
    """Parse -H/-p options and scan each requested port."""
    parser = optparse.OptionParser('usage%prog ' + '-H <host> -p <port>')  # Options/help
    parser.add_option('-H', dest='tgtHost', type='string', help='specify host')
    parser.add_option('-p', dest='tgtPort', type='string', help='port')
    (options, args) = parser.parse_args()
    tgtHost = options.tgtHost
    tgtPorts = str(options.tgtPort).split(',')
    # Logical `or` with `is None` instead of the original bitwise
    # `(x == None) | (y == None)`.
    if tgtHost is None or tgtPorts[0] is None:
        print(parser.usage)
        exit(0)
    for tgtPort in tgtPorts:  # Scan the host on each requested port
        nmapScan(tgtHost, tgtPort)
if __name__ == '__main__':
    main()
import urllib2
try:
    # A 2-second HTTP fetch of google.com serves as a cheap connectivity probe.
    urllib2.urlopen("http://google.com", timeout=2)
    print ("working connection")
except urllib2.URLError:  # DNS failure / no route / timeout
    print ("No internet connection")# Script Name : sqlite_check.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Runs checks to check my SQLITE database
import sqlite3 as lite
import sys
import os
dropbox = os.getenv("dropbox")  # Dropbox root from the OS environment
dbfile = ("Databases\jarvis.db")  # Database path, relative to dropbox (Windows separator)
master_db = os.path.join(dropbox, dbfile)  # Full path to the SQLite database
con = None
try:
    con = lite.connect(master_db)
    cur = con.cursor()
    cur.execute('SELECT SQLITE_VERSION()')
    data = cur.fetchone()
    print("SQLite version: %s" % data)
except lite.Error as e:  # 'except Error, e' was Python-2-only syntax
    print("Error %s:" % e.args[0])
    sys.exit(1)
finally:
    if con:
        con.close()
# List every table, first via fetchall() ...
con = lite.connect(master_db)
cur = con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
rows = cur.fetchall()
for row in rows:
    print(row)
# ... then again row by row with fetchone()
con = lite.connect(master_db)
cur = con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
while True:
    row = cur.fetchone()
    if row is None:  # fetchone() returns None when the result set is exhausted
        break
    print(row[0])# Script Name : fileinfo.py
# Author : Not sure where I got this from
# Created : 28th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Show file information for a given file
# get file information using os.stat()
# tested with Python24 vegsaeat 25sep2006
from __future__ import print_function
import os
import sys
import stat # index constants for os.stat()
import time
try_count = 16  # allow a handful of attempts at entering a valid file name
while try_count:
    file_name = raw_input("Enter a file name: ")  # pick a file you have
    try_count >>= 1
    try:
        file_stats = os.stat(file_name)
        break
    except OSError:
        # %-format the name into the message: the original passed it as a
        # second print argument, so the [%s] placeholder was never filled.
        print("\nNameError : [%s] No such file or directory\n" % file_name)
if try_count == 0:
    print("Trial limit exceded \nExiting program")
    sys.exit()
# create a dictionary to hold file info
file_info = {
    'fname': file_name,
    'fsize': file_stats[stat.ST_SIZE],
    'f_lm': time.strftime("%d/%m/%Y %I:%M:%S %p",
                          time.localtime(file_stats[stat.ST_MTIME])),
    'f_la': time.strftime("%d/%m/%Y %I:%M:%S %p",
                          time.localtime(file_stats[stat.ST_ATIME])),
    'f_ct': time.strftime("%d/%m/%Y %I:%M:%S %p",
                          time.localtime(file_stats[stat.ST_CTIME]))
}
print()  # blank line (a bare `print` is a no-op under print_function)
print("file name = %(fname)s" % file_info)
print("file size = %(fsize)s bytes" % file_info)
print("last modified = %(f_lm)s" % file_info)
print("last accessed = %(f_la)s" % file_info)
print("creation time = %(f_ct)s" % file_info)
print()
if stat.S_ISDIR(file_stats[stat.ST_MODE]):
    print("This a directory")
else:
    print("This is not a directory")
print()
print("A closer look at the os.stat(%s) tuple:" % file_name)
print(file_stats)
print()
print("The above tuple has the following sequence:")
print("""st_mode (protection bits), st_ino (inode number),
st_dev (device), st_nlink (number of hard links),
st_uid (user ID of owner), st_gid (group ID of owner),
st_size (file size, bytes), st_atime (last access time, seconds since epoch),
st_mtime (last modification time), st_ctime (time of creation, Windows)"""
)# Script Name : dir_test.py
# Author : Craig Richards
# Created : 29th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Tests to see if the directory testdir exists, if not it will create the directory for you
import os  # Import the OS module
if not os.path.exists('testdir'):  # Only create it when it is missing
    # NOTE(review): the next script's "import time" appears to have been
    # fused into this trailing comment during file concatenation — confirm.
    os.makedirs('testdir')  # Create the directoryimport time
import webbrowser
# how much views you want
# This only works when video has less than 300 views, it won't work when there are more than 300 views...
# due to youtube's policy.
# NOTE(review): this script calls time.ctime()/time.sleep(), but its
# "import time" line appears to have been swallowed by the previous
# script's trailing comment — confirm.
print("Enjoy your Time\n" + time.ctime())
for count in range(30):
    time.sleep(5)  # wait five seconds between opens
    webbrowser.open("https://www.youtube.com/watch?v=o6A7nf3IeeA")# batch_file_rename.py
# Created: 6th August 2012
'''
This will batch rename a group of files in a given directory,
once you pass the current and new extensions
'''
__author__ = 'Craig Richards'
__version__ = '1.0'
import os
import sys
def batch_rename(work_dir, old_ext, new_ext):
    '''
    Rename every file in work_dir whose extension is old_ext so that it
    ends in new_ext instead.
    '''
    for filename in os.listdir(work_dir):
        # Get the file extension
        file_ext = os.path.splitext(filename)[1]
        if old_ext == file_ext:
            # Replace only the final extension. The original used
            # str.replace, which also rewrote matching text elsewhere in
            # the name (e.g. 'a.txt.txt' became 'a.md.md').
            newfile = os.path.splitext(filename)[0] + new_ext
            os.rename(
                os.path.join(work_dir, filename),
                os.path.join(work_dir, newfile)
            )
def main():
    '''
    Entry point when the script is invoked directly: pull the working
    directory and the old/new extensions from the command line.
    '''
    work_dir, old_ext, new_ext = sys.argv[1], sys.argv[2], sys.argv[3]
    batch_rename(work_dir, old_ext, new_ext)
if __name__ == '__main__':
    main()
# Script Name : recyclebin.py
# Author : Craig Richards
# Created : 07th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Scans the recyclebin and displays the files in there, originally got this script from the Violent Python book
import os # Load the Module
import optparse # Load the Module
from _winreg import * # Load the Module
def sid2user(sid):
    """Map a SID found under the recycler directory to a user name via the registry."""
    profile_key = "SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList" + '\\' + sid
    try:
        key = OpenKey(HKEY_LOCAL_MACHINE, profile_key)
        (value, type) = QueryValueEx(key, 'ProfileImagePath')
        # The profile path's last component is the account name.
        return value.split('\\')[-1]
    except:
        # Registry lookup failed: fall back to the raw SID string.
        return sid
def returnDir():
    """Return the first recycle-bin directory that exists on this machine, else None."""
    candidates = ('c:\\Recycler\\', 'C:\\Recycled\\', 'C:\\$RECYCLE.BIN\\')
    for recycleDir in candidates:
        if os.path.isdir(recycleDir):
            return recycleDir
    return None
def findRecycled(recycleDir):  # List the contents of the recycle bin
    """Print the files each user has in the recycle bin under recycleDir."""
    dirList = os.listdir(recycleDir)
    for sid in dirList:  # one subdirectory per user SID
        files = os.listdir(recycleDir + sid)
        user = sid2user(sid)  # translate the SID into a readable user name
        print('\n[*] Listing Files for User: ' + str(user))
        for file in files:
            print('[+] Found File: ' + str(file))
def main():
    """Locate the recycle bin on this machine and list its contents."""
    binDir = returnDir()
    findRecycled(binDir)
if __name__ == '__main__':
    main()# Script Name : powerdown_startup.py
# Author : Craig Richards
# Created : 05th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This goes through the server list and pings the machine, if it's up it will load the putty session, if its not it will notify you.
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
def windows():  # This is the function to run if it detects the OS is windows.
    """Ping every host in startup_list.txt (Windows syntax) and open a PuTTY
    session for each one that responds; log the outcome either way."""
    # 'with' guarantees the handles are closed even if ping/Popen raises.
    # The original leaked the log handle and opened a fresh 'NUL' handle on
    # every iteration without ever closing it.
    with open('server_startup_' + strftime("%Y-%m-%d") + '.log', 'a') as f:
        with open('NUL', 'w') as devnull:
            with open('startup_list.txt', 'r') as servers:
                for server in servers:
                    # -n 3: send three echo requests (Windows ping flag).
                    ret = subprocess.call("ping -n 3 %s" % server, shell=True,
                                          stdout=devnull, stderr=subprocess.STDOUT)
                    if ret == 0:  # host answered
                        f.write("%s: is alive, loading PuTTY session" % server.strip() + "\n")
                        subprocess.Popen(('putty -load ' + server))
                    else:
                        f.write("%s : did not respond" % server.strip() + "\n")
def linux():
    """Ping every host in startup_list.txt (Linux syntax) and ssh to each one
    that responds; log the outcome either way."""
    # 'with' guarantees the handles are closed even if ping/Popen raises.
    # The original leaked the log handle and a /dev/null handle per iteration.
    with open('server_startup_' + strftime("%Y-%m-%d") + '.log', 'a') as f:
        with open('/dev/null', 'w') as devnull:
            with open('startup_list.txt') as servers:
                for server in servers:
                    # -c 3: send three echo requests (Linux ping flag).
                    ret = subprocess.call("ping -c 3 %s" % server, shell=True,
                                          stdout=devnull, stderr=subprocess.STDOUT)
                    if ret == 0:  # host answered
                        f.write("%s: is alive" % server.strip() + "\n")
                        subprocess.Popen(['ssh', server.strip()])
                    else:
                        f.write("%s: did not respond" % server.strip() + "\n")
# End of the functions
# Start of the Main Program: pick the ping/connect routine for this platform.
if os.name == "posix":  # Unix/Linux/MacOS/BSD
    linux()
elif os.name in ("nt", "dos", "ce"):  # DOS/Windows
    windows()  # Call the windows functionimport SimpleHTTPServer
import SocketServer

# Minimal static file server for the current directory (Python 2 stdlib).
PORT = 8000  # Port to serve on. NOTE(review): original comment said 8080; the code uses 8000.
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)  # bind on all interfaces
print "serving at port", PORT
httpd.serve_forever()  # Script Name : folder_size.py
# Author : Craig Richards
# Created : 19th July 2012
# Last Modified : 22 February 2016
# Version : 1.0.1
# Modifications : Modified the Printing method and added a few comments
# Description : This will scan the current directory and all subdirectories and display the size.
import os
import sys  # sys supplies the argument vector
# Walk the directory given as argv[1] and report its total size in several units.
# (The pasted original fused "import sys" with its docstring on one line, which
# is a syntax error; split back into a normal import plus comment.)
try:
    directory = sys.argv[1]  # directory to scan, supplied by the user
except IndexError:
    sys.exit("Must provide an argument.")
dir_size = 0  # running total, in bytes
# Multipliers converting a byte count into each display unit.
fsizedicr = {'Bytes': 1,
             'Kilobytes': float(1) / 1024,
             'Megabytes': float(1) / (1024 * 1024),
             'Gigabytes': float(1) / (1024 * 1024
                                      * 1024)}
for (path, dirs, files) in os.walk(directory):  # walk all subdirectories
    for file in files:
        filename = os.path.join(path, file)
        dir_size += os.path.getsize(filename)  # accumulate each file's size
fsizeList = [str(round(fsizedicr[key] * dir_size, 2)) + " " + key for key in fsizedicr]  # List of units
if dir_size == 0:
    print ("File Empty")  # sanity check for the empty-directory corner case
else:
    for units in sorted(fsizeList)[::-1]:  # smallest magnitude units print first
        print ("Folder Size: " + units)  # Script Name : env_check.py
# Author : Craig Richards
# Created : 14th May 2012
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up comments and syntax
# Description : This script will check to see if all of the environment variables I require are set
import os

# Verify that every environment variable named in env_check.conf is set,
# printing the current value (or a warning) for each.
confdir = os.getenv("my_config")  # directory holding the config file
conffile = 'env_check.conf'
# NOTE(review): if my_config is unset, confdir is None and join() raises — confirm intended.
conffilename = os.path.join(confdir, conffile)
for env_check in open(conffilename):  # one variable name per line
    env_check = env_check.strip()  # drop trailing newline/whitespace
    print '[{}]'.format(env_check)  # header in square brackets
    newenv = os.getenv(env_check)  # current value of that variable, if any
    if newenv is None:
        print env_check, 'is not set'
    else:
        print 'Current Setting for {}={}\n'.format(env_check, newenv)  # Script Name : script_count.py
# Author : Craig Richards
# Created : 27th February 2012
# Last Modified : 20th July 2012
# Version : 1.3
# Modifications : 1.1 - 28-02-2012 - CR - Changed inside github and development functions, so instead of if os.name = "posix" do this else do this etc
# : I used os.path.join, so it condensed 4 lines down to 1
# : 1.2 - 10-05-2012 - CR - Added a line to include PHP scripts.
# : 1.3 - 20-07-2012 - CR - Added the line to include Batch scripts
# Description : This scans my scripts directory and gives a count of the different types of scripts
import os # Load the library module
path = os.getenv("scripts") # Set the variable path by getting the value from the OS environment variable scripts
dropbox = os.getenv("dropbox") # Set the variable dropbox by getting the value from the OS environment variable dropbox
def clear_screen():
    """Clear the terminal using the platform's native command."""
    if os.name in ("nt", "dos", "ce"):  # DOS / Windows family
        os.system('CLS')
    elif os.name == "posix":  # Unix / Linux / macOS / BSD
        os.system('clear')
def count_files(path, extensions):
    """Count files under *path* (recursively) whose names end with *extensions*.

    *extensions* may be a single suffix string or a tuple of suffixes,
    exactly as accepted by str.endswith.
    """
    total = 0
    for _root, _dirs, names in os.walk(path):
        # endswith returns a bool; summing booleans counts the matches.
        total += sum(name.endswith(extensions) for name in names)
    return total
def github():  # Count files waiting in the Dropbox github directory and nag accordingly
    github_dir = os.path.join(dropbox, 'github')  # 'dropbox' is the module-level env-derived path - 1.1
    github_count = sum((len(f) for _, _, f in os.walk(github_dir)))  # total files, recursively
    if github_count > 5:  # more than 5 pending: nag loudly
        print '\nYou have too many in here, start uploading !!!!!'
        print 'You have: ' + str(github_count) + ' waiting to be uploaded to github!!'
    elif github_count == 0:  # nothing pending
        print '\nGithub directory is all Clear'
    else:  # 1..5 pending: just report the count
        print '\nYou have: ' + str(github_count) + ' waiting to be uploaded to github!!'
def development():  # Count files in the development directory and nag accordingly
    dev_dir = os.path.join(path, 'development')  # 'path' is the module-level scripts dir - 1.1
    dev_count = sum((len(f) for _, _, f in os.walk(dev_dir)))  # total files, recursively
    if dev_count > 10:  # more than 10 unfinished scripts: nag loudly
        print '\nYou have too many in here, finish them or delete them !!!!!'
        print 'You have: ' + str(dev_count) + ' waiting to be finished!!'
    elif dev_count == 0:  # nothing pending
        print '\nDevelopment directory is all clear'
    else:  # 1..10 pending: just report the count
        print '\nYou have: ' + str(dev_count) + ' waiting to be finished!!'
clear_screen() # Call the function to clear the screen
print '\nYou have the following :\n'
print 'AutoIT:\t' + str(count_files(path, '.au3')) # Run the count_files function to count the files with the extension we pass
print 'Batch:\t' + str(count_files(path, ('.bat', ',cmd'))) # 1.3
print 'Perl:\t' + str(count_files(path, '.pl'))
print 'PHP:\t' + str(count_files(path, '.php')) # 1.2
print 'Python:\t' + str(count_files(path, '.py'))
print 'Shell:\t' + str(count_files(path, ('.ksh', '.sh', '.bash')))
print 'SQL:\t' + str(count_files(path, '.sql'))
github() # Call the github function
development() # Call the development function# Script Name : script_listing.py
# Author : Craig Richards
# Created : 15th February 2012
# Last Modified : 29th May 2012
# Version : 1.2
# Modifications : 1.1 - 28-02-2012 - CR - Added the variable to get the logs directory, I then joined the output so the file goes to the logs directory
# : 1.2 - 29-05/2012 - CR - Changed the line so it doesn't ask for a directory, it now uses the environment varaible scripts
# Description : This will list all the files in the given directory, it will also go through all the subdirectories as well
import os

# Walk the scripts directory tree and write every file's full path to a log.
logdir = os.getenv("logs")  # log output directory, from the OS environment
logfile = 'script_list.log'
path = os.getenv("scripts")  # directory tree to scan - 1.2
# path = (raw_input("Enter dir: "))  # (old behaviour: ask the user for the directory)
logfilename = os.path.join(logdir, logfile)
# 'with' replaces the bare open(): the original never closed the log handle,
# so trailing writes could be lost to buffering.
with open(logfilename, 'w') as log:
    for dirpath, dirname, filenames in os.walk(path):  # directories and subdirectories
        for filename in filenames:
            log.write(os.path.join(dirpath, filename) + '\n')  # full path per line
print ("\nYour logfile ", logfilename, "has been created")
import cv2

# Show a live grayscale feed from the default camera until 'q' is pressed.
cap = cv2.VideoCapture(0)  # open camera device 0
while(True):
    # Capture frame-by-frame
    ret, frame = cap.read()
    # Our operations on the frame come here: convert to grayscale
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Display the resulting frame; poll the keyboard once per frame
    cv2.imshow('frame',gray)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows() | [
"[email protected]"
]
| |
288b5306f323b0024d8bdacbb104e4fef3aef131 | f31fda8014ecadf6af7d4e3392fb917c49e0352a | /HeavyIonsAnalysis/VertexAnalysis/python/__init__.py | 33e53c5f468fb19b01b06f042239cab6d2875ca9 | []
| no_license | jniedzie/lightbylight | acea5051f053c49824a49a0b78bac3a2247ee75f | f5a4661fcf3fd3c0e9ccd8893a46a238e30c2aa8 | refs/heads/master | 2020-03-18T12:24:31.970468 | 2018-02-09T15:50:00 | 2018-02-09T15:50:00 | 134,724,759 | 0 | 1 | null | 2018-05-24T14:11:12 | 2018-05-24T14:11:12 | null | UTF-8 | Python | false | false | 216 | py | #Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/HeavyIonsAnalysis/VertexAnalysis/',1)[0])+'/cfipython/slc6_amd64_gcc491/HeavyIonsAnalysis/VertexAnalysis')
| [
"[email protected]"
]
| |
4a196cd4f4e92e6b42f9a4e0df6489a41ad1cdfe | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/1/c8n.py | a152200db7ea0c68b8daebb0c4d24d5fe2909fef | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Expects a token list whose first and last tokens are '"' characters;
    # prints the tokens between them joined by spaces, or a blank line when
    # the quotes are adjacent.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # data to print: strip the surrounding quote tokens
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print
def main(fileName):
    # Interpret each line of the file: lines whose first token is 'c8N' are
    # print commands; the first non-matching line prints ERROR and aborts.
    # NOTE(review): a blank line would raise IndexError on data[0] — confirm
    # inputs never contain blank lines.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'c8N':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
0d7612855c30e129d873683d5f7f339e5fd16d61 | aad164e4efe1d55cc189c35956bfd435b14a0f52 | /eve-8.21.494548/eve/client/script/parklife/autopilot.py | 633cb52208bb534d30fdf32b77f2d3a6d17944d3 | []
| no_license | Pluckyduck/eve | 61cc41fe8fd4dca4fbdcc4761a37bcfeb27ed84f | 9a277707ab1f162c6bd9618faf722c0be3ea93ad | refs/heads/master | 2020-12-28T23:35:29.992875 | 2013-05-06T14:24:33 | 2013-05-06T14:24:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,327 | py | #Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/parklife/autopilot.py
import util
import destiny
import base
import service
import sys
import uthread
import blue
import log
import localization
import uiconst
from collections import defaultdict
AUTO_NAVIGATION_LOOP_INTERVAL_MS = 2000
class AutoPilot(service.Service):
__guid__ = 'svc.autoPilot'
__exportedcalls__ = {'SetOn': [],
'SetOff': [],
'GetState': []}
__notifyevents__ = ['OnBallparkCall', 'OnSessionChanged', 'OnRemoteMessage']
__dependencies__ = ['michelle', 'starmap']
def __init__(self):
    """Initialise autopilot state and kick off the waypoint-restore thread."""
    service.Service.__init__(self)
    self.updateTimer = None        # base.AutoTimer driving Update(), or None when idle
    self.autopilot = 0             # 1 while the autopilot is engaged
    self.ignoreTimerCycles = 0     # skip this many Update() ticks (back-off after actions)
    self.isOptimizing = False      # re-entrancy guard for OptimizeRoute()
    self.approachAndTryTarget = None   # ball id targeted by ApproachAndTryCommand
    self.warpAndTryTarget = None       # ball id targeted by WarpAndTryCommand
    self.__navigateSystemDestinationItemID = None  # target of in-system auto navigation
    self.__navigateSystemThread = None             # AutoTimer for __NavigateSystemTo
    uthread.new(self.UpdateWaypointsThread).context = 'autoPilot::UpdateWaypointsThread'
def UpdateWaypointsThread(self):
    """Re-apply any saved waypoints shortly after the service starts."""
    # Give the client a moment to finish loading before touching the starmap.
    blue.pyos.synchro.SleepWallclock(2000)
    starmapSvc = sm.GetService('starmap')
    savedWaypoints = starmapSvc.GetWaypoints()
    if savedWaypoints:
        # Setting the waypoints again forces the route to be recalculated.
        starmapSvc.SetWaypoints(savedWaypoints)
def Run(self, memStream = None):
    """Service start-up hook: begin the periodic Update() timer."""
    service.Service.Run(self, memStream)
    self.StartTimer()

def SetOn(self):
    """Engage the autopilot (idempotent) and restart the update timer."""
    if self.autopilot == 1:
        return
    self.autopilot = 1
    # Engaging the autopilot cancels any manual in-system auto navigation.
    if not sm.GetService('machoNet').GetGlobalConfig().get('newAutoNavigationKillSwitch', False):
        self.CancelSystemNavigation()
    else:
        self.AbortApproachAndTryCommand()
        self.AbortWarpAndTryCommand()
    sm.ScatterEvent('OnAutoPilotOn')
    eve.Message('AutoPilotEnabled')
    self.KillTimer()
    self.StartTimer()
    self.LogNotice('Autopilot Enabled')

def OnSessionChanged(self, isremote, session, change):
    """After a session change (e.g. a jump), restart the timer with a grace period."""
    self.KillTimer()
    self.ignoreTimerCycles = 3  # let the new system settle before acting
    self.StartTimer()
    sm.GetService('starmap').UpdateRoute(fakeUpdate=True)

def SetOff(self, reason = ''):
    """Disengage the autopilot (idempotent); *reason* selects the user message."""
    if self.autopilot == 0:
        self.KillTimer()
        return
    sm.ScatterEvent('OnAutoPilotOff')
    self.autopilot = 0
    if reason == ' - waypoint reached':
        eve.Message('AutoPilotWaypointReached')
    elif reason == ' - no destination path set':
        eve.Message('AutoPilotDisabledNoPathSet')
    else:
        eve.Message('AutoPilotDisabled')
    self.LogNotice('Autopilot Disabled', reason)

def OnRemoteMessage(self, msgID, *args, **kwargs):
    """Server notification hook: a fleet warp overrides in-system auto navigation."""
    if msgID == 'FleetWarp':
        self.LogInfo('Canceling auto navigation due to fleet warp detected')
        self.CancelSystemNavigation()
def OnBallparkCall(self, functionName, args):
    """React to destiny commands issued for our own ship.

    Manual piloting commands cancel in-system auto navigation; explicit
    goto commands may switch the autopilot off entirely.
    """
    functions = ['GotoDirection', 'GotoPoint']
    if args[0] != eve.session.shipid:
        return  # command concerns some other ball
    if not sm.GetService('machoNet').GetGlobalConfig().get('newAutoNavigationKillSwitch', False):
        cancelAutoNavigation = False
        if self.__navigateSystemDestinationItemID is None:
            pass  # no auto navigation in progress
        elif functionName in {'GotoDirection', 'GotoPoint', 'Orbit'}:
            cancelAutoNavigation = True
        elif functionName == 'FollowBall' and self.__navigateSystemDestinationItemID != args[1]:
            # Following some other ball than our navigation target.
            cancelAutoNavigation = True
        if cancelAutoNavigation:
            self.LogInfo('Canceling auto navigation to', self.__navigateSystemDestinationItemID, 'as a respons to OnBallparkCall:', functionName, args)
            self.CancelSystemNavigation()
    else:
        # Legacy code path: abort the approach/warp-and-try loops instead.
        approachAndTryFunctions = ['GotoDirection',
         'GotoPoint',
         'FollowBall',
         'Orbit',
         'WarpTo']
        warpAndTryFunctions = ['GotoDirection',
         'GotoPoint',
         'FollowBall',
         'Orbit']
        if functionName in approachAndTryFunctions:
            if functionName != 'FollowBall' or self.approachAndTryTarget != args[1]:
                self.AbortApproachAndTryCommand()
        if functionName in warpAndTryFunctions:
            self.AbortWarpAndTryCommand()
    if functionName in functions:
        if functionName == 'GotoDirection' and self.gotoCount > 0:
            # First goto after StartTimer() is the autopilot's own; swallow it.
            self.gotoCount = 0
            self.LogInfo('Autopilot gotocount set to 0')
            return
        if self.gotoCount == 0:
            # Keep the autopilot on while finishing a station-bound route.
            waypoints = sm.GetService('starmap').GetWaypoints()
            if waypoints and util.IsStation(waypoints[-1]):
                return
        self.SetOff(functionName + str(args))
        self.LogInfo('Autopilot stopped gotocount is ', self.gotoCount)
def GetState(self):
    """Return 1 while the autopilot is engaged, else 0."""
    return self.autopilot

def Stop(self, stream):
    """Service shutdown hook: stop the update timer first."""
    self.KillTimer()
    service.Service.Stop(self)

def KillTimer(self):
    # Dropping the reference stops the AutoTimer.
    self.updateTimer = None

def StartTimer(self):
    # gotoCount tracks GotoDirection calls seen since the timer (re)started;
    # the first one after engaging is issued by the autopilot itself.
    self.gotoCount = 0
    self.updateTimer = base.AutoTimer(2000, self.Update)
def Update(self):
    """Timer callback: advance the autopilot one step.

    Depending on distance to the next route element this either jumps a
    stargate, docks at a station, slow-boats closer, or initiates warp.
    Runs every 2 seconds via self.updateTimer.
    """
    if self.autopilot == 0:
        self.KillTimer()
        return
    elif self.ignoreTimerCycles > 0:
        # Back-off counter set after expensive actions (jump/warp/dock).
        self.ignoreTimerCycles = self.ignoreTimerCycles - 1
        return
    elif not session.IsItSafe():
        self.LogInfo('returning as it is not safe')
        return
    elif not session.rwlock.IsCool():
        self.LogInfo("returning as the session rwlock isn't cool")
        return
    else:
        starmapSvc = sm.GetService('starmap')
        destinationPath = starmapSvc.GetDestinationPath()
        if len(destinationPath) == 0:
            self.SetOff(' - no destination path set')
            return
        elif destinationPath[0] == None:
            self.SetOff(' - no destination path set')
            return
        bp = sm.GetService('michelle').GetBallpark()
        if not bp:
            return
        elif sm.GetService('jumpQueue').IsJumpQueued():
            return
        ship = bp.GetBall(session.shipid)
        if ship is None:
            return
        elif ship.mode == destiny.DSTBALL_WARP:
            # Already in warp; wait for arrival.
            return
        # Find the ball leading towards the next path element: either a
        # stargate that jumps into it, or the destination celestial itself.
        destID = None
        destItem = None
        for ballID in bp.balls.iterkeys():
            slimItem = bp.GetInvItem(ballID)
            if slimItem == None:
                continue
            if slimItem.groupID == const.groupStargate and destinationPath[0] in map(lambda x: x.locationID, slimItem.jumps):
                destID = ballID
                destItem = slimItem
                break
            elif destinationPath[0] == slimItem.itemID:
                destID = ballID
                destItem = slimItem
                break
        if destID is None:
            return
        jumpingToCelestial = not util.IsSolarSystem(destinationPath[0])
        theJump = None
        if not jumpingToCelestial:
            for jump in destItem.jumps:
                if destinationPath[0] == jump.locationID:
                    theJump = jump
                    break
        if theJump is None and not jumpingToCelestial:
            return
        approachObject = bp.GetBall(destID)
        if approachObject is None:
            return
        if jumpingToCelestial:
            jumpToLocationName = cfg.evelocations.Get(destinationPath[0]).name
        else:
            jumpToLocationName = cfg.evelocations.Get(theJump.locationID).name
        shipDestDistance = bp.GetSurfaceDist(ship.id, destID)
        if shipDestDistance < const.maxStargateJumpingDistance and not jumpingToCelestial:
            # Close enough to a stargate: attempt the jump.
            if ship.isCloaked:
                return
            if session.mutating:
                self.LogInfo('session is mutating')
                return
            if session.changing:
                self.LogInfo('session is changing')
                return
            if bp.solarsystemID != session.solarsystemid:
                self.LogInfo('bp.solarsystemid is not solarsystemid')
                return
            if sm.GetService('michelle').GetRemotePark()._Moniker__bindParams != session.solarsystemid:
                self.LogInfo('remote park moniker bindparams is not solarsystemid')
                return
            try:
                self.LogNotice('Autopilot jumping from', destID, 'to', theJump.toCelestialID, '(', jumpToLocationName, ')')
                sm.GetService('sessionMgr').PerformSessionChange('autopilot', sm.GetService('michelle').GetRemotePark().CmdStargateJump, destID, theJump.toCelestialID, session.shipid)
                eve.Message('AutoPilotJumping', {'what': jumpToLocationName})
                sm.ScatterEvent('OnAutoPilotJump')
                self.ignoreTimerCycles = 5
            except UserError as e:
                if e.msg == 'SystemCheck_JumpFailed_Stuck':
                    self.SetOff()
                    raise
                elif e.msg.startswith('SystemCheck_JumpFailed_'):
                    eve.Message(e.msg, e.dict)
                elif e.msg == 'NotCloseEnoughToJump':
                    # The server disagreed about the distance: creep closer.
                    park = sm.GetService('michelle').GetRemotePark()
                    park.CmdSetSpeedFraction(1.0)
                    shipui = uicore.layer.shipui
                    if shipui.isopen:
                        shipui.SetSpeed(1.0)
                    park.CmdFollowBall(destID, 0.0)
                    self.LogWarn("Autopilot: I thought I was close enough to jump, but I wasn't.")
                sys.exc_clear()
                self.LogError('Autopilot: jumping to ' + jumpToLocationName + ' failed. Will try again')
                self.ignoreTimerCycles = 5
            except:
                sys.exc_clear()
                self.LogError('Autopilot: jumping to ' + jumpToLocationName + ' failed. Will try again')
                self.ignoreTimerCycles = 5
            return
        elif jumpingToCelestial and util.IsStation(destID) and shipDestDistance < const.maxDockingDistance:
            # Within docking range of the destination station.
            if not sm.GetService('machoNet').GetGlobalConfig().get('newAutoNavigationKillSwitch', False):
                if self.__navigateSystemDestinationItemID != destID:
                    if shipDestDistance > 2500:
                        sm.GetService('audio').SendUIEvent('wise:/msg_AutoPilotApproachingStation_play')
                    sm.GetService('menu').Dock(destID)
                    self.ignoreTimerCycles = 5
            else:
                if shipDestDistance > 2500 and self.approachAndTryTarget != destID:
                    sm.GetService('audio').SendUIEvent('wise:/msg_AutoPilotApproachingStation_play')
                sm.GetService('menu').Dock(destID)
            return
        elif shipDestDistance < const.minWarpDistance:
            # Too close to warp: slow-boat towards the target.
            if ship.mode == destiny.DSTBALL_FOLLOW and ship.followId == destID:
                return  # already approaching it
            self.CancelSystemNavigation()
            park = sm.GetService('michelle').GetRemotePark()
            park.CmdSetSpeedFraction(1.0)
            shipui = uicore.layer.shipui
            if shipui.isopen:
                shipui.SetSpeed(1.0)
            park.CmdFollowBall(destID, 0.0)
            eve.Message('AutoPilotApproaching')
            if not (jumpingToCelestial and util.IsStation(destID)):
                sm.GetService('audio').SendUIEvent('wise:/msg_AutoPilotApproaching_play')
            self.LogInfo('Autopilot: approaching')
            self.ignoreTimerCycles = 2
            return
        try:
            # Far away: warp towards the target.
            sm.GetService('space').WarpDestination(destID, None, None)
            sm.GetService('michelle').GetRemotePark().CmdWarpToStuffAutopilot(destID)
            eve.Message('AutoPilotWarpingTo', {'what': jumpToLocationName})
            if jumpingToCelestial:
                if util.IsStation(destID):
                    sm.GetService('audio').SendUIEvent('wise:/msg_AutoPilotWarpingToStation_play')
                self.LogInfo('Autopilot: warping to celestial object', destID)
            else:
                sm.GetService('audio').SendUIEvent('wise:/msg_AutoPilotWarpingTo_play')
                self.LogInfo('Autopilot: warping to gate')
            sm.ScatterEvent('OnAutoPilotWarp')
            self.ignoreTimerCycles = 2
        except UserError as e:
            sys.exc_clear()
            item = sm.GetService('godma').GetItem(session.shipid)
            if item.warpScrambleStatus > 0:
                self.SetOff('Autopilot cannot warp while warp scrambled.')
            if 'WarpDisrupted' in e.msg:
                self.SetOff('Autopilot cannot warp while warp scrambled by bubble.')
        except Exception as e:
            self.SetOff('Unknown error')
        return
def NavigateSystemTo(self, itemID, interactionRange, commandFunc, *args, **kwargs):
    """Start in-system auto navigation: move to *itemID* and, once within
    *interactionRange*, invoke commandFunc(*args, **kwargs)."""
    self.LogInfo('Navigate to item', itemID, 'range', interactionRange, 'and execute', commandFunc)
    self.__navigateSystemDestinationItemID = itemID
    # The AutoTimer re-invokes __NavigateSystemTo; first tick after 50 ms.
    self.__navigateSystemThread = base.AutoTimer(50, self.__NavigateSystemTo, itemID, interactionRange, commandFunc, *args, **kwargs)

def CancelSystemNavigation(self):
    """Stop in-system auto navigation and any approach/warp-and-try loops."""
    self.LogInfo('Cancel system navigation')
    self.__navigateSystemDestinationItemID = None
    self.__navigateSystemThread = None  # dropping the reference stops the timer
    self.AbortApproachAndTryCommand()
    self.AbortWarpAndTryCommand()
def __NavigateSystemTo(self, itemID, interactionRange, commandFunc, *args, **kwargs):
    """Timer body for NavigateSystemTo: pick the next navigation action.

    Each tick: wait while warping; trigger the command when in range and
    uncloaked; otherwise warp or approach towards the target.
    """
    try:
        if self.InWarp():
            pass  # still travelling; check again next tick
        elif self.InInteractionRange(itemID, interactionRange) and not self.IsCloaked():
            self.LogInfo('System navigation: at target location. Triggering action')
            try:
                commandFunc(*args, **kwargs)
            except UserError:
                raise
            finally:
                # Navigation is finished (or failed) either way.
                self.CancelSystemNavigation()
        elif self.InWarpRange(itemID):
            self.LogInfo('System navigation: warping to target', itemID, interactionRange)
            sm.GetService('menu').WarpToItem(itemID, warpRange=const.minWarpEndDistance, cancelAutoNavigation=False)
        elif self.IsApproachable(itemID):
            sm.GetService('menu').Approach(itemID, cancelAutoNavigation=False)
        else:
            self.LogInfo('Unable to resolve the proper navigation action. Aborting.', itemID, interactionRange, commandFunc)
            self.CancelSystemNavigation()
        if self.__navigateSystemThread:
            # After the fast first tick, settle into the normal polling interval.
            self.__navigateSystemThread.interval = AUTO_NAVIGATION_LOOP_INTERVAL_MS
    except UserError as e:
        self.LogInfo('User error detected', e.msg, itemID, interactionRange, commandFunc)
        raise
    except:
        self.LogError('Problem while navigating system', itemID, interactionRange, commandFunc)
        log.LogException(channel=self.__guid__)
def IsApproachable(self, itemID):
    # True when the target exists and is within sub-warp (approach) distance.
    destBall = self.michelle.GetBall(itemID)
    if destBall is not None and destBall.surfaceDist < const.minWarpDistance:
        return True
    return False

def InInteractionRange(self, itemID, interactionRange):
    # True when the target's surface is within the given interaction range.
    destBall = self.michelle.GetBall(itemID)
    if destBall is not None and destBall.surfaceDist < interactionRange:
        return True
    return False

def InWarp(self):
    # True while our own ship ball is in warp mode.
    shipBall = self.michelle.GetBall(session.shipid)
    if shipBall is not None and shipBall.mode == destiny.DSTBALL_WARP:
        return True
    return False

def InWarpRange(self, itemID):
    # True when the target is far enough away that warping is possible.
    destBall = self.michelle.GetBall(itemID)
    if destBall is not None and destBall.surfaceDist > const.minWarpDistance:
        return True
    return False

def IsCloaked(self):
    # True when our own ship is currently cloaked; False if the ball is missing.
    shipBall = self.michelle.GetBall(session.shipid)
    if shipBall is not None:
        return bool(shipBall.isCloaked)
    return False
def WarpAndTryCommand(self, id, cmdMethod, args, interactionRange):
    """Legacy helper: warp to ball *id*, then run cmdMethod(*args) once within
    *interactionRange* of it. Aborts when warpAndTryTarget changes."""
    bp = sm.StartService('michelle').GetRemotePark()
    if not bp:
        return
    if sm.StartService('space').CanWarp() and self.warpAndTryTarget != id:
        self.approachAndTryTarget = None
        self.warpAndTryTarget = id
        try:
            michelle = sm.StartService('michelle')
            shipBall = michelle.GetBall(session.shipid)
            if shipBall is None:
                return
            if shipBall.mode != destiny.DSTBALL_WARP:
                # Not warping yet: issue the warp and wait for it to start.
                bp.CmdWarpToStuff('item', id)
                sm.StartService('space').WarpDestination(id, None, None)
                while self.warpAndTryTarget == id and shipBall.mode != destiny.DSTBALL_WARP:
                    blue.pyos.synchro.SleepWallclock(500)
            # Wait for the warp to finish.
            while shipBall.mode == destiny.DSTBALL_WARP:
                blue.pyos.synchro.SleepWallclock(500)
            counter = 3  # poll at most 3 more times after landing
            while self.warpAndTryTarget == id and counter > 0:
                destBall = michelle.GetBall(id)
                if not destBall or destBall.surfaceDist > const.minWarpDistance:
                    break  # target gone or we landed far away
                destBall.GetVectorAt(blue.os.GetSimTime())
                if destBall.surfaceDist < interactionRange:
                    cmdMethod(*args)
                    break
                blue.pyos.synchro.SleepWallclock(500)
                counter -= 1
        finally:
            if self.warpAndTryTarget == id:
                self.warpAndTryTarget = None
def ApproachAndTryCommand(self, id, cmdMethod, args, interactionRange):
    """Legacy helper: approach ball *id*, then run cmdMethod(*args) once within
    *interactionRange* while uncloaked. Aborts when approachAndTryTarget changes."""
    bp = sm.StartService('michelle').GetRemotePark()
    if not bp:
        return
    if self.approachAndTryTarget != id and not self.warpAndTryTarget:
        self.warpAndTryTarget = None
        self.approachAndTryTarget = id
        localbp = sm.StartService('michelle').GetBallpark()
        if not localbp:
            return
        try:
            sm.GetService('menu').Approach(id)
            michelle = sm.StartService('michelle')
            while self.approachAndTryTarget == id:
                ball = localbp.GetBall(id)
                if not ball:
                    break  # target disappeared
                ball.GetVectorAt(blue.os.GetSimTime())
                shipBall = localbp.GetBall(session.shipid)
                if ball.surfaceDist < interactionRange and not shipBall.isCloaked:
                    cmdMethod(*args)
                    break
                blue.pyos.synchro.SleepWallclock(500)
        finally:
            if self.approachAndTryTarget == id:
                self.approachAndTryTarget = False
def AbortApproachAndTryCommand(self, nextID = None):
    # Clear the approach target unless we are switching to the same target.
    # Clearing the target first keeps the mutual recursion with
    # CancelSystemNavigation() from looping.
    if nextID != self.approachAndTryTarget:
        self.approachAndTryTarget = None
        self.CancelSystemNavigation()

def AbortWarpAndTryCommand(self, nextID = None):
    # Clear the warp target unless we are switching to the same target.
    if nextID != self.warpAndTryTarget:
        self.warpAndTryTarget = None
        self.CancelSystemNavigation()
def OptimizeRoute(self, *args):
    """Reorder the current waypoints into the shortest round trip (by jump count).

    Collapses stations to their solar systems for the search, runs an
    exhaustive branch-and-bound TSP over jump counts, then expands the
    stations back and re-applies the waypoints. Guarded against re-entry
    by self.isOptimizing.

    (Fix: the pasted source had stray concatenation artifacts fused into the
    final statement, which broke the method's syntax.)
    """
    if self.isOptimizing:
        return
    try:
        self.isOptimizing = True
        starmapSvc = sm.GetService('starmap')
        waypoints = list(starmapSvc.GetWaypoints())
        originalWaypointsLen = len(waypoints)
        # Drop the current system from the tail if present: it marks a return trip.
        isReturnTrip = False
        for idx in reversed(xrange(len(waypoints))):
            if waypoints[idx] == eve.session.solarsystemid2:
                del waypoints[idx]
                isReturnTrip = True
                break
        # Replace stations by their solar systems; remember them for re-expansion.
        solarSystemToStations = defaultdict(list)
        for i, waypoint in enumerate(waypoints):
            if util.IsStation(waypoint):
                solarSystemID = cfg.stations.Get(waypoint).solarSystemID
                solarSystemToStations[solarSystemID].append(waypoint)
                waypoints[i] = solarSystemID
        waypoints = list(set(waypoints))
        if session.solarsystemid2 in waypoints:
            waypoints.remove(session.solarsystemid2)
        numWaypoints = len(waypoints)
        if numWaypoints == 0:
            return
        # The search is exponential; warn the user for long routes.
        msg = None
        if numWaypoints > 12:
            msg = 'UI/Map/MapPallet/msgOptimizeQuestion1'
        elif numWaypoints > 10:
            msg = 'UI/Map/MapPallet/msgOptimizeQuestion2'
        if msg:
            yesNo = eve.Message('AskAreYouSure', {'cons': localization.GetByLabel(msg, numWaypoints=originalWaypointsLen)}, uiconst.YESNO)
            if yesNo != uiconst.ID_YES:
                return
        # Pairwise jump-count matrix, including the current system as origin.
        distance = {}
        waypoints.append(eve.session.solarsystemid2)
        for fromID in waypoints:
            distance[fromID] = {}
            for toID in waypoints:
                if fromID == toID:
                    continue
                distance[fromID][toID] = sm.GetService('pathfinder').GetJumpCountFromCurrent(toID, fromID)
        waypoints.pop()
        prefix = [None]
        _push = prefix.append
        _pop = prefix.pop

        def FindShortestRoute(prefix, distanceSoFar, toID):
            # Depth-first branch and bound over the remaining waypoints.
            distanceTo = distance[toID]
            prefix[-1] = toID
            shortestDist = shortestRouteSoFar[0]
            if len(prefix) < numWaypoints:
                _push(None)
                for i in indexes:
                    toID = waypoints[i]
                    if not toID:
                        continue  # already used on this branch
                    candidateDist = distanceSoFar + distanceTo[toID]
                    if candidateDist >= shortestDist:
                        continue  # bound: cannot beat the best route
                    waypoints[i] = None
                    FindShortestRoute(prefix, candidateDist, toID)
                    waypoints[i] = toID
                _pop()
            else:
                # Last hop: record any improvement.
                for i in indexes:
                    toID = waypoints[i]
                    if not toID:
                        continue
                    candidateDist = distanceSoFar + distanceTo[toID]
                    if candidateDist < shortestDist:
                        shortestRouteSoFar[:] = [candidateDist, prefix[:], toID]
                        shortestDist = candidateDist

        shortestRouteSoFar = [999999999, None, None]
        indexes = range(len(waypoints))
        FindShortestRoute(prefix, 0, eve.session.solarsystemid2)
        distance, waypoints, last = shortestRouteSoFar
        blue.pyos.synchro.SleepWallclock(1)  # yield before touching the UI
        if waypoints is None:
            raise UserError('AutoPilotDisabledUnreachable')
        waypoints.append(last)
        # Expand solar systems back into their original station waypoints.
        waypointsWithStations = []
        for waypoint in waypoints:
            if waypoint in solarSystemToStations:
                waypointsWithStations.extend(solarSystemToStations[waypoint])
            else:
                waypointsWithStations.append(waypoint)
        if isReturnTrip == True:
            sm.GetService('starmap').SetWaypoints(waypointsWithStations + [session.solarsystemid2])
        else:
            sm.GetService('starmap').SetWaypoints(waypointsWithStations)
    finally:
        self.isOptimizing = False
"[email protected]"
]
| |
312796ff98fadf97216ff3c7db06d5b89af9ed2e | cfb01066c08fc4f4b0ab481dc0ff7c6ce2fb9981 | /tests/aggregation/tests.py | e81744df29bd81e34a3ccbb0ab02cadb210194f6 | [
"MIT"
]
| permissive | pombredanne/django-aggregate-if | 21a8dc460f73d629b79be0c54356c70d92048780 | 02f43633c620de53aa7b9479523bbba8013a3900 | refs/heads/master | 2021-01-23T01:30:13.182765 | 2013-01-02T23:47:45 | 2013-01-02T23:47:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,983 | py | from __future__ import absolute_import
import datetime
from decimal import Decimal
from django.db.models import Q, F
from django.test import TestCase, Approximate
from aggregate_if import Sum, Count, Avg, Max, Min
from .models import Author, Publisher, Book, Store
class BaseAggregateTestCase(TestCase):
fixtures = ["aggregation.json"]
def test_empty_aggregate(self):
    # aggregate() with no expressions returns an empty dict.
    self.assertEqual(Author.objects.all().aggregate(), {})

def test_single_aggregate(self):
    """Single aggregates, unconditional and with ``only=`` conditions."""
    vals = Author.objects.aggregate(Avg("age"))
    self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
    vals = Author.objects.aggregate(Sum("age", only=Q(age__gt=29)))
    self.assertEqual(vals, {"age__sum": 254})
    # The conditional aggregate must leave extra() select params intact.
    vals = Author.objects.extra(select={'testparams':'age < %s'}, select_params=[0])\
        .aggregate(Sum("age", only=Q(age__gt=29)))
    self.assertEqual(vals, {"age__sum": 254})
    # OR-ed conditions inside only= are supported too.
    vals = Author.objects.aggregate(Sum("age", only=Q(name__icontains='jaco')|Q(name__icontains='adrian')))
    self.assertEqual(vals, {"age__sum": 69})
def test_multiple_aggregates(self):
    """Several aggregates in one call; only= applies per-aggregate."""
    vals = Author.objects.aggregate(Sum("age"), Avg("age"))
    self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
    vals = Author.objects.aggregate(Sum("age", only=Q(age__gt=29)), Avg("age"))
    self.assertEqual(vals, {"age__sum": 254, "age__avg": Approximate(37.4, places=1)})

def test_filter_aggregate(self):
    """Aggregates combine with queryset filters; only= further restricts rows."""
    vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["age__sum"], 254)
    vals = Author.objects.filter(age__gt=29).aggregate(Sum("age", only=Q(age__lt=29)))
    # If there are no matching aggregates, then None, not 0 is the answer.
    self.assertEqual(vals["age__sum"], None)
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
vals = Author.objects.aggregate(Avg("friends__age", only=Q(age__lt=29)))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 33.67, places=2)
vals2 = Author.objects.filter(age__lt=29).aggregate(Avg("friends__age"))
self.assertEqual(vals, vals2)
vals = Author.objects.aggregate(Avg("friends__age", only=Q(friends__age__lt=35)))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 28.75, places=2)
# The average age of author's friends, whose age is lower than the authors age.
vals = Author.objects.aggregate(Avg("friends__age", only=Q(friends__age__lt=F('age'))))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 30.43, places=2)
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__rating__avg"], 4.0)
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["publisher__num_awards__sum"], 30)
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["books__authors__age__max"], 57)
vals = Store.objects.aggregate(Max("books__authors__age", only=Q(books__authors__age__lt=56)))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["books__authors__age__max"], 46)
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by('pk'), [
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
],
lambda b: b.name
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=1)
self.assertEqual(
b.name,
'The Definitive Guide to Django: Web Development Done Right'
)
self.assertEqual(b.mean_age, 34.5)
'''
def test_f_expression(self):
publishers = Publisher.objects.annotate(avg_rating=Avg(F('book__rating') - 0))
publishers = publishers.values_list('id', 'avg_rating').order_by('id')
self.assertEqual(list(publishers), [(1, 4.25), (2, 3.0), (3, 4.0), (4, 5.0), (5, None)])
def test_only_condition_with_join(self):
# Test extra-select
books = Book.objects.annotate(mean_age=Avg("authors__age"))
books = books.annotate(mean_age2=Avg('authors__age', only=Q(authors__age__gte=0)))
books = books.extra(select={'testparams': 'publisher_id = %s'}, select_params=[1])
b = books.get(pk=1)
self.assertEqual(b.mean_age, 34.5)
self.assertEqual(b.mean_age2, 34.5)
self.assertEqual(b.testparams, True)
def test_relabel_aliases(self):
# Test relabel_aliases
excluded_authors = Author.objects.annotate(book_rating=Min(F('book__rating') + 5, only=Q(pk__gte=1)))
excluded_authors = excluded_authors.filter(book_rating__lt=0)
books = books.exclude(authors__in=excluded_authors)
b = books.get(pk=1)
self.assertEqual(b.mean_age, 34.5)
def test_joins_in_f(self):
# Test joins in F-based annotation
books = Book.objects.annotate(oldest=Max(F('authors__age')))
books = books.values_list('rating', 'oldest').order_by('rating', 'oldest')
self.assertEqual(
list(books),
[(3.0, 45), (4.0, 29), (4.0, 37), (4.0, 57), (4.5, 35), (5.0, 57)]
)
'''
def test_annotate_m2m(self):
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 51.5),
('Practical Django Projects', 29.0),
('Python Web Development with Django', Approximate(30.3, places=1)),
('Sams Teach Yourself Django in 24 Hours', 45.0)
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
],
lambda b: (b.name, b.num_authors)
)
def test_backwards_m2m_annotate(self):
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 4.5),
('Brad Dayley', 3.0),
('Jacob Kaplan-Moss', 4.5),
('James Bennett', 4.0),
('Paul Bissex', 4.0),
('Stuart Russell', 4.0)
],
lambda a: (a.name, a.book__rating__avg)
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 1),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 1),
('Peter Norvig', 2),
('Stuart Russell', 1),
('Wesley J. Chun', 1)
],
lambda a: (a.name, a.num_books)
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 7),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
('Practical Django Projects', 3),
('Python Web Development with Django', 7),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 3)
],
lambda b: (b.name, b.publisher__num_awards__sum)
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers, [
('Apress', Decimal("59.69")),
("Jonno's House of Books", None),
('Morgan Kaufmann', Decimal("75.00")),
('Prentice Hall', Decimal("112.49")),
('Sams', Decimal("23.09"))
],
lambda p: (p.name, p.book__price__sum)
)
def test_annotate_values(self):
books = list(Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values())
self.assertEqual(
books, [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
self.assertEqual(
list(books), [
{
"pk": 1,
"isbn": "159059725",
"mean_age": 34.5,
}
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values("name")
self.assertEqual(
list(books), [
{
"name": "The Definitive Guide to Django: Web Development Done Right"
}
]
)
books = Book.objects.filter(pk=1).values().annotate(mean_age=Avg('authors__age'))
self.assertEqual(
list(books), [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
self.assertEqual(
list(books), [
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1)
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
}
]
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertEqual(len(authors), 9)
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 32.0),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 29.5),
('James Bennett', 34.0),
('Jeffrey Forcier', 27.0),
('Paul Bissex', 31.0),
('Peter Norvig', 46.0),
('Stuart Russell', 57.0),
('Wesley J. Chun', Approximate(33.66, places=1))
],
lambda a: (a.name, a.friends__age__avg)
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
vals = Book.objects.aggregate(Count("rating", distinct=True))
self.assertEqual(vals, {"rating__count": 4})
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count('book__id')))
implicit = list(Author.objects.annotate(Count('book')))
self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
self.assertEqual(
list(books), [
{
"rating": 4.5,
"oldest": 35,
},
{
"rating": 3.0,
"oldest": 45
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 5.0,
"oldest": 57,
}
]
)
books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
self.assertEqual(
list(books), [
{
"rating": 5.0,
"oldest": 57,
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 3.0,
"oldest": 45,
},
{
"rating": 4.5,
"oldest": 35,
}
]
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_filtering(self):
p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
Book.objects.create(
name='ExpensiveBook1',
pages=1,
isbn='111',
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008,12,1)
)
Book.objects.create(
name='ExpensiveBook2',
pages=1,
isbn='222',
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008,12,2)
)
Book.objects.create(
name='ExpensiveBook3',
pages=1,
isbn='333',
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008,12,3)
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Sams",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=1).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__ge=2).order_by("pk")
self.assertQuerysetEqual(
books, [
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
self.assertQuerysetEqual(
authors, [
"Brad Dayley",
],
lambda a: a.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
],
lambda p: p.name
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
self.assertQuerysetEqual(
books, [
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains='Norvig')
b = Book.objects.get(name__contains='Done Right')
b.authors.add(a)
b.save()
vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(earliest_book=Min("book__pubdate")).exclude(earliest_book=None).order_by("earliest_book").values()
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': 4,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': 3,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': 1,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': 2,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
self.assertEqual(
list(books), [
(1, "159059725", 34.5),
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("isbn")
self.assertEqual(
list(books), [
('159059725',)
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
self.assertEqual(
list(books), [
(34.5,)
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
self.assertEqual(list(books), [34.5])
books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
self.assertEqual(
list(books), [
(Decimal("29.69"), 2),
(Decimal('23.09'), 1),
(Decimal('30'), 1),
(Decimal('75'), 1),
(Decimal('82.8'), 1),
]
)
| [
"[email protected]"
]
| |
45ffaf4426419e33898e073ea47511d7dd5e942c | 632d417159013940d612f7364c2a7c5c88b52a56 | /esign/esign_app/migrations/0011_auto_20180119_1323.py | 1a72ae50f09c5a75f3a94acd9845c1324ab2f88a | []
| no_license | cityking/esign | e553d6197f383fab0435dec5805f140592e2fdfc | f88279e3b7f5800bd5ad3a0bd95ebf494078da4c | refs/heads/master | 2022-11-02T20:12:54.927931 | 2018-02-09T03:02:37 | 2018-02-09T03:02:37 | 120,849,522 | 0 | 0 | null | 2022-10-20T20:28:59 | 2018-02-09T03:07:20 | Python | UTF-8 | Python | false | false | 968 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-01-19 05:23
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('esign_app', '0010_auto_20180119_1322'),
]
operations = [
migrations.AlterField(
model_name='appversion',
name='url',
field=models.CharField(max_length=100, verbose_name='下载地址'),
),
migrations.AlterField(
model_name='myuser',
name='join_date',
field=models.DateTimeField(default=datetime.datetime(2018, 1, 19, 13, 23, 34, 652029), verbose_name='加入时间'),
),
migrations.AlterField(
model_name='sign',
name='create_time',
field=models.DateTimeField(default=datetime.datetime(2018, 1, 19, 13, 23, 34, 654114), verbose_name='创建时间'),
),
]
| [
"[email protected]"
]
| |
e6cb1c6ae8c2f5f50118d4848598853900007fbf | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqptcapacity/l3remoteusageper1year.py | 9f5ceefbfc23a4350dd913a6766bce7204c280e9 | []
| no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 19,817 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class L3RemoteUsagePer1year(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.eqptcapacity.L3RemoteUsagePer1year", "Layer3 remote entries usage percentage")
counter = CounterMeta("normalizedRemotev6", CounterCategory.GAUGE, "percentage", "Remote v6 L3 entries usage percentage")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "normalizedRemotev6Last"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "normalizedRemotev6Min"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "normalizedRemotev6Max"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "normalizedRemotev6Avg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "normalizedRemotev6Spct"
counter._propRefs[PropCategory.IMPLICIT_TOTAL] = "normalizedRemotev6Ttl"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "normalizedRemotev6Thr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "normalizedRemotev6TrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "normalizedRemotev6Tr"
meta._counters.append(counter)
counter = CounterMeta("normalizedRemotev4", CounterCategory.GAUGE, "percentage", "Remote v4 L3 entries usage percentage")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "normalizedRemotev4Last"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "normalizedRemotev4Min"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "normalizedRemotev4Max"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "normalizedRemotev4Avg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "normalizedRemotev4Spct"
counter._propRefs[PropCategory.IMPLICIT_TOTAL] = "normalizedRemotev4Ttl"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "normalizedRemotev4Thr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "normalizedRemotev4TrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "normalizedRemotev4Tr"
meta._counters.append(counter)
meta.moClassName = "eqptcapacityL3RemoteUsagePer1year"
meta.rnFormat = "CDeqptcapacityL3RemoteUsagePer1year"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current Layer3 remote entries usage percentage stats in 1 year"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.eqptcapacity.Entity")
meta.superClasses.add("cobra.model.eqptcapacity.L3RemoteUsagePer")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.stats.Item")
meta.rnPrefixes = [
('CDeqptcapacityL3RemoteUsagePer1year', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "normalizedRemotev4Avg", "normalizedRemotev4Avg", 36635, PropCategory.IMPLICIT_AVG)
prop.label = "Remote v4 L3 entries usage percentage average value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Avg", prop)
prop = PropMeta("str", "normalizedRemotev4Last", "normalizedRemotev4Last", 36632, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Remote v4 L3 entries usage percentage current value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Last", prop)
prop = PropMeta("str", "normalizedRemotev4Max", "normalizedRemotev4Max", 36634, PropCategory.IMPLICIT_MAX)
prop.label = "Remote v4 L3 entries usage percentage maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Max", prop)
prop = PropMeta("str", "normalizedRemotev4Min", "normalizedRemotev4Min", 36633, PropCategory.IMPLICIT_MIN)
prop.label = "Remote v4 L3 entries usage percentage minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Min", prop)
prop = PropMeta("str", "normalizedRemotev4Spct", "normalizedRemotev4Spct", 36636, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Remote v4 L3 entries usage percentage suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Spct", prop)
prop = PropMeta("str", "normalizedRemotev4Thr", "normalizedRemotev4Thr", 36638, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Remote v4 L3 entries usage percentage thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("normalizedRemotev4Thr", prop)
prop = PropMeta("str", "normalizedRemotev4Tr", "normalizedRemotev4Tr", 36640, PropCategory.IMPLICIT_TREND)
prop.label = "Remote v4 L3 entries usage percentage trend"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Tr", prop)
prop = PropMeta("str", "normalizedRemotev4TrBase", "normalizedRemotev4TrBase", 36639, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Remote v4 L3 entries usage percentage trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4TrBase", prop)
prop = PropMeta("str", "normalizedRemotev4Ttl", "normalizedRemotev4Ttl", 36637, PropCategory.IMPLICIT_TOTAL)
prop.label = "Remote v4 L3 entries usage percentage total sum"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev4Ttl", prop)
prop = PropMeta("str", "normalizedRemotev6Avg", "normalizedRemotev6Avg", 36650, PropCategory.IMPLICIT_AVG)
prop.label = "Remote v6 L3 entries usage percentage average value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Avg", prop)
prop = PropMeta("str", "normalizedRemotev6Last", "normalizedRemotev6Last", 36647, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Remote v6 L3 entries usage percentage current value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Last", prop)
prop = PropMeta("str", "normalizedRemotev6Max", "normalizedRemotev6Max", 36649, PropCategory.IMPLICIT_MAX)
prop.label = "Remote v6 L3 entries usage percentage maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Max", prop)
prop = PropMeta("str", "normalizedRemotev6Min", "normalizedRemotev6Min", 36648, PropCategory.IMPLICIT_MIN)
prop.label = "Remote v6 L3 entries usage percentage minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Min", prop)
prop = PropMeta("str", "normalizedRemotev6Spct", "normalizedRemotev6Spct", 36651, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Remote v6 L3 entries usage percentage suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Spct", prop)
prop = PropMeta("str", "normalizedRemotev6Thr", "normalizedRemotev6Thr", 36653, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Remote v6 L3 entries usage percentage thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("normalizedRemotev6Thr", prop)
prop = PropMeta("str", "normalizedRemotev6Tr", "normalizedRemotev6Tr", 36655, PropCategory.IMPLICIT_TREND)
prop.label = "Remote v6 L3 entries usage percentage trend"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Tr", prop)
prop = PropMeta("str", "normalizedRemotev6TrBase", "normalizedRemotev6TrBase", 36654, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Remote v6 L3 entries usage percentage trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6TrBase", prop)
prop = PropMeta("str", "normalizedRemotev6Ttl", "normalizedRemotev6Ttl", 36652, PropCategory.IMPLICIT_TOTAL)
prop.label = "Remote v6 L3 entries usage percentage total sum"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedRemotev6Ttl", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
]
| |
7edd65620a859a56b61c8982fe8c8e7e7b8822cf | e8274f167fd219ef78241ba8ea89e5d5875ed794 | /cloud/quantum/quantum/api/v2/resource.py | 757d20061e74194d59943d327de57d095cd418a3 | [
"Apache-2.0"
]
| permissive | virt2x/folsomCloud | 02db0147f7e0f2ab0375faf4f36ca08272084152 | e6fd612dd77f35a72739cf4d4750e9795c0fa508 | refs/heads/master | 2021-01-01T17:26:28.405651 | 2013-10-17T12:36:04 | 2013-10-17T12:36:04 | 13,647,787 | 0 | 1 | null | 2020-07-24T08:25:22 | 2013-10-17T12:10:24 | Python | UTF-8 | Python | false | false | 4,991 | py | # Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility methods for working with WSGI servers redux
"""
import netaddr
import webob
import webob.dec
import webob.exc
from quantum.common import exceptions
from quantum import context
from quantum.openstack.common import jsonutils as json
from quantum.openstack.common import log as logging
from quantum import wsgi
LOG = logging.getLogger(__name__)
class Request(webob.Request):
    """webob.Request subclass carrying Openstack API-specific helpers."""

    def best_match_content_type(self):
        """Return the best supported response content type (JSON only)."""
        supported_types = ('application/json', )
        return self.accept.best_match(supported_types,
                                      default_match='application/json')

    @property
    def context(self):
        """Lazily attach and return the request context.

        Eventually the Auth[NZ] code will supply this; once that happens
        this fallback should raise instead of defaulting to admin.
        """
        env = self.environ
        if 'quantum.context' not in env:
            env['quantum.context'] = context.get_admin_context()
        return env['quantum.context']
def Resource(controller, faults=None, deserializers=None, serializers=None):
    """Build a WSGI app wrapping *controller* with (de)serialization logic.

    :param controller: object whose methods implement the API actions
    :param faults: optional dict mapping exception types to webob HTTP
        exception classes used to translate controller errors
    :param deserializers: optional content-type -> deserializer overrides
    :param serializers: optional content-type -> serializer overrides
    :returns: a webob-wrapped WSGI callable dispatching to *controller*
    """
    default_deserializers = {'application/xml': wsgi.XMLDeserializer(),
                             'application/json': lambda x: json.loads(x)}
    default_serializers = {'application/xml': wsgi.XMLDictSerializer(),
                           'application/json': lambda x: json.dumps(x)}
    format_types = {'xml': 'application/xml',
                    'json': 'application/json'}
    # Non-default success codes per action (everything else returns 200).
    action_status = dict(create=201, delete=204)

    # Caller-supplied (de)serializers override the defaults per content type.
    default_deserializers.update(deserializers or {})
    default_serializers.update(serializers or {})

    deserializers = default_deserializers
    serializers = default_serializers
    faults = faults or {}

    @webob.dec.wsgify(RequestClass=Request)
    def resource(request):
        # Routing args come from the routes middleware, if present.
        route_args = request.environ.get('wsgiorg.routing_args')
        if route_args:
            args = route_args[1].copy()
        else:
            args = {}

        # NOTE(jkoelker) by now the controller is already found, remove
        #                it from the args if it is in the matchdict
        args.pop('controller', None)
        fmt = args.pop('format', None)
        action = args.pop('action', None)

        # Explicit ".json"/".xml" URL format wins over the Accept header.
        content_type = format_types.get(fmt,
                                        request.best_match_content_type())
        deserializer = deserializers.get(content_type)
        serializer = serializers.get(content_type)

        try:
            if request.body:
                args['body'] = deserializer(request.body)

            method = getattr(controller, action)

            result = method(request=request, **args)
        except (ValueError, AttributeError,
                exceptions.QuantumException,
                netaddr.AddrFormatError) as e:
            LOG.exception('%s failed' % action)
            body = serializer({'QuantumError': str(e)})
            kwargs = {'body': body, 'content_type': content_type}
            # Translate known exception types to their mapped HTTP faults;
            # anything unmapped becomes a 500.
            for fault in faults:
                if isinstance(e, fault):
                    raise faults[fault](**kwargs)
            raise webob.exc.HTTPInternalServerError(**kwargs)
        except webob.exc.HTTPException as e:
            # Controller raised an HTTP error directly; serialize its body
            # and re-raise it unchanged.
            LOG.exception('%s failed' % action)
            e.body = serializer({'QuantumError': str(e)})
            e.content_type = content_type
            raise
        except Exception as e:
            # NOTE(jkoelker) Everything else is a 500
            LOG.exception('%s failed' % action)
            # Do not expose details of 500 error to clients.
            msg = _('Request Failed: internal server error while '
                    'processing your request.')
            body = serializer({'QuantumError': msg})
            kwargs = {'body': body, 'content_type': content_type}
            raise webob.exc.HTTPInternalServerError(**kwargs)

        status = action_status.get(action, 200)
        body = serializer(result)
        # NOTE(jkoelker) Comply with RFC2616 section 9.7: a 204 response
        #                must carry no body and no content type.
        if status == 204:
            content_type = ''
            body = None

        return webob.Response(request=request, status=status,
                              content_type=content_type,
                              body=body)
    return resource
| [
"[email protected]"
]
| |
f01a21e3061792d927357110f3970d7c03ba9050 | 8ed86b8e9c451abcb2ce0ddf2f2067c11f3993d8 | /tests/test_osmnx.py | 33ec027e2986a7c620183d88cd5c271556bd3600 | [
"MIT"
]
| permissive | surfcao/osmnx | 65830096c21b8353a536f776dfedba7de20eac4c | 51c9addb42425657fa6b11c7442f79f10b9e3e22 | refs/heads/master | 2021-01-19T23:32:40.068378 | 2017-04-19T20:22:01 | 2017-04-19T20:22:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,082 | py | """
OSMnx tests
-----------
"""
import matplotlib as mpl
mpl.use('Agg') #use agg backend so you don't need a display on travis-ci

import os, shutil
# Start from a clean scratch directory so cached responses/logs/images from a
# previous run cannot influence this one.
if os.path.exists('.temp'):
    shutil.rmtree('.temp')

import osmnx as ox, logging as lg
# Route all osmnx artifacts (data, logs, images, HTTP cache) into .temp/.
ox.config(log_console=True, log_file=True, use_cache=True,
          data_folder='.temp/data', logs_folder='.temp/logs', imgs_folder='.temp/imgs', cache_folder='.temp/cache')

# Exercise each logging level once.
ox.log('test debug', level=lg.DEBUG)
ox.log('test info', level=lg.INFO)
ox.log('test warning', level=lg.WARNING)
ox.log('test error', level=lg.ERROR)
def test_imports():
    """Smoke-test that every dependency osmnx relies on is importable."""
    # standard library
    import json
    import math
    import sys
    import os
    import io
    import ast
    import unicodedata
    import hashlib
    import re
    import random
    import time
    import warnings
    import datetime as dt
    import logging as lg
    from collections import OrderedDict, Counter
    from itertools import groupby, chain
    # third party
    from dateutil import parser as date_parser
    import requests
    import numpy as np
    import pandas as pd
    import geopandas as gpd
    import networkx as nx
    import matplotlib.pyplot as plt
    import matplotlib.cm as cm
    from matplotlib.collections import LineCollection
    from shapely.geometry import Point, LineString, Polygon, MultiPolygon
    from shapely import wkt
    from shapely.ops import unary_union
    from descartes import PolygonPatch
    from rtree.index import Index as RTreeIndex
def test_gdf_shapefiles():
    """Round-trip a place GeoDataFrame through projection, saving, plotting."""
    place = 'Manhattan, New York City, New York, USA'
    gdf = ox.gdf_from_place(place)
    projected = ox.project_gdf(gdf, to_crs={'init': 'epsg:3395'})
    ox.save_gdf_shapefile(projected)
    # Re-query the same place with a 100 m buffer and plot the shape.
    buffered = ox.gdf_from_place(place, buffer_dist=100)
    ox.plot_shape(buffered)
def test_network_saving_loading():
    """Save a graph as shapefile/GraphML and rebuild it from GeoDataFrames."""
    graph = ox.graph_from_place('Piedmont, California, USA')
    projected = ox.project_graph(graph)
    ox.save_graph_shapefile(projected)
    ox.save_graphml(projected)
    reloaded = ox.load_graphml('graph.graphml')
    # edges only, without filling in missing edge geometries
    edges = ox.graph_to_gdfs(graph, nodes=False, edges=True,
                             fill_edge_geometry=False)
    # nodes and edges together, with all geometries materialized
    nodes, edges = ox.graph_to_gdfs(graph, nodes=True, edges=True,
                                    node_geometry=True, fill_edge_geometry=True)
    rebuilt = ox.gdfs_to_graph(nodes, edges)
def test_get_network_methods():
    """Exercise every graph_from_* entry point."""
    import geopandas as gpd

    # from a bounding box
    north, south, east, west = 37.79, 37.78, -122.41, -122.43
    bbox_graph = ox.graph_from_bbox(north, south, east, west,
                                    network_type='drive_service')
    bbox_graph = ox.graph_from_bbox(north, south, east, west,
                                    network_type='drive_service',
                                    truncate_by_edge=True)

    # from a (lat, lon) point
    location_point = (37.791427, -122.410018)
    bbox = ox.bbox_from_point(location_point, project_utm=True)
    point_graph = ox.graph_from_point(location_point, distance=750,
                                      distance_type='bbox',
                                      network_type='drive')
    point_graph = ox.graph_from_point(location_point, distance=500,
                                      distance_type='network')

    # from a street address
    address_graph = ox.graph_from_address(address='350 5th Ave, New York, NY',
                                          distance=1000,
                                          distance_type='network',
                                          network_type='bike')

    # from a list of mixed place descriptors
    places = ['Los Altos, California, USA',
              {'city': 'Los Altos Hills', 'state': 'California'},
              'Loyola, California']
    places_graph = ox.graph_from_place(places, network_type='all',
                                       clean_periphery=False)

    # from a shapely polygon
    calif = gpd.read_file('examples/input_data/ZillowNeighborhoods-CA')
    mission_district = calif[(calif['CITY'] == 'San Francisco') & (calif['NAME'] == 'Mission')]
    polygon = mission_district['geometry'].iloc[0]
    polygon_graph = ox.graph_from_polygon(polygon, network_type='walk')
def test_stats():
    """Compute basic and extended network stats for a small graph."""
    center = (37.791427, -122.410018)
    graph = ox.graph_from_point(center, distance=500, distance_type='network')
    basic = ox.basic_stats(graph)
    basic = ox.basic_stats(graph, area=1000)
    extended = ox.extended_stats(graph, connectivity=True, anc=True, ecc=True,
                                 bc=True, cc=True)
def test_plots():
    """Exercise the plotting helpers on raw, simplified and projected graphs."""
    raw = ox.graph_from_place('Piedmont, California, USA',
                              network_type='drive', simplify=False)
    loose = ox.simplify_graph(raw, strict=False)
    node_colors = ox.get_node_colors_by_attr(loose, 'osmid')
    edge_colors = ox.get_edge_colors_by_attr(loose, 'length')
    fig, ax = ox.plot_graph(raw, save=True, file_format='png')

    simplified = ox.simplify_graph(raw)
    fig, ax = ox.plot_graph(simplified, show=False, save=True, close=True,
                            file_format='svg')

    projected = ox.project_graph(simplified)
    fig, ax = ox.plot_graph(projected)
    # plot with (nearly) every styling knob turned on
    fig, ax = ox.plot_graph(projected, fig_height=5, fig_width=5, margin=0.05,
                            axis_off=False, bgcolor='y', file_format='png',
                            filename='x', dpi=180, annotate=True,
                            node_color='k', node_size=5, node_alpha=0.1,
                            node_edgecolor='b', node_zorder=5, edge_color='r',
                            edge_linewidth=2, edge_alpha=0.1, use_geom=False,
                            show=False, save=True, close=True)

    # figure-ground diagrams from a graph, a point, and an address
    fig, ax = ox.plot_figure_ground(G=simplified, file_format='png')
    fig, ax = ox.plot_figure_ground(point=(33.694981, -117.841375),
                                    file_format='png')
    fig, ax = ox.plot_figure_ground(address='Denver, Colorado, USA',
                                    file_format='png')
def test_routing_folium():
    """Find a shortest path and render it with matplotlib and folium."""
    import networkx as nx
    graph = ox.graph_from_address('N. Sicily Pl., Chandler, Arizona',
                                  distance=800, network_type='drive')
    origin = (33.307792, -111.894940)
    destination = (33.312994, -111.894998)
    origin_node = ox.get_nearest_node(graph, origin)
    destination_node = ox.get_nearest_node(graph, destination)
    route = nx.shortest_path(graph, origin_node, destination_node)
    attributes = ox.get_route_edge_attributes(graph, route, 'length')

    # static plots, with and without origin/destination markers
    fig, ax = ox.plot_graph_route(graph, route, save=True, filename='route',
                                  file_format='png')
    fig, ax = ox.plot_graph_route(graph, route, origin_point=origin,
                                  destination_point=destination,
                                  save=True, filename='route',
                                  file_format='png')

    # interactive folium maps
    graph_map = ox.plot_graph_folium(graph, popup_attribute='name')
    route_map = ox.plot_route_folium(graph, route)
def test_buildings():
    """Download building footprints by place and by address, then plot them."""
    footprints = ox.buildings_from_place(place='Piedmont, California, USA')
    footprints = ox.buildings_from_address(address='San Francisco, California, USA',
                                           distance=300)
    fig, ax = ox.plot_buildings(footprints)
| [
"[email protected]"
]
| |
b93919749d0ffe49d019f0a0385ed8d83d3592da | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /v8/tools/testrunner/local/testsuite.py | f7fa19b20a0b5ce18abe0cd934fbbe12145291b8 | [
"BSD-3-Clause",
"SunPro",
"bzip2-1.0.6"
]
| permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 12,313 | py | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import imp
import os
from . import commands
from . import statusfile
from . import utils
from ..objects import testcase
from variants import ALL_VARIANTS, ALL_VARIANT_FLAGS, FAST_VARIANT_FLAGS
# Variant name sets used to restrict which testing variants run for a test.
FAST_VARIANTS = set(["default", "turbofan"])
STANDARD_VARIANT = set(["default"])
class VariantGenerator(object):
  """Determines the testing variants (and flag sets) applying to a test."""

  def __init__(self, suite, variants):
    self.suite = suite
    # Intersect the runner-requested variants with each known variant set.
    self.all_variants = ALL_VARIANTS & variants
    self.fast_variants = FAST_VARIANTS & variants
    self.standard_variant = STANDARD_VARIANT & variants

  def FilterVariantsByTest(self, testcase):
    """Return the variant set honoring the test's status-file outcomes."""
    outcomes = testcase.outcomes
    if outcomes and statusfile.OnlyStandardVariant(outcomes):
      return self.standard_variant
    if outcomes and statusfile.OnlyFastVariants(outcomes):
      return self.fast_variants
    return self.all_variants

  def GetFlagSets(self, testcase, variant):
    """Return the flag sets for |variant|, preferring the fast table when
    the status file restricts this test to fast variants."""
    fast_only = (testcase.outcomes and
                 statusfile.OnlyFastVariants(testcase.outcomes))
    flag_table = FAST_VARIANT_FLAGS if fast_only else ALL_VARIANT_FLAGS
    return flag_table[variant]
class TestSuite(object):
  """Base class for a v8 test suite: discovery, status-file filtering and
  outcome evaluation for its test cases."""

  @staticmethod
  def LoadTestSuite(root, global_init=True):
    """Load the suite config module ("testcfg") from |root|, falling back
    to a GoogleTestSuite when no testcfg module is present."""
    name = root.split(os.path.sep)[-1]
    f = None
    try:
      (f, pathname, description) = imp.find_module("testcfg", [root])
      module = imp.load_module("testcfg", f, pathname, description)
      return module.GetSuite(name, root)
    except ImportError:
      # Use default if no testcfg is present.
      return GoogleTestSuite(name, root)
    finally:
      if f:
        f.close()

  def __init__(self, name, root):
    # Note: This might be called concurrently from different processes.
    self.name = name  # string
    self.root = root  # string containing path
    self.tests = None  # list of TestCase objects
    self.rules = None  # dictionary mapping test path to list of outcomes
    self.wildcards = None  # dictionary mapping test paths to list of outcomes
    self.total_duration = None  # float, assigned on demand

  def shell(self):
    """Name of the executable used to run this suite's tests."""
    return "d8"

  def suffix(self):
    """File suffix of this suite's test sources."""
    return ".js"

  def status_file(self):
    """Path of the status file listing per-test expected outcomes."""
    return "%s/%s.status" % (self.root, self.name)

  # Used in the status file and for stdout printing.
  def CommonTestName(self, testcase):
    if utils.IsWindows():
      return testcase.path.replace("\\", "/")
    else:
      return testcase.path

  def ListTests(self, context):
    """Discover the suite's test cases; must be overridden by subclasses."""
    raise NotImplementedError

  def _VariantGeneratorFactory(self):
    """The variant generator class to be used."""
    return VariantGenerator

  def CreateVariantGenerator(self, variants):
    """Return a generator for the testing variants of this suite.

    Args:
      variants: List of variant names to be run as specified by the test
                runner.
    Returns: An object of type VariantGenerator.
    """
    return self._VariantGeneratorFactory()(self, set(variants))

  def PrepareSources(self):
    """Called once before multiprocessing for doing file-system operations.

    This should not access the network. For network access use the method
    below.
    """
    pass

  def DownloadData(self):
    pass

  def ReadStatusFile(self, variables):
    """Parse the status file into self.rules / self.wildcards."""
    with open(self.status_file()) as f:
      self.rules, self.wildcards = (
          statusfile.ReadStatusFile(f.read(), variables))

  def ReadTestCases(self, context):
    self.tests = self.ListTests(context)

  @staticmethod
  def _FilterSlow(slow, mode):
    # True means: drop this test under the given slow-tests mode.
    return (mode == "run" and not slow) or (mode == "skip" and slow)

  @staticmethod
  def _FilterPassFail(pass_fail, mode):
    # True means: drop this test under the given pass/fail-tests mode.
    return (mode == "run" and not pass_fail) or (mode == "skip" and pass_fail)

  def FilterTestCasesByStatus(self, warn_unused_rules,
                              slow_tests="dontcare",
                              pass_fail_tests="dontcare",
                              variants=False):
    """Drop tests that the status file (or slow/pass-fail modes) skips, and
    attach status-file outcomes/flags to the remaining test cases."""
    # Use only variants-dependent rules and wildcards when filtering
    # respective test cases and generic rules when filtering generic test
    # cases.
    if not variants:
      rules = self.rules[""]
      wildcards = self.wildcards[""]
    else:
      # We set rules and wildcards to a variant-specific version for each test
      # below.
      rules = {}
      wildcards = {}
    filtered = []

    # Remember used rules as tuples of (rule, variant), where variant is "" for
    # variant-independent rules.
    used_rules = set()

    for t in self.tests:
      slow = False
      pass_fail = False
      testname = self.CommonTestName(t)
      variant = t.variant or ""

      if variants:
        rules = self.rules[variant]
        wildcards = self.wildcards[variant]

      if testname in rules:
        used_rules.add((testname, variant))
        # Even for skipped tests, as the TestCase object stays around and
        # PrintReport() uses it.
        t.outcomes = t.outcomes | rules[testname]
        if statusfile.DoSkip(t.outcomes):
          continue  # Don't add skipped tests to |filtered|.
        for outcome in t.outcomes:
          if outcome.startswith('Flags: '):
            t.flags += outcome[7:].split()
        slow = statusfile.IsSlow(t.outcomes)
        pass_fail = statusfile.IsPassOrFail(t.outcomes)

      skip = False
      for rule in wildcards:
        assert rule[-1] == '*'
        if testname.startswith(rule[:-1]):
          used_rules.add((rule, variant))
          t.outcomes = t.outcomes | wildcards[rule]
          if statusfile.DoSkip(t.outcomes):
            skip = True
            break  # "for rule in wildcards"
          slow = slow or statusfile.IsSlow(t.outcomes)
          pass_fail = pass_fail or statusfile.IsPassOrFail(t.outcomes)
      if (skip
          or self._FilterSlow(slow, slow_tests)
          or self._FilterPassFail(pass_fail, pass_fail_tests)):
        continue  # "for t in self.tests"

      filtered.append(t)
    self.tests = filtered

    if not warn_unused_rules:
      return

    # Report status-file entries that matched no discovered test.
    if not variants:
      for rule in self.rules[""]:
        if (rule, "") not in used_rules:
          print("Unused rule: %s -> %s (variant independent)" % (
              rule, self.rules[""][rule]))
      for rule in self.wildcards[""]:
        if (rule, "") not in used_rules:
          print("Unused rule: %s -> %s (variant independent)" % (
              rule, self.wildcards[""][rule]))
    else:
      for variant in ALL_VARIANTS:
        for rule in self.rules[variant]:
          if (rule, variant) not in used_rules:
            print("Unused rule: %s -> %s (variant: %s)" % (
                rule, self.rules[variant][rule], variant))
        for rule in self.wildcards[variant]:
          if (rule, variant) not in used_rules:
            print("Unused rule: %s -> %s (variant: %s)" % (
                rule, self.wildcards[variant][rule], variant))

  def FilterTestCasesByArgs(self, args):
    """Filter test cases based on command-line arguments.

    An argument with an asterisk in the end will match all test cases
    that have the argument as a prefix. Without asterisk, only exact matches
    will be used with the exception of the test-suite name as argument.
    """
    filtered = []
    globs = []
    exact_matches = []
    for a in args:
      argpath = a.split('/')
      if argpath[0] != self.name:
        continue
      if len(argpath) == 1 or (len(argpath) == 2 and argpath[1] == '*'):
        return  # Don't filter, run all tests in this suite.
      path = '/'.join(argpath[1:])
      if path[-1] == '*':
        path = path[:-1]
        globs.append(path)
      else:
        exact_matches.append(path)
    for t in self.tests:
      for a in globs:
        if t.path.startswith(a):
          filtered.append(t)
          break
      for a in exact_matches:
        if t.path == a:
          filtered.append(t)
          break
    self.tests = filtered

  def GetFlagsForTestCase(self, testcase, context):
    raise NotImplementedError

  def GetSourceForTest(self, testcase):
    return "(no source available)"

  def IsFailureOutput(self, testcase):
    return testcase.output.exit_code != 0

  def IsNegativeTest(self, testcase):
    # Negative tests are expected to fail; subclasses may override.
    return False

  def HasFailed(self, testcase):
    execution_failed = self.IsFailureOutput(testcase)
    if self.IsNegativeTest(testcase):
      # A negative test "fails" when its execution unexpectedly succeeds.
      return not execution_failed
    else:
      return execution_failed

  def GetOutcome(self, testcase):
    """Map a finished test case to its statusfile outcome constant."""
    if testcase.output.HasCrashed():
      return statusfile.CRASH
    elif testcase.output.HasTimedOut():
      return statusfile.TIMEOUT
    elif self.HasFailed(testcase):
      return statusfile.FAIL
    else:
      return statusfile.PASS

  def HasUnexpectedOutput(self, testcase):
    outcome = self.GetOutcome(testcase)
    # Tests without explicit outcomes are expected to PASS.
    return not outcome in (testcase.outcomes or [statusfile.PASS])

  def StripOutputForTransmit(self, testcase):
    # Drop stdout/stderr of passing tests to keep IPC payloads small.
    if not self.HasUnexpectedOutput(testcase):
      testcase.output.stdout = ""
      testcase.output.stderr = ""

  def CalculateTotalDuration(self):
    self.total_duration = 0.0
    for t in self.tests:
      self.total_duration += t.duration
    return self.total_duration
class StandardVariantGenerator(VariantGenerator):
  """Variant generator that always restricts tests to the standard variant."""

  def FilterVariantsByTest(self, testcase):
    # Ignore status-file outcomes; only the "default" variant applies.
    return self.standard_variant
class GoogleTestSuite(TestSuite):
  """Test suite backed by a googletest binary (listed via --gtest_list_tests).

  Note: this file is Python 2 (print statements, xrange).
  """

  def __init__(self, name, root):
    super(GoogleTestSuite, self).__init__(name, root)

  def ListTests(self, context):
    """Run the gtest binary with --gtest_list_tests and parse its output
    into TestCase objects (one per "<TestCase>.<TestName>" pair)."""
    shell = os.path.abspath(os.path.join(context.shell_dir, self.shell()))
    if utils.IsWindows():
      shell += ".exe"

    output = None
    for i in xrange(3): # Try 3 times in case of errors.
      output = commands.Execute(context.command_prefix +
                                [shell, "--gtest_list_tests"] +
                                context.extra_flags)
      if output.exit_code == 0:
        break
      print "Test executable failed to list the tests (try %d).\n\nStdout:" % i
      print output.stdout
      print "\nStderr:"
      print output.stderr
      print "\nExit code: %d" % output.exit_code
    else:
      # for/else: reached only when all 3 attempts exit non-zero.
      raise Exception("Test executable failed to list the tests.")

    tests = []
    test_case = ''

    for line in output.stdout.splitlines():
      test_desc = line.strip().split()[0]
      # gtest output: a trailing '.' marks a test-case group header; the
      # indented lines below it are the individual test names.
      if test_desc.endswith('.'):
        test_case = test_desc
      elif test_case and test_desc:
        test = testcase.TestCase(self, test_case + test_desc)
        tests.append(test)

    tests.sort(key=lambda t: t.path)
    return tests

  def GetFlagsForTestCase(self, testcase, context):
    return (testcase.flags + ["--gtest_filter=" + testcase.path] +
            ["--gtest_random_seed=%s" % context.random_seed] +
            ["--gtest_print_time=0"] +
            context.mode_flags)

  def _VariantGeneratorFactory(self):
    # gtest binaries only run the standard variant.
    return StandardVariantGenerator

  def shell(self):
    # The gtest binary is named after the suite itself.
    return self.name
| [
"[email protected]"
]
| |
47f0c143537b99ffb3e3284dfa05d57756f3b79d | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_1_1_neat/16_1_1_DanielBraithwaite_last_word.py | f312da31370a6413f77cbed11a1a43de2b88ee5d | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 554 | py | def last_word(s):
w = s[0]
for i in range(1,len(s)):
si = ord(w[0])
ei = ord(w[len(w)-1])
ci = ord(s[i])
if ci >= si:
w = s[i] + w
else:
w = w + s[i]
return w
# Driver: read the Code Jam input file and emit one "Case #i: answer" line
# per test case into output.txt.
o = open('output.txt', 'w+')
f = open('A-large.in', 'r+')
##f = open('test.txt', 'r+')
N = int(f.readline())  # first line holds the number of test cases
for i in range(N):
    s = f.readline().strip()
    res = last_word(s)
    print(res)  # echo the answer for quick eyeballing while it runs
    o.write("Case #" + str(i + 1) + ": " + str(res) + "\n")
f.close()
o.close()
| [
"[[email protected]]"
]
| |
b100512b577573496e2a4b06b4ecba61f76b9160 | 0cb8f0f422c790e75da3d3e4d277390039a72637 | /assignment1/question.py | f461ff4af675756623bb5336d0721daddea9cf3d | []
| no_license | curow/CS231N | aa62812bb5698e5b10856dd8f700f56bca2e980a | feafeee3081e22cfc23f5643d71f45e0e6e636f4 | refs/heads/master | 2021-09-04T17:44:25.291119 | 2018-01-20T16:57:51 | 2018-01-20T16:57:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | def svm_loss_vectorized(W, X, y, reg):
"""
Structured SVM loss function, vectorized implementation.
Inputs and outputs are the same as svm_loss_naive.
"""
loss = 0.0
dW = np.zeros(W.shape) # initialize the gradient as zero
# transpose X and W
# X.shape will be (D,N)
# W.shape will be (C,D)
X = X.T
W = W.T
dW = dW.T
num_train = X.shape[1]
# W_y shape from (N,D) to (D,N)
W_y = W[y].T
S_y = np.sum(W_y*X ,axis=0)
margins = np.dot(W,X) + 1 - S_y
mask = np.array(margins>0)
# get the value of num_train examples made on W's gradient
# that is,only when the mask is positive
# the train example has impact on W's gradient
dW_j = np.dot(mask, X.T)
dW += dW_j
mul_mask = np.sum(mask, axis=0, keepdims=True).T
# dW[y] -= mul_mask * X.T
dW_y = mul_mask * X.T
for i,label in enumerate(y):
dW[label] -= dW_y[i]
loss = np.sum(margins*mask) - num_train
loss /= num_train
dW /= num_train
# add regularization term
loss += reg * np.sum(W*W)
dW += reg * 2 * W
dW = dW.T
return loss, dW | [
"[email protected]"
]
| |
0331c7d64d9c4561496104cd1e73f30ef345945b | 403a8c7d9ba2956c3f5873d0721921e0d8ae7c65 | /tests/test_cli.py | 13cee7104e6617fae0738f12d22c77060adeb91a | [
"MIT"
]
| permissive | kazhala/fzf.aws | b0c83f0ac47f1b2da0d0b064d6a688ba2e69028c | 4abefb2301f7b489b11ed3f0b303faafa5941d5b | refs/heads/master | 2021-07-05T00:50:12.632284 | 2021-05-25T23:09:51 | 2021-05-25T23:09:51 | 242,327,229 | 68 | 3 | MIT | 2021-03-25T23:42:00 | 2020-02-22T11:09:11 | Python | UTF-8 | Python | false | false | 3,065 | py | from botocore.exceptions import ClientError
from fzfaws.utils.exceptions import InvalidFileType
import os
from fzfaws.utils.fileloader import FileLoader
import unittest
from unittest.mock import patch
from fzfaws.cli import main, copy_config
import sys
import io
from pathlib import Path
import tempfile
class TestCLI(unittest.TestCase):
    """Tests for the fzfaws CLI entry point (argument parsing/dispatch).

    stdout is redirected into a StringIO for the duration of each test so
    argparse help/usage output can be asserted on.
    """

    def setUp(self):
        # NOTE(review): attribute name "capturedOuput" is a typo for
        # "capturedOutput" but is kept as-is (renaming would be a code change).
        self.capturedOuput = io.StringIO()
        sys.stdout = self.capturedOuput
        # Load the repo's sample config so defaults are deterministic.
        config_path = Path(__file__).resolve().parent.joinpath("../fzfaws/fzfaws.yml")
        fileloader = FileLoader()
        fileloader.load_config_file(config_path=str(config_path))

    def tearDown(self):
        # Restore the real stdout captured in setUp.
        sys.stdout = sys.__stdout__

    @patch("fzfaws.cli.s3")
    @patch("fzfaws.cli.ec2")
    @patch("fzfaws.cli.cloudformation")
    def test_subparser(self, mocked_cloudformation, mocked_ec2, mocked_s3):
        """Each subcommand must be routed to its handler with the remaining
        argv (plus any defaults injected from the config file)."""
        sys.argv = [__file__, "cloudformation", "-h"]
        main()
        mocked_cloudformation.assert_called_once_with(["-h"])
        sys.argv = [__file__, "ec2", "ssh", "-A"]
        main()
        mocked_ec2.assert_called_once_with(["ssh", "-A"])
        mocked_ec2.reset_mock()
        # "--wait" is appended from the config-file defaults.
        sys.argv = [__file__, "ec2", "start"]
        main()
        mocked_ec2.assert_called_once_with(["start", "--wait"])
        # "--hidden" is appended from the config-file defaults.
        sys.argv = [__file__, "s3", "download"]
        main()
        mocked_s3.assert_called_once_with(["download", "--hidden"])

    @patch("fzfaws.cli.copy_config")
    def test_parser(self, mocked_copy):
        """-h, --copy-config and no-args all exit, printing usage text."""
        sys.argv = [__file__, "-h"]
        self.assertRaises(SystemExit, main)
        self.assertRegex(
            self.capturedOuput.getvalue(), r"usage: fzfaws .*",
        )
        sys.argv = [__file__, "--copy-config"]
        self.assertRaises(SystemExit, main)
        mocked_copy.assert_called_once()
        # Reset the capture buffer before asserting on the no-args output.
        self.capturedOuput.truncate(0)
        self.capturedOuput.seek(0)
        sys.argv = [__file__]
        self.assertRaises(SystemExit, main)
        self.assertRegex(self.capturedOuput.getvalue(), r"^usage: fzfaws \[-h\].*")

    def test_copy_config(self):
        """copy_config must place fzfaws.yml under $XDG_CONFIG_HOME/fzfaws."""
        with tempfile.TemporaryDirectory() as tmpdirname:
            os.environ["XDG_CONFIG_HOME"] = tmpdirname
            copy_config()
            if not Path("%s/fzfaws/fzfaws.yml" % tmpdirname).is_file():
                self.fail("config file not properly copied")

    @patch("fzfaws.cli.get_default_args")
    def test_exceptions(self, mocked_args):
        """Every exception raised during dispatch must end in SystemExit."""
        mocked_args.side_effect = InvalidFileType
        sys.argv = [__file__, "s3"]
        self.assertRaises(SystemExit, main)
        self.assertEqual(
            self.capturedOuput.getvalue(), "Selected file is not a valid file type\n"
        )
        mocked_args.side_effect = SystemExit
        sys.argv = [__file__, "s3"]
        self.assertRaises(SystemExit, main)
        mocked_args.side_effect = KeyboardInterrupt
        sys.argv = [__file__, "s3"]
        self.assertRaises(SystemExit, main)
        # NOTE(review): raising the bare ClientError class invokes its
        # zero-arg constructor; confirm this exercises the intended path.
        mocked_args.side_effect = ClientError
        sys.argv = [__file__, "s3"]
        self.assertRaises(SystemExit, main)
| [
"[email protected]"
]
| |
40d60b41be552dcfd2df4f67bf167172d1075756 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_classification/coral-cnn_ID1064_for_PyTorch/model-code/cacd-coral.py | 45795ac09a7b420de8ae37d8e096ba753a70016d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 16,909 | py | # coding: utf-8
#
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
#############################################
# Consistent Cumulative Logits with ResNet-34
#############################################
# Imports
import os
import time
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
import argparse
import sys
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torchvision import transforms
from PIL import Image
import torch.npu
import os
NPU_CALCULATE_DEVICE = 0
if os.getenv('NPU_CALCULATE_DEVICE') and str.isdigit(os.getenv('NPU_CALCULATE_DEVICE')):
NPU_CALCULATE_DEVICE = int(os.getenv('NPU_CALCULATE_DEVICE'))
if torch.npu.current_device() != NPU_CALCULATE_DEVICE:
torch.npu.set_device(f'npu:{NPU_CALCULATE_DEVICE}')
torch.backends.cudnn.deterministic = True
TRAIN_CSV_PATH = './cacd_train.csv'
VALID_CSV_PATH = './cacd_valid.csv'
TEST_CSV_PATH = './cacd_test.csv'
IMAGE_PATH = '/shared_datasets/CACD/centercropped/jpg'
# Argparse helper
parser = argparse.ArgumentParser()
parser.add_argument('--cuda',
type=int,
default=-1)
parser.add_argument('--seed',
type=int,
default=-1)
parser.add_argument('--numworkers',
type=int,
default=3)
parser.add_argument('--outpath',
type=str,
required=True)
parser.add_argument('--imp_weight',
type=int,
default=0)
args = parser.parse_args()
NUM_WORKERS = args.numworkers
if args.cuda >= 0:
DEVICE = torch.device(f'npu:{NPU_CALCULATE_DEVICE}')
else:
DEVICE = torch.device(f'npu:{NPU_CALCULATE_DEVICE}')
if args.seed == -1:
RANDOM_SEED = None
else:
RANDOM_SEED = args.seed
IMP_WEIGHT = args.imp_weight
PATH = args.outpath
if not os.path.exists(PATH):
os.mkdir(PATH)
LOGFILE = os.path.join(PATH, 'training.log')
TEST_PREDICTIONS = os.path.join(PATH, 'test_predictions.log')
TEST_ALLPROBAS = os.path.join(PATH, 'test_allprobas.tensor')
# Logging
header = []
header.append('PyTorch Version: %s' % torch.__version__)
header.append('CUDA device available: %s' % torch.npu.is_available())
header.append('Using CUDA device: %s' % DEVICE)
header.append('Random Seed: %s' % RANDOM_SEED)
header.append('Task Importance Weight: %s' % IMP_WEIGHT)
header.append('Output Path: %s' % PATH)
header.append('Script: %s' % sys.argv[0])
with open(LOGFILE, 'w') as f:
for entry in header:
print(entry)
f.write('%s\n' % entry)
f.flush()
##########################
# SETTINGS
##########################
# Hyperparameters
learning_rate = 0.0005
num_epochs = 200
# Architecture
NUM_CLASSES = 49
BATCH_SIZE = 256
GRAYSCALE = False
df = pd.read_csv(TRAIN_CSV_PATH, index_col=0)
ages = df['age'].values
del df
ages = torch.tensor(ages, dtype=torch.float)
def task_importance_weights(label_array):
    """Compute CORAL task-importance weights from the label distribution.

    For each ordinal threshold t the weight is
    sqrt(max(#[y > t], #[y <= t])), normalized by the largest weight so
    values lie in (0, 1].

    NOTE(review): the loop runs over arange(min, max), i.e. max - min
    steps, while ``m`` has one slot per *unique* label; with contiguous
    labels the last slot keeps its zero initialization. Confirm this is
    intended before changing it.
    """
    uniq = torch.unique(label_array)
    num_examples = label_array.size(0)
    m = torch.zeros(uniq.shape[0])
    for i, t in enumerate(torch.arange(torch.min(uniq), torch.max(uniq))):
        # m_k = larger of (count above threshold, count at-or-below threshold)
        m_k = torch.max(torch.tensor([label_array[label_array > t].size(0),
                                      num_examples - label_array[label_array > t].size(0)]))
        m[i] = torch.sqrt(m_k.float())
    imp = m/torch.max(m)
    return imp
# Data-specific scheme
if not IMP_WEIGHT:
imp = torch.ones(NUM_CLASSES-1, dtype=torch.float)
elif IMP_WEIGHT == 1:
imp = task_importance_weights(ages)
imp = imp[0:NUM_CLASSES-1]
else:
raise ValueError('Incorrect importance weight parameter.')
imp = imp.to(f'npu:{NPU_CALCULATE_DEVICE}')
###################
# Dataset
###################
class CACDDataset(Dataset):
    """Custom Dataset for loading CACD face images.

    Each item is ``(image, label, levels)`` where *levels* is the CORAL
    ordinal encoding of the age label: a float vector with ones in the
    first ``label`` positions and zeros in the remaining
    ``NUM_CLASSES - 1 - label`` positions.
    """
    def __init__(self,
                 csv_path, img_dir, transform=None):
        # The CSV must provide 'file' (image path relative to img_dir)
        # and 'age' (integer label) columns.
        df = pd.read_csv(csv_path, index_col=0)
        self.img_dir = img_dir
        self.csv_path = csv_path
        self.img_names = df['file'].values
        self.y = df['age'].values
        self.transform = transform
    def __getitem__(self, index):
        img = Image.open(os.path.join(self.img_dir,
                                      self.img_names[index]))
        if self.transform is not None:
            img = self.transform(img)
        label = self.y[index]
        # Ordinal level vector: label k -> k ones followed by zeros.
        levels = [1]*label + [0]*(NUM_CLASSES - 1 - label)
        levels = torch.tensor(levels, dtype=torch.float32)
        return img, label, levels
    def __len__(self):
        return self.y.shape[0]
custom_transform = transforms.Compose([transforms.Resize((128, 128)),
transforms.RandomCrop((120, 120)),
transforms.ToTensor()])
train_dataset = CACDDataset(csv_path=TRAIN_CSV_PATH,
img_dir=IMAGE_PATH,
transform=custom_transform)
custom_transform2 = transforms.Compose([transforms.Resize((128, 128)),
transforms.CenterCrop((120, 120)),
transforms.ToTensor()])
test_dataset = CACDDataset(csv_path=TEST_CSV_PATH,
img_dir=IMAGE_PATH,
transform=custom_transform2)
valid_dataset = CACDDataset(csv_path=VALID_CSV_PATH,
img_dir=IMAGE_PATH,
transform=custom_transform2)
train_loader = DataLoader(dataset=train_dataset,
batch_size=BATCH_SIZE,
shuffle=True,
num_workers=NUM_WORKERS)
valid_loader = DataLoader(dataset=valid_dataset,
batch_size=BATCH_SIZE,
shuffle=False,
num_workers=NUM_WORKERS)
test_loader = DataLoader(dataset=test_dataset,
batch_size=BATCH_SIZE,
shuffle=False,
num_workers=NUM_WORKERS)
##########################
# MODEL
##########################
def conv3x3(in_planes, out_planes, stride=1):
    """Build a bias-free 3x3 convolution.

    padding=1 keeps the spatial size unchanged at stride 1 and halves it
    (rounded up) at stride 2.
    """
    conv = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
    return conv
class BasicBlock(nn.Module):
    """Two-conv residual block (ResNet-18/34 style): out = relu(F(x) + x)."""
    expansion = 1
    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        # Optional 1x1 projection applied to the shortcut when the
        # shape or stride of the main path differs from the input.
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            # Project the identity path so shapes match for the addition.
            residual = self.downsample(x)
        out += residual
        out = self.relu(out)
        return out
class ResNet(nn.Module):
    """ResNet backbone with a CORAL ordinal-regression head.

    Instead of a num_classes-way softmax, the head is a single shared
    bias-free linear unit (``fc``) plus ``num_classes - 1`` independent
    bias terms (``linear_1_bias``), producing one logit per ordinal
    threshold; forward returns ``(logits, probas)``.
    """
    def __init__(self, block, layers, num_classes, grayscale):
        self.num_classes = num_classes
        self.inplanes = 64
        # One input channel for grayscale images, three for RGB.
        if grayscale:
            in_dim = 1
        else:
            in_dim = 3
        super(ResNet, self).__init__()
        self.conv1 = nn.Conv2d(in_dim, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        # 4x4 average pool; sized for the 120x120 crops produced by the
        # transforms above (120 -> 60 -> 30 -> 15 -> 8 -> 4).
        self.avgpool = nn.AvgPool2d(4)
        # CORAL head: shared weight vector + per-threshold biases.
        self.fc = nn.Linear(512, 1, bias=False)
        self.linear_1_bias = nn.Parameter(torch.zeros(self.num_classes-1).float())
        # He-style init for convs; unit-scale / zero-shift init for BN.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, (2. / n)**.5)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
    def _make_layer(self, block, planes, blocks, stride=1):
        # Add a 1x1 conv + BN on the shortcut when the stride or channel
        # count changes, so the residual addition is shape-compatible.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        # Single shared logit per example, broadcast against the
        # num_classes-1 per-threshold biases -> (batch, num_classes-1).
        logits = self.fc(x)
        logits = logits + self.linear_1_bias
        probas = torch.sigmoid(logits)
        return logits, probas
def resnet34(num_classes, grayscale):
    """Constructs a ResNet-34 model.

    num_classes: number of ordinal classes; the CORAL head emits
        num_classes - 1 threshold logits.
    grayscale: if True, build the stem for 1-channel input instead of 3.
    """
    model = ResNet(block=BasicBlock,
                   layers=[3, 4, 6, 3],
                   num_classes=num_classes,
                   grayscale=grayscale)
    return model
###########################################
# Initialize Cost, Model, and Optimizer
###########################################
def cost_fn(logits, levels, imp):
    """CORAL ordinal-regression loss.

    Importance-weighted binary cross-entropy summed over the K-1
    threshold tasks, averaged over the batch.
    """
    log_p = F.logsigmoid(logits)
    # logsigmoid(x) - x == log(1 - sigmoid(x)), computed stably.
    log_not_p = log_p - logits
    per_task = (log_p * levels + log_not_p * (1 - levels)) * imp
    per_example = -torch.sum(per_task, dim=1)
    return torch.mean(per_example)
torch.manual_seed(RANDOM_SEED)
torch.npu.manual_seed(RANDOM_SEED)
model = resnet34(NUM_CLASSES, GRAYSCALE)
model.to(f'npu:{NPU_CALCULATE_DEVICE}')
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
def compute_mae_and_mse(model, data_loader, device):
    """Return (MAE, MSE) of predicted vs. true age over *data_loader*.

    Predicted rank = number of threshold probabilities above 0.5
    (standard CORAL decoding).
    NOTE(review): the *device* argument is ignored; tensors are moved to
    the globally configured NPU device instead -- confirm intended.
    """
    mae, mse, num_examples = 0, 0, 0
    for i, (features, targets, levels) in enumerate(data_loader):
        features = features.to(f'npu:{NPU_CALCULATE_DEVICE}')
        targets = targets.to(f'npu:{NPU_CALCULATE_DEVICE}')
        logits, probas = model(features)
        # Count thresholds passed -> ordinal label in [0, NUM_CLASSES-1].
        predict_levels = probas > 0.5
        predicted_labels = torch.sum(predict_levels, dim=1)
        num_examples += targets.size(0)
        mae += torch.sum(torch.abs(predicted_labels - targets))
        mse += torch.sum((predicted_labels - targets)**2)
    mae = mae.float() / num_examples
    mse = mse.float() / num_examples
    return mae, mse
start_time = time.time()
best_mae, best_rmse, best_epoch = 999, 999, -1
for epoch in range(num_epochs):
model.train()
for batch_idx, (features, targets, levels) in enumerate(train_loader):
features = features.to(f'npu:{NPU_CALCULATE_DEVICE}')
targets = targets
targets = targets.to(f'npu:{NPU_CALCULATE_DEVICE}')
levels = levels.to(f'npu:{NPU_CALCULATE_DEVICE}')
# FORWARD AND BACK PROP
logits, probas = model(features)
cost = cost_fn(logits, levels, imp)
optimizer.zero_grad()
cost.backward()
# UPDATE MODEL PARAMETERS
optimizer.step()
# LOGGING
if not batch_idx % 50:
s = ('Epoch: %03d/%03d | Batch %04d/%04d | Cost: %.4f'
% (epoch+1, num_epochs, batch_idx,
len(train_dataset)//BATCH_SIZE, cost))
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
model.eval()
with torch.set_grad_enabled(False):
valid_mae, valid_mse = compute_mae_and_mse(model, valid_loader,
device=DEVICE)
if valid_mae < best_mae:
best_mae, best_rmse, best_epoch = valid_mae, torch.sqrt(valid_mse), epoch
########## SAVE MODEL #############
torch.save(model.state_dict(), os.path.join(PATH, 'best_model.pt'))
s = 'MAE/RMSE: | Current Valid: %.2f/%.2f Ep. %d | Best Valid : %.2f/%.2f Ep. %d' % (
valid_mae, torch.sqrt(valid_mse), epoch, best_mae, best_rmse, best_epoch)
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
s = 'Time elapsed: %.2f min' % ((time.time() - start_time)/60)
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
model.eval()
with torch.set_grad_enabled(False): # save memory during inference
train_mae, train_mse = compute_mae_and_mse(model, train_loader,
device=DEVICE)
valid_mae, valid_mse = compute_mae_and_mse(model, valid_loader,
device=DEVICE)
test_mae, test_mse = compute_mae_and_mse(model, test_loader,
device=DEVICE)
s = 'MAE/RMSE: | Train: %.2f/%.2f | Valid: %.2f/%.2f | Test: %.2f/%.2f' % (
train_mae, torch.sqrt(train_mse),
valid_mae, torch.sqrt(valid_mse),
test_mae, torch.sqrt(test_mse))
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
s = 'Total Training Time: %.2f min' % ((time.time() - start_time)/60)
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
########## EVALUATE BEST MODEL ######
model.load_state_dict(torch.load(os.path.join(PATH, 'best_model.pt')))
model.eval()
with torch.set_grad_enabled(False):
train_mae, train_mse = compute_mae_and_mse(model, train_loader,
device=DEVICE)
valid_mae, valid_mse = compute_mae_and_mse(model, valid_loader,
device=DEVICE)
test_mae, test_mse = compute_mae_and_mse(model, test_loader,
device=DEVICE)
s = 'MAE/RMSE: | Best Train: %.2f/%.2f | Best Valid: %.2f/%.2f | Best Test: %.2f/%.2f' % (
train_mae, torch.sqrt(train_mse),
valid_mae, torch.sqrt(valid_mse),
test_mae, torch.sqrt(test_mse))
print(s)
with open(LOGFILE, 'a') as f:
f.write('%s\n' % s)
########## SAVE PREDICTIONS ######
all_pred = []
all_probas = []
with torch.set_grad_enabled(False):
for batch_idx, (features, targets, levels) in enumerate(test_loader):
features = features.to(f'npu:{NPU_CALCULATE_DEVICE}')
logits, probas = model(features)
all_probas.append(probas)
predict_levels = probas > 0.5
predicted_labels = torch.sum(predict_levels, dim=1)
lst = [str(int(i)) for i in predicted_labels]
all_pred.extend(lst)
torch.save(torch.cat(all_probas).to(f'npu:{NPU_CALCULATE_DEVICE}'), TEST_ALLPROBAS)
with open(TEST_PREDICTIONS, 'w') as f:
all_pred = ','.join(all_pred)
f.write(all_pred)
| [
"[email protected]"
]
| |
536610ba716a9b8715ef45dffd40ac555213c201 | 1dcea2a511f14a43701994f6a7785afd21a20d74 | /Algorithm/61_RotateList.py | ad33302d777bb10f3daee4eb21b7a5bd9a4a46c7 | []
| no_license | lingtianwan/Leetcode2 | 66031e256a2928c6197516f83f14748c52e91b8c | 80a604cc09d5d2d62dd05157d8b829de675e4404 | refs/heads/master | 2021-01-13T11:17:18.238465 | 2017-02-09T01:43:38 | 2017-02-09T01:43:38 | 81,395,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | # Given a list, rotate the list to the right by k places, where k is non-negative.
#
# For example:
# Given 1->2->3->4->5->NULL and k = 2,
# return 4->5->1->2->3->NULL.
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def rotateRight(self, head, k):
        """Rotate a singly linked list right by k places.

        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        if head is None:
            return None
        # One pass to measure the length and find the tail.
        length = 1
        tail = head
        while tail.next:
            tail = tail.next
            length += 1
        shift = k % length
        if shift == 0:
            return head
        # The new tail sits (length - shift - 1) hops from the old head.
        new_tail = head
        for _ in range(length - shift - 1):
            new_tail = new_tail.next
        new_head = new_tail.next
        new_tail.next = None
        tail.next = head
        return new_head
| [
"[email protected]"
]
| |
278269ce0336906a35c4a57f21cb02693fa64334 | 28b098b11832f1f0d06afe498cf76a64a9a90750 | /backend/thejacobblog_24666/settings.py | 53c464969db683b34de86ff561e4bcb4b06f15ed | []
| no_license | crowdbotics-apps/thejacobblog-24666 | 4fa5ca6c37a47bfcfd325a2a421a6ca0f53ad9f6 | d53ba1565a19577f391422d90f41e8ab6a7f9eb0 | refs/heads/master | 2023-03-18T22:33:56.488920 | 2021-02-22T23:34:51 | 2021-02-22T23:34:51 | 341,368,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,041 | py | """
Django settings for thejacobblog_24666 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'modules',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'thejacobblog_24666.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'thejacobblog_24666.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
]
| |
7f0416307b8c694260d09aa9e48f3b5b5eef0c40 | 71460476c5f5ebdca719def124f1a0650861fdab | /mint_work/custom/pos_order_history_type/models/pos_sales_multi_report.py | af374a7c0fa77f540ef7d46732c590d9499bb3eb | []
| no_license | merdhah/dubai_work | fc3a70dc0b1db6df19c825a3bf1eef2a373d79c0 | e24eb12b276a4cd5b47a4bd5470d915179872a4f | refs/heads/master | 2022-01-07T11:22:07.628435 | 2018-10-17T13:37:24 | 2018-10-17T13:37:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,051 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Bista Solutions Pvt. Ltd
# Copyright (C) 2018 (http://www.bistasolutions.com)
#
##############################################################################
from odoo import models, fields, api, _
from odoo.exceptions import UserError
class PosSalesReportProductType(models.TransientModel):
    """Wizard that prints a POS sales report filtered by product type
    and, optionally, by order state."""
    _name = 'pos.sales.report.type'
    company_id = fields.Many2one('res.company', default=lambda self: self.env.user.company_id.id)
    type = fields.Selection([
        ('consu', 'Consumable'),
        ('service', 'Service'),
        ('product', 'Stockable Product')], string = 'Product Type',
        default = 'consu',
        help = 'A stockable product is a product for which you manage stock. The "Inventory" app has to be installed.\n'
               'A consumable product, on the other hand, is a product for which stock is not managed.\n'
               'A service is a non-material product you provide.\n'
               'A digital content is a non-material product you sell online. The files attached to the products are the one that are sold on '
               'the e-commerce such as e-books, music, pictures,... The "Digital Product" module has to be installed.')
    state = fields.Selection(
        [('draft', 'New'), ('cancel', 'Cancelled'), ('paid', 'Paid'), ('done', 'Posted'), ('invoiced', 'Invoiced')],
        'State')
    # This method is called from the wizard; it gathers all POS order
    # lines whose product matches the selected type (and state, if set)
    # and renders the QWeb report.
    @api.multi
    def sales_order_report_type(self):
        """Render the sales report for the selected product type.

        :raises UserError: when no order line matches the filters.
        """
        self.ensure_one()
        data = {
            'ids': self.id,
            'model': 'pos.sales.report',
            'form': self.read()[0],
        }
        # Bug fix: 'order' is a reserved SQL keyword, so the alias must
        # be quoted.  Filter values are passed as bind parameters rather
        # than interpolated into the SQL string (avoids SQL injection).
        query = """
            select po.name as "order", pt.name, pp.barcode, pol.qty, pol.price_unit
            from pos_order_line pol
            left join pos_order po ON (po.id = pol.order_id)
            left join product_product pp ON (pp.id = pol.product_id)
            left join product_template pt ON (pt.id = pp.product_tmpl_id)
            where pt.type = %s"""
        params = [self.type]
        if self.state:
            query += " and po.state = %s"
            params.append(self.state)
        self.env.cr.execute(query, params)
        result = self._cr.dictfetchall()
        if not result:
            raise UserError(
                _('There is no Record related to this Product Type.'))
        partner = self.company_id.partner_id
        data.update({
            'company_logo': self.company_id.logo,
            'company_name': partner.name,
            'company_street': partner.street,
            'company_street2': partner.street2,
            'company_city': partner.city,
            'company_state_id': partner.state_id.name,
            'company_country_id': partner.country_id.name,
            'company_zip': partner.zip,
            'company_phone': partner.phone,
            'company_mobile': partner.mobile,
            'company_fax': partner.fax,
            'company_email': partner.email,
            'company_website': partner.website,
            'product_type_name': self.type,
            'lines': result,
        })
        return self.env['report'].get_action(
            self, 'pos_order_history_type.report_sale_orders_type', data=data)
class ReportPOSSaleOrderProductTypeMulti(models.AbstractModel):
    # Report model backing the QWeb template; Odoo resolves it via the
    # 'report.<module>.<template>' naming convention.
    _name = 'report.pos_order_history_type.report_sale_orders_type'
    @api.multi
    def render_html(self, docids, data=None):
        # Render the template with the wizard-supplied data dict
        # (an empty dict when no data is passed).
        return self.env['report'].render('pos_order_history_type.report_sale_orders_type', dict(data or {}))
| [
"[email protected]"
]
| |
3d60ed7e99f218433773775f5e56aec334e9fb8d | 52b2e3470cd4b91975b2e1caed8d1c93c20e5d05 | /tools/parsertools/testbuffer.py | f8f8dd1bfbe81182baf42dae60fe879de314da4a | []
| no_license | xprime480/projects | c2f9a82bbe91e00859568dc27ae17c3b5dd873e3 | 3c5eb2d53bd7fa198edbe27d842ee5b5ff56e226 | refs/heads/master | 2020-04-27T03:51:29.456979 | 2019-04-12T14:34:39 | 2019-04-12T14:34:39 | 174,037,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py |
class TestBuffer(object):
    """Sequential token buffer with one-token lookahead.

    Note: get() only advances past *truthy* tokens; a falsy token
    (e.g. 0 or '') is returned but never consumed.
    """

    def __init__(self, tokens):
        self.tokens = tokens[:]
        self.size = len(self.tokens)
        self.index = 0

    def get(self):
        """Return the current token and advance past it (if truthy)."""
        token = self.peek()
        if token:
            self.index += 1
        return token

    def peek(self):
        """Return the current token without consuming it, or None at end."""
        if self.index < self.size:
            return self.tokens[self.index]
        return None
| [
"[email protected]"
]
| |
df6bb199aa33221b02a071eebb1a06018e2600d3 | e35fd52fe4367320024a26f2ee357755b5d5f4bd | /Chapter 3 - Stacks and Queues/linkedlist.py | b949d5439e2e9c0378f1d78a0e7ded42d00de4ba | []
| no_license | liseyko/CtCI | a451967b0a0ce108c491d30b81e88d20ad84d2cd | c27f19fac14b4acef8c631ad5569e1a5c29e9e1f | refs/heads/master | 2020-03-21T14:28:47.621481 | 2019-11-12T22:59:07 | 2019-11-12T22:59:07 | 138,658,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,690 | py | from random import randint
class Node():
    """A single element of a singly linked list."""
    def __init__(self, data=None, next=None):
        # 'next' mirrors the list-node convention; it shadows the
        # builtin but is kept for backward compatibility with callers.
        self.next = next
        self.data = data
    def __str__(self):
        return str(self.data)
class LinkedList():
    """Singly linked list of Node objects with a cached length.

    ``self.len`` tracks the element count and is kept in sync by every
    mutator (several of the original methods failed to do so).
    """
    def __init__(self, lst=()):
        # Immutable default avoids the shared-mutable-default pitfall;
        # the argument is only iterated, so any reversible iterable works.
        self.head = None
        self.len = 0
        for n in reversed(lst):
            self.insert(n)
    def insert(self, data):
        """Prepend *data* in O(1)."""
        self.head = Node(data, self.head)
        self.len += 1
    def populate(self, q=10, rng=16):
        """Insert random ints in [0, rng] until the list holds q items."""
        while self.len < q:
            self.insert(randint(0, rng))
    def append(self, data):
        """Add *data* at the tail in O(n)."""
        if self.head is None:
            self.head = Node(data)
        else:
            n = self.head
            while n.next is not None:
                n = n.next
            n.next = Node(data)
        # Bug fix: the empty-list branch previously returned before
        # updating the length.
        self.len += 1
    def deleteNode(self, n):
        """Unlink node *n*; return True if found, else False.

        Bug fix: the original could never delete the head node and did
        not update the cached length.
        """
        if self.head is None:
            return False
        if self.head is n:
            self.head = self.head.next
            self.len -= 1
            return True
        cn = self.head
        while cn.next:
            if cn.next is n:
                cn.next = cn.next.next
                self.len -= 1
                return True
            cn = cn.next
        return False
    def deleteNode_fast(self, n):
        """Delete *n* in O(1) by copying its successor's payload."""
        if not n:
            return False
        if not n.next:
            # The tail has no successor to copy from; fall back to O(n).
            return self.deleteNode(n)
        n.data = n.next.data
        n.next = n.next.next
        self.len -= 1  # bug fix: keep the cached length in sync
        return True
    def mkunique(self):
        """Drop duplicate payloads, keeping first occurrences."""
        n = self.head
        if n is None:
            return
        seen = {n.data}
        while n.next:
            if n.next.data in seen:
                n.next = n.next.next
                self.len -= 1
            else:
                seen.add(n.next.data)
                n = n.next
    def print_data(self):
        """Print 'a, b, ..., None', plus a message when the list is empty."""
        n = self.head
        while n:
            print(n.data, end=', ')
            n = n.next
        print(n)
        if not self.head:
            print("The list is empty.")
    def __str__(self):
        return str([node.data for node in self])
    def __iter__(self):
        cur_node = self.head
        while cur_node:
            yield cur_node
            cur_node = cur_node.next
    def __len__(self):
        return self.len
    def deleteNodeByData(self, data):
        """Delete the first node whose payload equals *data*.

        Bug fix: guards the empty list (the original dereferenced
        ``self.head.data`` unconditionally) and maintains ``self.len``.
        """
        if self.head is None:
            return
        if self.head.data == data:
            self.head = self.head.next
            self.len -= 1
            return
        n = self.head
        while n.next is not None:
            if n.next.data == data:
                n.next = n.next.next
                self.len -= 1
                return
            n = n.next
| [
"[email protected]"
]
| |
6e18893137c3c85ef8e273ad56b8170fbe604a00 | e2b9873da7723ef8ae505c4286e4eccbf7416315 | /leagueofclass/cadastros/migrations/0013_remove_atividadesprofessor_teste.py | aca869bbd971ba5dda0a5981a69d7e9f85b5439c | []
| no_license | lucaasaragao/LOC_PAS | 94fc50dd429ce2e9ec71cebdd748f3ff9df1ceac | 22939ab9f7b54ddc6355dce11e55e801e9501327 | refs/heads/master | 2020-03-27T17:57:57.824525 | 2018-11-01T05:22:20 | 2018-11-01T05:22:20 | 146,888,554 | 0 | 1 | null | 2018-10-31T21:37:59 | 2018-08-31T12:23:48 | CSS | UTF-8 | Python | false | false | 349 | py | # Generated by Django 2.0.7 on 2018-09-28 04:37
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: drops the 'teste' field that
    # migration 0012 added to AtividadesProfessor.
    dependencies = [
        # Must run after the migration that introduced the field.
        ('cadastros', '0012_atividadesprofessor_teste'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='atividadesprofessor',
            name='teste',
        ),
    ]
| [
"[email protected]"
]
| |
38a73f29cb633dc9396633c72607a3415fa7ed7e | 0640c53cd62def556a098f1e732deee8c1348c9e | /IIITSERC-ssad_2015_a3_group1-88a823ccd2d0/Abhishek Vinjamoori/DonkeyKongFinal/src/player.py | c284e27402935cc64dd01b598004dd4b0546ffa9 | []
| no_license | anirudhdahiya9/Open-data-projecy | 579867fe8716076819734cebdbc6e15bb471bb39 | 26d629f8348f0110fa84b02009e787a238aff441 | refs/heads/master | 2021-01-10T13:50:19.855983 | 2016-03-23T22:46:03 | 2016-03-23T22:46:03 | 54,598,189 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | import pygame
size=16
class player(person):
    """Playable character sprite: loads and pre-scales the four Mario
    images (standing/walking, facing left/right)."""
    def __init__(self, name, initX, initY):
        # Bug fix: the unbound base-class call was missing ``self``.
        person.__init__(self, name, initX, initY)
        # Right-facing standing sprite, scaled to the tile size.
        self.marioright = pygame.image.load('../images/m1.png')
        self.marioright = pygame.transform.scale(self.marioright, (size, size))
        # Right-facing walking sprite.
        self.mariowalkright = pygame.image.load('../images/m2.png')
        self.mariowalkright = pygame.transform.scale(self.mariowalkright, (size, size))
        # Left-facing variants are horizontal mirrors of the right-facing
        # ones.  Bug fix: the original flip calls referenced bare names
        # (``mariowalkright`` / ``marioleft``) which raised NameError; they
        # must read the freshly built ``self.`` attributes.  The redundant
        # re-scaling of already size x size surfaces was dropped.
        self.mariowalkleft = pygame.transform.flip(self.mariowalkright, 1, 0)
        self.marioleft = pygame.transform.flip(self.marioright, 1, 0)
        # The current sprite starts facing right.
        self.mario = self.marioright
| [
"[email protected]"
]
| |
dc70e874342123a38005f05ad3a80c1ee0045ec1 | 2c69245fa6b65affaa40755785504df4c12dd3b5 | /phraser/tools/fix_header_guards.py | c4547145f126923db909ff79524774271c76e916 | [
"MIT"
]
| permissive | knighton/phraser | 1b711a20193e4722e50d41e0ea11c69dca7bfcef | a4b213260cd9b24fb3052973a1268c021f965ce8 | refs/heads/master | 2021-01-17T09:04:22.561009 | 2016-04-01T21:32:10 | 2016-04-01T21:32:10 | 34,379,599 | 1 | 2 | null | 2016-04-04T21:12:41 | 2015-04-22T08:51:15 | C++ | UTF-8 | Python | false | false | 1,193 | py | #!/usr/bin/python
#
# Fix each .h header guard (useful after moving files around).
import os
DOIT = True
def each_header(root_dir):
    """Yield the path of every file ending in .h beneath *root_dir*."""
    for dirpath, _dirnames, filenames in os.walk(root_dir):
        headers = [fn for fn in filenames if fn.endswith('.h')]
        for fn in headers:
            yield os.path.join(dirpath, fn)
def header_guard_from_file_name(f):
    """Map a header path to its include-guard macro name.

    './a/b.h' -> 'A_B_H_'.  Only the *trailing* '.h' is treated as the
    extension; the original code replaced every '.h' occurrence in the
    path, which corrupted guards for paths containing '.h' elsewhere
    (e.g. a directory named 'x.handlers').  Remaining dots are mapped to
    underscores so the result is always a valid macro name.
    """
    if f.startswith('./'):
        f = f[2:]
    if f.endswith('.h'):
        f = f[:-2] + '_H_'
    return f.replace('/', '_').replace('.', '_').upper()
def fix_header_guards(root_dir):
    """Rewrite each header's include guard to match its path (Python 2).

    Assumes the very first token of every header is '#ifndef'; a file
    with leading comments (or an empty file) trips the assert below and
    aborts the whole run.
    """
    for fn in each_header(root_dir):
        new_header = header_guard_from_file_name(fn)
        text = open(fn).read()
        ss = text.split()
        try:
            assert ss[0] == '#ifndef'
        except:
            # Bare except also catches IndexError on empty files; the
            # assert False aborts the entire run on the first bad file.
            print 'WTF:', fn
            assert False
        old_header = ss[1]
        if old_header != new_header:
            if DOIT:
                # replace() rewrites every occurrence, which also updates
                # the matching '#define' and the '#endif' comment.
                open(fn, 'wb').write(text.replace(old_header, new_header))
            else:
                # Dry-run mode: report what would change without writing.
                print 'Would change a header:'
                print '  file:', fn
                print '  old: ', old_header
                print '  new: ', new_header
def main():
    """Fix include guards for every header under the current directory."""
    fix_header_guards('.')
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
64bdac7133fc0b26f17c3e74ef60a624dea2bf9a | f4b79529109fbb4055f334d0d9c7c96cb0710447 | /colour/utilities/tests/test_deprecated.py | bfbb70c00812c9381bc5e3c93242eec9a75ef368 | [
"BSD-3-Clause"
]
| permissive | trevorandersen/colour | 167381b3d03e506a270a8d2a519a164808995437 | 02b595b26313c4b4f55adc41d599f90c4c9edbcd | refs/heads/develop | 2021-07-15T04:48:19.585586 | 2021-01-23T23:51:44 | 2021-01-23T23:51:44 | 230,421,054 | 0 | 0 | BSD-3-Clause | 2019-12-28T12:54:20 | 2019-12-27T10:10:30 | null | UTF-8 | Python | false | false | 962 | py | # -*- coding: utf-8 -*-
import sys
from colour.utilities.deprecation import (ModuleAPI, ObjectRenamed,
ObjectRemoved)
class deprecated(ModuleAPI):
    """Module-API wrapper used to exercise the deprecation machinery.

    Attribute lookups on the replaced module go through ModuleAPI's
    __getattr__; this override simply delegates to the base class.
    """
    def __getattr__(self, attribute):
        return super(deprecated, self).__getattr__(attribute)
NAME = None
"""
An non-deprecated module attribute.
NAME : object
"""
NEW_NAME = None
"""
A module attribute with a new name.
NAME : object
"""
sys.modules['colour.utilities.tests.test_deprecated'] = (deprecated(
sys.modules['colour.utilities.tests.test_deprecated'], {
'OLD_NAME':
ObjectRenamed(
name='colour.utilities.tests.test_deprecated.OLD_NAME',
new_name='colour.utilities.tests.test_deprecated.NEW_NAME'),
'REMOVED':
ObjectRemoved(name='colour.utilities.tests.test_deprecated.REMOVED'
)
}))
del ModuleAPI
del ObjectRenamed
del ObjectRemoved
del sys
| [
"[email protected]"
]
| |
6fd9f967435ec0b4885b32a2c99175b9c69f4247 | c2849586a8f376cf96fcbdc1c7e5bce6522398ca | /ch28/ex28-25.py | d6a7549a80d0c5a59f5e9b81527f0036703e34dc | []
| no_license | freebz/Learning-Python | 0559d7691517b4acb0228d1cc76de3e93915fb27 | 7f577edb6249f4bbcac4f590908b385192dbf308 | refs/heads/master | 2020-09-23T01:48:24.009383 | 2019-12-02T12:26:40 | 2019-12-02T12:26:40 | 225,371,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,775 | py | # 셸브에서 객체 업데이트하기
# updatedb.py 파일: 데이터베이스의 Person 객체를 업데이트
import shelve
db = shelve.open('persondb')  # Re-open the shelve under the same filename
for key in sorted(db):  # Iterate to display the stored database objects
    print(key, '\t=>', db[key])  # Print in the custom format
sue = db['Sue Jones']  # Index by key to fetch a record
sue.giveRaise(.10)  # Update the in-memory object via its class method
db['Sue Jones'] = sue  # Assign back by key so the shelve records the change
db.close()  # Close the database after the changes
# python updatedb.py
# Bob Smith => [Person: job=None, name=Bob Smith, pay=0]
# Sue Jones => [Person: job=dev, name=Sue Jones, pay=100000]
# Tom Jones => [Manager: job=mgr, name=Tom Jones, pay=50000]
# python updatedb.py
# Bob Smith => [Person: job=None, name=Bob Smith, pay=0]
# Sue Jones => [Person: job=dev, name=Sue Jones, pay=110000]
# Tom Jones => [Manager: job=mgr, name=Tom Jones, pay=50000]
# python updatedb.py
# Bob Smith => [Person: job=None, name=Bob Smith, pay=0]
# Sue Jones => [Person: job=dev, name=Sue Jones, pay=121000]
# Tom Jones => [Manager: job=mgr, name=Tom Jones, pay=50000]
# python updatedb.py
# Bob Smith => [Person: job=None, name=Bob Smith, pay=0]
# Sue Jones => [Person: job=dev, name=Sue Jones, pay=133100]
# Tom Jones => [Manager: job=mgr, name=Tom Jones, pay=50000]
# python
import shelve
db = shelve.open('persondb') # 데이터베이스를 다시 염
rec = db['Sue Jones'] # 객체를 키에 의해 가져옴
rec
# [Person: Sue Jones, 146410]
rec.lastName()
# 'Jones'
rec.pay
# 146410
| [
"[email protected]"
]
| |
3e0a856177c0d402b98ed229c8529ec154be6332 | 8419c3c826dd41630e57c6523fe6de79eca2facb | /workshop of python/samples/ex02Main.py | 7176c022b4c56ea5e02bb7f818bec18b4d7d882a | []
| no_license | thomasernste/python | 4d009f5a891fd7c4a3432a42ea94f94379f6d0de | 7a59d2e37626d4de3a3b7e6942363c424798ad46 | refs/heads/master | 2016-09-05T23:17:18.093787 | 2012-04-29T14:38:28 | 2012-04-29T14:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""docstring
"""
__revision__ = '0.1'
import sys,os
def usage():
    """Print the expected command-line usage (the script needs an input file)."""
    # Define the usage helper.
    print "inputFile"
    pass
def error():
    """Show the usage message, then abort the script."""
    usage()
    sys.exit(-1)
    # Force the script to terminate immediately, returning -1 as the error code.
def readLines(inf):
    """Open the file *inf* and print it line by line."""
    for line in open(inf):
        # Open the file inf and read it one line at a time.
        print line
if __name__=="__main__":
    # Require at least one argument (the input file) besides the script name.
    argvNum=2
    if len(sys.argv)<argvNum:
        # sys.argv holds the command-line arguments as a list;
        # len(list) returns the number of elements in that list.
        error()
    print sys.argv[0]
    inf=sys.argv[1]
    # sys.argv is indexed from 0, but sys.argv[0] is this script's own name.
    readLines(inf)
    # Arguments are passed by reference (like pointers in C).
    # If the referenced value is immutable (e.g. str or int), it behaves
    # like pass-by-value.
    # If the referenced value is mutable (e.g. a list), it behaves like
    # pass-by-reference.
| [
"[email protected]"
]
| |
54390514accfef3d5b46666a5f12457557475660 | b75918b2ac1dfaf2c1219f40d63004900c9338b1 | /app/main.py | bb88fce0c41afd8a113ef6dc5777bfc4d1d5a774 | []
| no_license | solashirai/ExplainableCourseRecommender | e0f036da9814a0187daa5635da0ff2f86386026d | 6a2795cfc4536548ac3679b3d23b953e55a50a37 | refs/heads/main | 2023-04-14T14:27:36.054830 | 2021-04-19T02:29:48 | 2021-04-19T02:29:48 | 302,346,189 | 1 | 0 | null | 2021-04-18T16:13:48 | 2020-10-08T13:17:44 | Python | UTF-8 | Python | false | false | 4,531 | py | from flask import Flask, request, abort
import rdflib
from escore.pipeline import RecommendCoursesPipeline
from escore.services.course import GraphCourseQueryService
from escore.services import PlanOfStudyRecommenderService
from escore.utils.path import DATA_DIR
from escore.models import StudentPOSRequirementContext, CourseCandidate, Student, PlanOfStudy
from typing import Tuple
from frex.stores import LocalGraph
app = Flask(__name__)
# for testing locally: load the course knowledge graph from local TTL files.
kg_files = tuple((DATA_DIR / file).resolve() for file in [
    "courses.ttl",
    "scheduled_courses.ttl",
    "rpi_departments.ttl",
    "parsed_grad_requirements.ttl",
    "users.ttl",
])
# In-process RDF graph built from the files above.
COURSEKG_GRAPH = LocalGraph(file_paths=kg_files)
# Alternative remote backend, kept for reference (endpoint not configured):
# COURSEKG_GRAPH = RemoteGraph(
#     sparql_endpoint="?"
# )
# Query service and recommendation pipelines shared by all request handlers.
COURSE_QS = GraphCourseQueryService(queryable=COURSEKG_GRAPH)
PLACEHOLDER_PIPE = RecommendCoursesPipeline(course_query_service=COURSE_QS)
PR_SERVICE = PlanOfStudyRecommenderService(
    course_query_service=COURSE_QS
)
@app.route("/escore_api/")
def hello_world():
return "Hello, World!"
@app.route("/escore_api/dummy_get_rec", methods=["GET"])
def dummy_recommend_courses():
args = request.args
# dummy plan of study and student to test
pos = PlanOfStudy(
uri=rdflib.URIRef('placeholder_pos1'),
class_year=2022,
planned_major=None,
planned_degree=None,
completed_courses=frozenset(),
completed_course_sections=frozenset(),
ongoing_course_sections=frozenset(),
planned_courses=frozenset()
)
student = Student(
uri=rdflib.URIRef('placeholder_stud1'),
study_plan=pos,
name='john doe',
class_year=2022,
topics_of_interest=frozenset(),
registered_courses=frozenset(),
advisor=None,
)
context = StudentPOSRequirementContext(student=student, plan_of_study=pos,
requirements=frozenset(COURSE_QS.get_all_requirements()))
rec_courses: Tuple[CourseCandidate, ...] = PLACEHOLDER_PIPE(context=context)
app.logger.info(f'retrieved recommended courses.')
rec_course_codes = [rc.domain_object.course_code.name for rc in rec_courses]
return {'recommend_course_codes': rec_course_codes}
@app.route("/escore_api/get_recommended_courses_for_student", methods=["GET"])
def get_course_recommendation_for_student():
args = request.args
#https%3A%2F%2Ftw.rpi.edu%2Fontology-engineering%2Foe2020%2Fcourse-recommender-individuals%2Fusrowen
student_uri = rdflib.URIRef(args["student_uri"])
student = COURSE_QS.get_student_by_uri(student_uri=student_uri)
print(f'got student {student.name}')
# will plan of study be saved somehow...? or have person input and pass it via this method...?
# assuming POS will have some structure... ignoring for now since it's not properly used anyways
pos = args.get('plan_of_study', None)
if pos is None:
pos = student.study_plan
print(f'got student plan of study')
context = StudentPOSRequirementContext(student=student, plan_of_study=pos,
requirements=frozenset(COURSE_QS.get_all_requirements()))
rec_courses: Tuple[CourseCandidate, ...] = PLACEHOLDER_PIPE(context=context)
app.logger.info(f'retrieved recommended courses.')
rec_course_codes = [rc.domain_object.course_code.name for rc in rec_courses]
return {'recommend_course_codes': rec_course_codes}
#
# except NotFoundException as e:
# abort(404, description=e)
# except MalformedContentException as e:
# abort(500, description=e)
@app.route("/escore_api/get_pos_rec_for_student", methods=["GET"])
def get_pos_recommendation_for_student():
args = request.args
# ?student_uri=https%3A%2F%2Ftw.rpi.edu%2Fontology-engineering%2Foe2020%2Fcourse-recommender-individuals%2Fusrowen
student_uri = rdflib.URIRef(args["student_uri"])
student = COURSE_QS.get_student_by_uri(student_uri=student_uri)
print(f'got student {student.name}')
pos_rec = PR_SERVICE.get_pos_recommendation_for_target_student(student=student)
rec_sem_courses = {f'{sec.section_object.term} {sec.section_object.year} semester': [cand.domain_object.name
for cand in sec.section_candidates]
for sec in pos_rec.solution_section_sets[1].sections}
return {'recommend_course_per_semester': rec_sem_courses}
if __name__ == '__main__':
    # Development server: listens on all interfaces, port 5000.
    app.run(host='0.0.0.0', port=5000)
| [
"[email protected]"
]
| |
337555e92f42e1fa73ca39f8a52b53366558da88 | bc8f02c870e939359c32d04016f989f0c7526226 | /constraint_adder.py | be8ac25b95b5f8d031b9f6af0994cabbceb480ea | []
| no_license | iamgroot42/gpu_image_segmentation | c53a85267cd373a391c27297ac1befb944067550 | ca97a78a28bf5b76659dcb990a3a312d6d2c6fe3 | refs/heads/master | 2021-01-18T23:46:45.615098 | 2017-10-01T11:35:17 | 2017-10-01T11:35:17 | 80,756,196 | 1 | 0 | null | 2017-10-01T11:35:18 | 2017-02-02T18:37:35 | Cuda | UTF-8 | Python | false | false | 952 | py | import cv2
import sys
object_points = []
background_points = []
counter = 0
data = None
def mouse_callback(event, x, y, flags, params):
global object_points
global background_points
global counter
global data
if event == cv2.EVENT_LBUTTONDOWN:
object_points.append((x, y))
elif event == cv2.EVENT_RBUTTONDOWN:
background_points.append((x, y))
def annotate_images(img_path):
global data
data = cv2.imread(img_path)
cv2.imshow('Image',data)
cv2.setMouseCallback('Image', mouse_callback)
cv2.waitKey(0)
cv2.destroyAllWindows()
def write_points(data, filename):
f = open(filename, 'w')
for point in data:
x,y = point
f.write(str(x) + " " + str(y) + "\n")
f.close()
if __name__ == "__main__":
file_path = sys.argv[1]
print("Left click to label object points")
print("Right click to label background points")
annotate_images(file_path)
write_points(object_points, "OBJECT")
write_points(background_points, "BACKGROUND")
| [
"[email protected]"
]
| |
bce2bf6200fd54e982429be07bc78ceb941aa813 | e0d9844e123fa0706388814b9f29758258589487 | /version_info.py | 0e3295c8309a411c0cfb18f664f84b223ed0a2ca | []
| no_license | pigpigman8686/seg | b5cf5261a5744e89ed5e5b145f60b0ccc3ba2c0c | 61c3816f7ba76243a872fe5c5fc0dede17026987 | refs/heads/master | 2023-04-10T22:22:35.035542 | 2021-04-22T06:24:36 | 2021-04-22T06:24:36 | 360,398,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,701 | py | # UTF-8
#
# For more details about fixed file info 'ffi' see:
# http://msdn.microsoft.com/en-us/library/ms646997.aspx
# PyInstaller Windows version-resource description; evaluated by PyInstaller,
# not imported as a normal module.  The '0.0.0' literals look like unfilled
# template placeholders for the real version string.
VSVersionInfo(
  ffi=FixedFileInfo(
    # filevers and prodvers should be always a tuple with four items: (1, 2, 3, 4)
    # Set not needed items to zero 0. Must always contain 4 elements.
    filevers=(
        int('0.0.0'.split('.')[0]),
        int('0.0.0'.split('.')[1]),
        int('0.0.0'.split('.')[2]),
        0
    ),
    prodvers=(
        int('0.0.0'.split('.')[0]),
        int('0.0.0'.split('.')[1]),
        int('0.0.0'.split('.')[2]),
        0
    ),
    # Contains a bitmask that specifies the valid bits in 'flags'.
    mask=0x3f,
    # Contains a bitmask that specifies the Boolean attributes of the file.
    flags=0x0,
    # The operating system for which this file was designed.
    # 0x4 - NT and there is no need to change it.
    OS=0x40004,
    # The general type of file.
    # 0x1 - the file is an application.
    fileType=0x1,
    # The function of the file.
    # 0x0 - the function is not defined for this fileType
    subtype=0x0,
    # Creation date and time stamp.
    date=(0, 0)
    ),
  kids=[
    StringFileInfo(
      [
      StringTable(
        '040904B0',
        [StringStruct('CompanyName', 'caicy'),
        StringStruct('FileDescription', 'seg'),
        StringStruct('FileVersion', '0.0.0.0'),
        StringStruct('InternalName', 'seg'),
        StringStruct('LegalCopyright', '© caicy. All rights reserved.'),
        StringStruct('OriginalFilename', 'seg.exe'),
        StringStruct('ProductName', 'seg'),
        StringStruct('ProductVersion', '0.0.0.0')])
      ]),
    VarFileInfo([VarStruct('Translation', [1033, 1200])])
  ]
)
"[email protected]"
]
| |
55e40b3bf8071fca05b6467d0f35479817d7a0dd | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_11546.py | 1fc582462bd10e1e682a285ef7047c01e969c822 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20 | py | # export PYTHONPATH
| [
"[email protected]"
]
| |
729ffe87975dbbc443c0865d023fd89fd57f7aa9 | 99e57f00fcaf4469c1c1b79f2d17176aaef9a790 | /sales_forecast/models/sale_allocation.py | cdce00e85495ffde7e478e89044f78b1410f3649 | []
| no_license | detian08/mcl | d007ffd0e869f3bd9a8c74bc8473119901f0de2a | 32d61148326c931aca0107c3894061773f287e33 | refs/heads/master | 2022-03-23T19:36:29.608645 | 2019-12-11T10:15:50 | 2019-12-11T10:15:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,678 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import uuid
from itertools import groupby
from datetime import datetime, timedelta
from werkzeug.urls import url_encode
from odoo import api, fields, models, _,exceptions
from odoo.exceptions import UserError, AccessError
from odoo.osv import expression
from odoo.tools import float_is_zero, float_compare, DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools.misc import formatLang
from odoo.addons import decimal_precision as dp
class SaleAllocation(models.Model):
    """Sales forecast header: a forecasting period with its detail lines."""
    _name = "sale.forecaste"

    # Granularity of the forecast period.
    z_period = fields.Selection([('Monthly','Monthly'),('weekly','weekly'),('Daily','Daily')],string ='Period',store = True)
    z_from_date = fields.Date(string = 'From Date',store = True)
    z_to_date = fields.Date(string = 'To Date',store = True)
    # NOTE(review): field name keeps the original 'linw' typo because views
    # and client code may already reference it.
    z_allow_linw = fields.One2many('sale.forecaste.line', 'z_allow_id', string='allow Lines', copy=True, auto_join=True)

    # Also trigger on z_from_date so editing it cannot bypass the check.
    @api.constrains('z_to_date', 'z_from_date')
    def _check_date(self):
        """Ensure each record's To Date is not before its From Date.

        :raises models.ValidationError: if z_to_date precedes z_from_date.
        """
        for r in self:
            # Fixed: compare the current record's own dates — the original
            # read self.z_from_date, which raises an expected-singleton
            # error when the constraint runs on a multi-record set.
            # Skip records where either date is still unset.
            if r.z_from_date and r.z_to_date and r.z_to_date < r.z_from_date:
                raise models.ValidationError('To Date should be greater than From Date')
class SaleAllocationLine(models.Model):
    """Detail line of a sales forecast: one (team, salesperson, product)
    combination with its forecasted quantity and value.
    """
    _name = "sale.forecaste.line"
    # Parent forecast; inverse of sale.forecaste.z_allow_linw.
    z_allow_id = fields.Many2one('sale.forecaste',string = 'allow id',store = True)
    z_team_id = fields.Many2one('crm.team',string = 'Sale Team',store = True)
    z_user_id = fields.Many2one('res.users',string = 'Sale Person',store = True)
    z_product_id = fields.Many2one('product.product',string = 'Product',store = True)
    z_forecasted_qnty = fields.Float(string = 'Forecasted quantity',store = True)
    z_forecasted_val = fields.Float(string = 'Forecasted Value',store = True)
| [
"[email protected]"
]
| |
93d874fcb0503c0266f53ab533313773a94261c8 | 159d4ae61f4ca91d94e29e769697ff46d11ae4a4 | /venv/lib/python3.9/site-packages/nbclient/util.py | 9b672357b05be0de493e5f59054ae05e0086f448 | [
"MIT"
]
| permissive | davidycliao/bisCrawler | 729db002afe10ae405306b9eed45b782e68eace8 | f42281f35b866b52e5860b6a062790ae8147a4a4 | refs/heads/main | 2023-05-24T00:41:50.224279 | 2023-01-22T23:17:51 | 2023-01-22T23:17:51 | 411,470,732 | 8 | 0 | MIT | 2023-02-09T16:28:24 | 2021-09-28T23:48:13 | Python | UTF-8 | Python | false | false | 3,205 | py | """General utility methods"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
import functools
import inspect
import sys
from typing import Callable, Awaitable, Any, Union
def check_ipython() -> None:
    """Verify that IPython, if already imported, is at least version 7.0.0.

    Does nothing when IPython has not been imported.

    Raises
    ------
    RuntimeError
        If the imported IPython is older than 7.0.0.
    """
    # originally adapted from vaex/asyncio.py
    ipython = sys.modules.get('IPython')
    if not ipython:
        return
    # Drop any trailing ".dev" so every dot-separated part parses as an int.
    version = ipython.__version__.replace(".dev", "")  # type: ignore
    parsed = tuple(int(part) for part in version.split('.'))
    if parsed < (7, 0, 0):
        raise RuntimeError(f'You are using IPython {ipython.__version__} '  # type: ignore
                           'while we require 7.0.0+, please update IPython')
def check_patch_tornado() -> None:
    """If tornado is imported, add the patched asyncio.Future to its tuple of acceptable Futures"""
    # originally adapted from vaex/asyncio.py
    if 'tornado' not in sys.modules:
        return
    import tornado.concurrent  # type: ignore
    futures = tornado.concurrent.FUTURES
    if asyncio.Future not in futures:
        tornado.concurrent.FUTURES = futures + (asyncio.Future, )  # type: ignore
def just_run(coro: Awaitable) -> Any:
    """Make the coroutine run, even if there is an event loop running (using nest_asyncio)"""
    # original from vaex/asyncio.py
    # NOTE: relies on the private asyncio._get_running_loop(), which returns
    # None (instead of raising) when no loop is currently running.
    loop = asyncio._get_running_loop()
    if loop is None:
        had_running_loop = False
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            # we can still get 'There is no current event loop in ...'
            # (e.g. in a non-main thread), so create and install a fresh loop.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
    else:
        had_running_loop = True
    if had_running_loop:
        # if there is a running loop, we patch using nest_asyncio
        # to have reentrant event loops
        check_ipython()
        import nest_asyncio
        nest_asyncio.apply()
        check_patch_tornado()
    # Block until the coroutine completes and return its result.
    return loop.run_until_complete(coro)
def run_sync(coro: Callable) -> Callable:
    """Runs a coroutine and blocks until it has executed.

    An event loop is created if no one already exists. If an event loop is
    already running, this event loop execution is nested into the already
    running one if `nest_asyncio` is set to True.

    Parameters
    ----------
    coro : coroutine
        The coroutine to be executed.

    Returns
    -------
    result :
        Whatever the coroutine returns.
    """
    # functools.wraps preserves __doc__ (as the original manual copy did)
    # plus __name__, __qualname__, __module__ and __wrapped__, so the
    # wrapper introspects like the wrapped coroutine function.
    @functools.wraps(coro)
    def wrapped(*args, **kwargs):
        return just_run(coro(*args, **kwargs))

    return wrapped
async def ensure_async(obj: Union[Awaitable, Any]) -> Any:
    """Convert a non-awaitable object to a coroutine if needed,
    and await it if it was not already awaited.
    """
    if not inspect.isawaitable(obj):
        # Plain value: nothing to await, hand it straight back.
        return obj
    try:
        return await obj
    except RuntimeError as e:
        if str(e) == 'cannot reuse already awaited coroutine':
            # obj was awaited elsewhere already and is its own result
            return obj
        raise
| [
"[email protected]"
]
| |
16f08340f13e5ef8e599df67e8d5494e198b58e8 | cb8c63aea91220a9272498d5ea6cca0a0738b16a | /numberfun.py | 1215b93e182f08eb2efaf9e5f70760eb790eb933 | []
| no_license | akantuni/Kattis | 1265de95bfe507ce7b50451a16f19720b86bef44 | 12f31bb31747096bf157fcf6b1f9242d91654533 | refs/heads/master | 2021-12-14T11:18:27.723045 | 2021-12-11T05:43:37 | 2021-12-11T05:43:37 | 111,472,667 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | n = int(input())
for _ in range(n):
    # Parse the three integers of this test case.
    a, b, c = (int(token) for token in input().split())
    # c must be reachable from a and b with one of +, -, *, / (either
    # operand order).  Division a/b == c is tested as a == c*b to stay in
    # integer arithmetic, and symmetrically for b/a == c.
    if (a + b == c or a * b == c or a - b == c or b - a == c
            or a == c * b or b == c * a):
        print("Possible")
    else:
        print("Impossible")
    # Removed the original trailing `a, b = b, a`: both names are
    # reassigned at the top of every iteration, so the swap was dead code.
| [
"[email protected]"
]
| |
fbd79e2ea249c672bce152386465c6b69ec3b0fa | c67d38c1417c6579175ab4716ac0d84441b5aaa6 | /format_check.py | d7e2d8fa0bdca686a461112932e8dd8839d4259f | []
| no_license | huangy10/AutoUpdateArtworks | 348ff549f40b1c895e186a8a6753c071592e70d0 | eae91f486213624619ad9481351bac22af6df0d1 | refs/heads/master | 2021-01-20T06:22:30.394478 | 2017-03-07T04:51:21 | 2017-03-07T04:51:21 | 83,503,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | # check the format of file names to guarantee the integrity
def check_folder_structure(root_path):
pass
def check_data_integrity(root_path):
"""
Check the integrity of image set
:param root_path: the root path where images are stored
:return: Boolean
"""
pass
| [
"[email protected]"
]
| |
62ef51945e8fd4e850c7446372a0058b0ce54a21 | 33f2f4ed5242f256e2a31145125dad91699c1ead | /Leetcode/Contests/weekly_200_find_winner.py | 4d5aff5422154a9770e74affeafcf60927731bf5 | []
| no_license | Zahidsqldba07/competitive-programming-1 | b04b2962ce7bc4454008a3cbb7bee88c0e02251d | e35b3a1c95c559b7754d4fabf8d2c4f09b0c42b2 | refs/heads/master | 2023-07-11T01:39:38.188489 | 2021-08-29T19:29:47 | 2021-08-29T19:29:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,714 | py | # 5476. Find the Winner of an Array Game
'''
Given an integer array arr of distinct integers and an integer k.
A game will be played between the first two elements of the array (i.e. arr[0] and arr[1]). In each round of the game, we compare arr[0] with arr[1], the larger integer wins and remains at position 0 and the smaller integer moves to the end of the array. The game ends when an integer wins k consecutive rounds.
Return the integer which will win the game.
It is guaranteed that there will be a winner of the game.
'''
# TO COMPLETE/CORRECT
class Solution:
def getWinner(self, arr: List[int], k: int) -> int:
past_winner = -1
won_rounds = 0
while won_rounds <= k:
if arr[0] > arr[1]:
# print("winner arr[0]", arr[0], "won_rounds", won_rounds)
# arr[-1], arr[1] = arr[1], arr[-1]
temp = arr.pop(1)
arr.append(temp)
if arr[0] == past_winner:
won_rounds += 1
# if arr[0] > arr[1]:
# will win all the rest
# return arr[0]
else:
won_rounds = 1
past_winner = arr[0]
else: # new winner
# print("winner arr[1]", arr[1], "won_rounds", won_rounds)
# arr[-1], arr[1] = arr[1], arr[-1]
# arr[0], arr[-1] = arr[-1], arr[0]
temp = arr.pop(0)
arr.append(temp)
if arr[0] == past_winner:
won_rounds += 1
else:
won_rounds = 1
past_winner = arr[0]
return past_winner
| [
"[email protected]"
]
| |
d6133e030913d6f52d6462bdb35d3f36d7348abf | dd861ad8a33e1ec43a969746ec58efbbd877ca58 | /telusko/urls.py | 607c4907b6abde60fe968f17fa05fe98bf638a5c | []
| no_license | sajibuzzaman/telusko-DjangoProject | 54c74b1136f4d69dda092fe4ab03958214bc4e60 | c4a8cadfa18544bbfe4c359c730cbc4e2ef318e8 | refs/heads/master | 2023-03-05T22:10:58.583721 | 2021-02-14T20:39:09 | 2021-02-14T20:39:09 | 338,895,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | """telusko URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
    # Site root is handled by the travello app's URLconf.
    path('', include('travello.urls')),
    path('admin/', admin.site.urls),
    path('accounts/', include('accounts.urls'))
]
# Serve user-uploaded media in development (static() is a no-op when
# settings.DEBUG is False).
urlpatterns = urlpatterns + static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
| [
"[email protected]"
]
| |
07fccbff6dc36b6923a3339363c214e6ebc79309 | ecce8a10aabb24019296cebaa46503f91876796f | /football_app/football_app/referee/urls.py | da7d9c7f2911a0055d6de80c5285c7b36bb0e0b1 | []
| no_license | Yeldarmt/DJangoFootballApp | 28450d60fbd0ec98bdf6d223545e17062442f970 | d9568cd48089c0be55217d8aecadf65053b72420 | refs/heads/master | 2022-11-26T16:19:34.252927 | 2020-04-26T18:09:52 | 2020-04-26T18:09:52 | 237,893,654 | 0 | 0 | null | 2022-11-22T05:28:37 | 2020-02-03T05:42:24 | Python | UTF-8 | Python | false | false | 216 | py | from django.urls import path
from football_app.referee.views import RefereesListView, RefereeDetailView
urlpatterns = [
    # List every referee.
    path('', RefereesListView.as_view()),
    # Detail view for a single referee, looked up by primary key.
    path('<int:pk>/', RefereeDetailView.as_view())
]
| [
"[email protected]"
]
| |
31d025fb82a70ab3eec3800b715a265d2df76bf7 | 5301656114df4d80c3353536d85e1d15829b9839 | /Frontales/.metadata/.plugins/org.eclipse.core.resources/.history/4b/3004f7adcb4500111f76ccb337b0ec7c | 6cadea6b43bd7bbc821b8e36d65d68fbe7346c30 | []
| no_license | Ducatel/TeXloud | 5f383c3fa2c27b01555574fd513d6f551e302b81 | 38f8be8857e1c6de2e103bbbe39707b49e1375aa | refs/heads/master | 2021-01-01T19:34:51.563829 | 2012-03-01T12:05:13 | 2012-03-01T12:05:13 | 2,885,054 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,575 | #!/usr/bin/python
# -*-coding:utf-8 -*
'''
Created on 10 dec. 2011
@author: Davis Ducatel
'''
import socket
from re import match
import threading
import json
import Ordonnanceur
class Frontal(object):
    """Compilation front-end server.

    Holds the server's IP address and listening port, the listening
    socket, and two schedulers: one picking compilation back-end servers
    and one picking data back-end servers.
    """

    def __init__(self, adresse, port):
        """Build the front-end server.

        @param adresse: IP address of the server (dotted decimal string)
        @param port: listening port number (must be an int)
        @raise ValueError: if the port or the address is malformed
        """
        # Dots are escaped so the pattern only matches dotted-decimal
        # addresses (the original bare '.' matched any character).
        regexAdresse = r"^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$"
        regexPort = "^[0-9]{1,5}$"
        if match(regexAdresse, adresse) and match(regexPort, str(port)):
            # The port must be a real int, not a numeric string.
            if isinstance(port, int):
                self._adresse = adresse
                self._port = port
            else:
                raise ValueError
        else:
            raise ValueError
        # Schedulers used to pick a data / compilation back-end server.
        self._ordonnanceurData = Ordonnanceur.Ordonnanceur("./../fichierServeur.xml", "data")
        self._ordonnanceurCompilation = Ordonnanceur.Ordonnanceur("./../fichierServeur.xml", "compilation")

    def lanceServeur(self):
        """Bind the listening socket, then accept connections forever,
        handling each client in a dedicated thread.
        """
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._sock.bind((self._adresse, self._port))
        self._sock.listen(5)
        while 1:
            client, addr = self._sock.accept()
            # Fixed: the handler is getRequestOfHTTPServer (the original
            # referenced a non-existent getTrameOfHTTPServer method).
            threading.Thread(target=self.getRequestOfHTTPServer, args=(client, addr)).start()

    def getRequestOfHTTPServer(self, client, addr):
        """Read the full request sent by the web server, then route it.

        @param client: connected client socket
        @param addr: client address (unused; kept for the thread signature)
        """
        taille = 1
        messageComplet = ""
        # Accumulate until the peer closes the connection (recv returns b'').
        while taille > 0:
            message = client.recv(1024)
            message = message.decode()
            messageComplet += message
            taille = len(message)
        client.close()
        # Fixed: dispatch through routeRequest (the original called a
        # non-existent examineRequete method).
        self.routeRequest(messageComplet)

    def routeRequest(self, requeteJSON):
        """Route the request to the proper reformatting handler, then
        forward the reformatted request to the selected back-end server.

        @param requeteJSON: the request to examine (JSON string)
        """
        requete = json.loads(requeteJSON)
        # NOTE(review): an unknown 'label' leaves adresseIP/port/req
        # unbound and raises NameError below — the protocol presumably
        # guarantees one of these labels; confirm upstream.
        if requete['label'] == "create":
            adresseIP, port, req = self.requestCreateNewUserDataSpace(requete)
        elif requete['label'] == "getProject":
            adresseIP, port, req = self.requestGetProject(requete)
        elif requete['label'] == "compile":
            adresseIP, port, req = self.requestCompile(requete)
        elif requete['label'] == "getFile":
            adresseIP, port, req = self.requestGetFile(requete)
        elif requete['label'] == "deleteFile":
            adresseIP, port, req = self.requestDeleteFile(requete)
        elif requete['label'] == "deleteProject":
            adresseIP, port, req = self.requestDeleteProject(requete)
        elif requete['label'] == "sync":
            adresseIP, port, req = self.requestSync(requete)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((adresseIP, port))
        # Fixed: sockets transport bytes, so encode the JSON payload.
        s.send(json.dumps(req).encode())
        # FIXME(review): self._messageEnd is never assigned anywhere in
        # this class; this line raises AttributeError at runtime.  The
        # intended end-of-message marker must be defined before use.
        s.send(self._messageEnd)
        s.close()

    def requestCreateNewUserDataSpace(self, requete):
        """Ask a data server to create a new storage space for the user.

        @param requete: request to reformat and route (python dict)
        @return: the data server's IP address
        @return: the connection port on the data server
        @return: the request (dict)
        """
        # Pick a data server from the scheduler.
        serveur = self._ordonnanceurData.getServeur()
        return serveur.adresseIP, serveur.port, requete

    def requestGetProject(self, requete):
        """Ask the data server named in the request to send a project back.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        return adresseIP, port, requete

    def requestCompile(self, requete):
        """Ask the data server to launch a compilation on a compilation
        server chosen by the compilation scheduler.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        serveur = self._ordonnanceurCompilation.getServeur()
        requete['servCompileIP'] = serveur.adresseIP
        requete['servCompilePort'] = serveur.port
        return adresseIP, port, requete

    def requestGetFile(self, requete):
        """Ask the data server to send a file back.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        return adresseIP, port, requete

    def requestDeleteFile(self, requete):
        """Ask the data server to delete a file.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        return adresseIP, port, requete

    def requestDeleteProject(self, requete):
        """Ask the data server to delete a project.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        return adresseIP, port, requete

    def requestSync(self, requete):
        """Ask the data server to run a synchronisation.

        @param requete: request to reformat and route (python dict)
        """
        adresseIP = requete.pop('servDataIp')
        port = requete.pop('servDataPort')
        return adresseIP, port, requete
| [
"hannibal@hannibal-laptop.(none)"
]
| hannibal@hannibal-laptop.(none) |
|
7972415cb517ef3bfc66323af6cb5649b2c53fb7 | c705252e5368efab6324f2c1716d50002ad22e80 | /1-2 first_exam_2.py | 7898110c7f46dc85665e8b2452fb488bb3fa3e84 | []
| no_license | younkyounghwan/python-exam | dd93124b91bc1d4f2690e9e3e9cb58ff8ef5623d | c0ed718d5dfcde65320da7c30f9a014b00e35151 | refs/heads/master | 2020-04-05T03:48:50.878125 | 2018-11-07T10:20:38 | 2018-11-07T10:20:38 | 156,528,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | """
day 2
"""
print("day 2")
"""
"""
#
f = 3.4
print(f)
i = 1
print(i)
b = True
print(b)
s ="1"
print(i+f)
#print(s+i)
print(int(s)+i)
print(s+int(s))
i = 57
j = 28
| [
"[email protected]"
]
| |
e0a494ce4ad4d72e812f2860cf7f862e5d9881f4 | bfce201a7971b05b2fbe0af4819081d71c3850db | /fermi/Pass7Validation/compare_flight_mc_psf/v1/plot.py | 8b5dbc36066c171de57395421b6155e1fcbbbf1a | []
| no_license | joshualande/PhD-Work | 2fe52f82f726ad6166937a3daed342c8cd9aee2f | 1d834a19b5a9347ccad75bd5a76126d5fd840c64 | refs/heads/master | 2020-04-18T15:14:19.127171 | 2014-01-26T22:58:10 | 2014-01-26T22:58:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,911 | py | from matplotlib import rc
rc('ps',usedistiller='xpdf')
rc('text', usetex=True)
rc('font', family='serif', serif="Computer Modern Roman")
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
from matplotlib.patheffects import withStroke
import matplotlib
import h5py
import pylab as P
import numpy as np
from scipy.stats import chi2
file=h5py.File('/nfs/slac/g/ki/ki03/lande/fermi/data/monte_carlo/compare_flight_mc_psf/v1/merged.hdf5')
flux=np.asarray(file['flux_mc'])
ts_ext_P7SOURCE_V4=np.asarray(file['ts_ext_P7SOURCE_V4'])
ts_ext_P7SOURCE_V6=np.asarray(file['ts_ext_P7SOURCE_V6'])
ts=np.asarray(file['ts_P7SOURCE_V6'])
ts_point = ts - ts_ext_P7SOURCE_V6
index=np.asarray(file['index_mc'])
fig=P.figure(figsize=(5,3))
fig.subplots_adjust(right=0.95, top=0.95, bottom=0.15)
from mpl_toolkits.axes_grid.axes_grid import Grid
grid = Grid(fig, 111, nrows_ncols = (1, 2), axes_pad=0.0)
min_cdf = 1e-4
format_float = lambda f: r'$%s$' % str(f).replace('e-0',r'\times 10^')
print format_float(1e-4)
for i,(name,irf,all_ts_ext) in enumerate([
['(a)','P7SOURCE_V6',ts_ext_P7SOURCE_V6],
['(b)','P7SOURCE_V4',ts_ext_P7SOURCE_V4]
]):
max_ts=max(all_ts_ext) + 1
axes = grid[i]
grid[i].add_artist(
AnchoredText(name, frameon=False, loc=2, prop=dict(size=14,
path_effects=[withStroke(linewidth=5,foreground='w')])))
index_mc=2
for flux_mc, color in zip(
reversed([ 1e-8, 3e-8, 1e-7, 3e-7, 1e-6, 3e-6 ]),
['red', 'blue', 'green', 'black', 'orange', 'gray']
):
kwargs=dict(color=color)
select = (flux==flux_mc) & (index==index_mc) & (ts_point>25)
print 'index=%s, flux=%s, num=%s' % (index_mc,flux_mc,np.sum(select))
print np.mean(ts_point[select])
if np.sum(select) < 100:
continue
print irf, flux_mc, select
ts_ext = all_ts_ext[select]
ts_ext[ts_ext<0] = 0
bins=np.linspace(0,max_ts,1e3)
bin_center=bins[:-1] + (bins[1]-bins[0])/2
binned=np.histogram(ts_ext,bins=bins)[0]
if any(ts_ext>max_ts):
print '> max: ',irf,ts_ext[np.where(ts_ext>max_ts)]
cdf=np.cumsum(binned[::-1])[::-1]
cdf=cdf.astype(float)/cdf[0] # normalize
cdf[cdf == 0] = min_cdf
axes.semilogy(bin_center,cdf,linewidth=1,label=format_float(flux_mc), **kwargs)
y = chi2.sf(bins,1)/2
axes.semilogy(bins, y, 'red', linewidth=1, label='$\chi^2_1/2$', zorder=0, dashes=(5,3))
axes.set_ylim(min_cdf,1)
axes.set_xlabel(r'$\mathrm{TS}_\mathrm{ext}$')
axes.set_ylabel('Cumulative Density')
from lande_plotting import fix_axesgrid
fix_axesgrid(grid)
prop = matplotlib.font_manager.FontProperties(size=10)
grid[0].legend(loc=1, prop=prop, columnspacing=1)
grid[1].set_xlim(0,100)
P.savefig('extension_test.eps')
P.savefig('extension_test.pdf')
| [
"lande@37a9682d-6443-41a2-8582-b44379b6e86f"
]
| lande@37a9682d-6443-41a2-8582-b44379b6e86f |
5ad042bd73c818fb6e254df6b4cf72c179ab9b10 | 87b904ebf11d416567a7e49b91b8e9934f67c6f3 | /insert_row_simple.py | e642a918cd16c4942e211712d5181e4c9c09765b | [
"MIT"
]
| permissive | NathanKr/pandas-playground | a701f524aa48f22f6680e48c597206e10f8222e5 | a5355c59cb61ca3a7dcce590ed42d56a6b943783 | refs/heads/main | 2023-06-05T11:07:52.061327 | 2021-07-02T02:35:15 | 2021-07-02T02:35:15 | 328,917,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | import pandas as pd
df = pd.DataFrame([])
# Most simple row insert.  DataFrame.append was deprecated in pandas 1.4 and
# removed in pandas 2.0; pd.concat is the supported way to add rows.
df = pd.concat([df, pd.DataFrame([[1, 2, 3]])], ignore_index=True)  # insert at the end
df = pd.concat([df, pd.DataFrame([[4, 5, 6]])], ignore_index=True)  # insert at the end
print(df)
"[email protected]"
]
| |
c116c570c2a56abc674c9fb63d86381c90576e47 | 4d6975caece0acdc793a41e8bc6d700d8c2fec9a | /leetcode/1484.linked-list-in-binary-tree/1484.linked-list-in-binary-tree.py | 9077aff3937204041003e1371d38f9ce9273451b | []
| no_license | guiconti/workout | 36a3923f2381d6e7023e127100409b3a2e7e4ccb | 5162d14cd64b720351eb30161283e8727cfcf376 | refs/heads/master | 2021-08-03T10:32:02.108714 | 2021-07-26T04:38:14 | 2021-07-26T04:38:14 | 221,025,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def isSubPath(self, head: ListNode, root: TreeNode) -> bool:
| [
"[email protected]"
]
| |
7e8bd342fc0bb96f6ba727e866bcc18731ac5afa | d5735851b605e8960ca321c0e332c7ed810f3e6d | /Python/files/eatenApples.py | 213e017a02f984a27f4332e28fc80d8f3097ee15 | []
| no_license | pyj4104/LeetCode-Practice | 6ed0cffd3605be6e187bedeb99e3b4b430604913 | 6a7d033bfd687ad2a0d79ac6a7f50ace1625f631 | refs/heads/master | 2023-03-12T00:23:50.913239 | 2021-02-25T03:38:11 | 2021-02-25T03:38:11 | 306,699,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 851 | py | import heapq as h
class Solution:
    def eatenApples(self, apples: [int], days: [int]) -> int:
        """Greedy strategy: each day, eat one apple from the edible batch
        that rots soonest.

        The heap stores mutable [expiry_day, remaining_count] entries
        ordered by expiry day; rotten or exhausted batches are discarded
        as they surface.
        """
        store = []
        eaten = 0
        # Phase 1: one new batch of apples arrives per day.
        for day in range(len(apples)):
            h.heappush(store, [days[day] + day, apples[day]])
            while store:
                batch = store[0]
                if batch[0] > day and batch[1] > 0:
                    # Eat one apple from the soonest-to-rot edible batch;
                    # decrementing in place keeps the heap invariant since
                    # the ordering key (expiry) is unchanged.
                    eaten += 1
                    batch[1] -= 1
                    if batch[1] <= 0:
                        h.heappop(store)
                    break
                # Rotten or empty batch: drop it and keep searching.
                h.heappop(store)
        # Phase 2: no new apples arrive; keep eating from storage.
        day = len(apples)
        while store:
            batch = store[0]
            if batch[0] <= day or batch[1] <= 0:
                h.heappop(store)
                continue
            eaten += 1
            batch[1] -= 1
            day += 1
        return eaten
| [
"[email protected]"
]
| |
d16ae67b41b1528bb0116d6a8e0870f587fefd41 | 89e4c3dd91ceb3a4a5e74cfaedbb795152ebd1f9 | /lc105_bt.py | dd379a2e68cceb2a57826262c2634dc06cff9c75 | []
| no_license | Mela2014/lc_punch | a230af2c9d40b1af4932c800e72698de5b77d61a | 498308e6a065af444a1d5570341231e4c51dfa3f | refs/heads/main | 2023-07-13T03:44:56.963033 | 2021-08-25T05:44:40 | 2021-08-25T05:44:40 | 313,742,939 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def buildTree(self, preorder: List[int], inorder: List[int]) -> TreeNode:
if not preorder: return None
root_val = preorder[0]
root = TreeNode(root_val)
idx = inorder.index(root_val)
root.left = self.buildTree(preorder[1:idx+1], inorder[:idx])
root.right = self.buildTree(preorder[idx+1:], inorder[idx+1:])
return root | [
"[email protected]"
]
| |
43ba18e1ca65424bedf81bd23bc6740a4005af6f | 70876e11e6a102c34c9d7a626a33bda26fcfbf58 | /gino/schema.py | 192cbe80271c90ff2ee13d025cc0c85d70923146 | [
"BSD-3-Clause"
]
| permissive | Urielable/gino | c275ad52b108d9ff3cd24a532d348a32cf3b3cb2 | 226d6df131ee32534fd37849a83e2513df5b5f37 | refs/heads/master | 2020-03-25T03:42:23.093065 | 2018-08-02T08:45:37 | 2018-08-02T08:45:37 | 143,355,539 | 1 | 0 | null | 2018-08-02T23:48:54 | 2018-08-02T23:48:54 | null | UTF-8 | Python | false | false | 14,910 | py | # noinspection PyProtectedMember
from sqlalchemy import exc, util
from sqlalchemy.sql.base import _bind_or_error
from sqlalchemy.sql.ddl import (
AddConstraint,
CreateIndex,
CreateSequence,
CreateTable,
DropConstraint,
DropIndex,
DropSequence,
DropTable,
SchemaDropper,
SchemaGenerator,
SetColumnComment,
SetTableComment,
sort_tables_and_constraints,
)
from sqlalchemy.types import SchemaType
class AsyncVisitor:
async def traverse_single(self, obj, **kw):
# noinspection PyUnresolvedReferences
for v in self._visitor_iterator:
meth = getattr(v, "visit_%s" % obj.__visit_name__, None)
if meth:
return await meth(obj, **kw)
class AsyncSchemaGenerator(AsyncVisitor, SchemaGenerator):
async def _can_create_table(self, table):
self.dialect.validate_identifier(table.name)
effective_schema = self.connection.schema_for_object(table)
if effective_schema:
self.dialect.validate_identifier(effective_schema)
return (not self.checkfirst or
not (await self.dialect.has_table(self.connection,
table.name,
schema=effective_schema)))
async def _can_create_sequence(self, sequence):
effective_schema = self.connection.schema_for_object(sequence)
return self.dialect.supports_sequences and (
(not self.dialect.sequences_optional or
not sequence.optional) and
(
not self.checkfirst or
not await self.dialect.has_sequence(
self.connection,
sequence.name,
schema=effective_schema)
)
)
async def visit_metadata(self, metadata):
if self.tables is not None:
tables = self.tables
else:
tables = list(metadata.tables.values())
tables_create = []
for t in tables:
if await self._can_create_table(t):
tables_create.append(t)
collection = sort_tables_and_constraints(tables_create)
seq_coll = []
# noinspection PyProtectedMember
for s in metadata._sequences.values():
if s.column is None and await self._can_create_sequence(s):
seq_coll.append(s)
event_collection = [
t for (t, fks) in collection if t is not None
]
await _Async(metadata.dispatch.before_create)(
metadata, self.connection,
tables=event_collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
for seq in seq_coll:
await self.traverse_single(seq, create_ok=True)
for table, fkcs in collection:
if table is not None:
await self.traverse_single(
table, create_ok=True,
include_foreign_key_constraints=fkcs,
_is_metadata_operation=True)
else:
for fkc in fkcs:
await self.traverse_single(fkc)
await _Async(metadata.dispatch.after_create)(
metadata, self.connection,
tables=event_collection,
checkfirst=self.checkfirst,
_ddl_runner=self)
async def visit_table(
self, table, create_ok=False,
include_foreign_key_constraints=None,
_is_metadata_operation=False):
if not create_ok and not await self._can_create_table(table):
return
await _Async(table.dispatch.before_create)(
table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self,
_is_metadata_operation=_is_metadata_operation)
for column in table.columns:
if column.default is not None:
await self.traverse_single(column.default)
if not self.dialect.supports_alter:
# e.g., don't omit any foreign key constraints
include_foreign_key_constraints = None
await self.connection.status(
CreateTable(
table,
include_foreign_key_constraints=include_foreign_key_constraints
))
if hasattr(table, 'indexes'):
for index in table.indexes:
await self.traverse_single(index)
if self.dialect.supports_comments and not self.dialect.inline_comments:
if table.comment is not None:
await self.connection.status(SetTableComment(table))
for column in table.columns:
if column.comment is not None:
await self.connection.status(SetColumnComment(column))
await _Async(table.dispatch.after_create)(
table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self,
_is_metadata_operation=_is_metadata_operation)
async def visit_foreign_key_constraint(self, constraint):
if not self.dialect.supports_alter:
return
await self.connection.status(AddConstraint(constraint))
async def visit_sequence(self, sequence, create_ok=False):
if not create_ok and not await self._can_create_sequence(sequence):
return
await self.connection.status(CreateSequence(sequence))
async def visit_index(self, index):
await self.connection.status(CreateIndex(index))
class AsyncSchemaDropper(AsyncVisitor, SchemaDropper):
async def visit_metadata(self, metadata):
if self.tables is not None:
tables = self.tables
else:
tables = list(metadata.tables.values())
try:
unsorted_tables = []
for t in tables:
if await self._can_drop_table(t):
unsorted_tables.append(t)
collection = list(reversed(
sort_tables_and_constraints(
unsorted_tables,
filter_fn=lambda constraint: False
if not self.dialect.supports_alter
or constraint.name is None else None
)
))
except exc.CircularDependencyError as err2:
if not self.dialect.supports_alter:
util.warn(
"Can't sort tables for DROP; an "
"unresolvable foreign key "
"dependency exists between tables: %s, and backend does "
"not support ALTER. To restore at least a partial sort, "
"apply use_alter=True to ForeignKey and "
"ForeignKeyConstraint "
"objects involved in the cycle to mark these as known "
"cycles that will be ignored."
% (
", ".join(sorted([t.fullname for t in err2.cycles]))
)
)
collection = [(t, ()) for t in unsorted_tables]
else:
util.raise_from_cause(
exc.CircularDependencyError(
err2.args[0],
err2.cycles, err2.edges,
msg="Can't sort tables for DROP; an unresolvable "
"foreign key dependency exists between tables: %s."
" Please ensure that the ForeignKey and "
"ForeignKeyConstraint objects involved in the "
"cycle have names so that they can be dropped "
"using DROP CONSTRAINT."
% (
", ".join(
sorted([t.fullname for t in err2.cycles]))
)
)
)
seq_coll = []
for s in metadata._sequences.values():
if s.column is None and await self._can_drop_sequence(s):
seq_coll.append(s)
event_collection = [
t for (t, fks) in collection if t is not None
]
await _Async(metadata.dispatch.before_drop)(
metadata, self.connection, tables=event_collection,
checkfirst=self.checkfirst, _ddl_runner=self)
for table, fkcs in collection:
if table is not None:
await self.traverse_single(
table, drop_ok=True, _is_metadata_operation=True)
else:
for fkc in fkcs:
await self.traverse_single(fkc)
for seq in seq_coll:
await self.traverse_single(seq, drop_ok=True)
await _Async(metadata.dispatch.after_drop)(
metadata, self.connection, tables=event_collection,
checkfirst=self.checkfirst, _ddl_runner=self)
async def _can_drop_table(self, table):
self.dialect.validate_identifier(table.name)
effective_schema = self.connection.schema_for_object(table)
if effective_schema:
self.dialect.validate_identifier(effective_schema)
return not self.checkfirst or (await self.dialect.has_table(
self.connection, table.name, schema=effective_schema))
async def _can_drop_sequence(self, sequence):
effective_schema = self.connection.schema_for_object(sequence)
return self.dialect.supports_sequences and (
(not self.dialect.sequences_optional or
not sequence.optional) and
(not self.checkfirst or
await self.dialect.has_sequence(
self.connection,
sequence.name,
schema=effective_schema))
)
async def visit_index(self, index):
await self.connection.status(DropIndex(index))
async def visit_table(self, table, drop_ok=False,
_is_metadata_operation=False):
if not drop_ok and not await self._can_drop_table(table):
return
await _Async(table.dispatch.before_drop)(
table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self,
_is_metadata_operation=_is_metadata_operation)
for column in table.columns:
if column.default is not None:
await self.traverse_single(column.default)
await self.connection.status(DropTable(table))
await _Async(table.dispatch.after_drop)(
table, self.connection,
checkfirst=self.checkfirst,
_ddl_runner=self,
_is_metadata_operation=_is_metadata_operation)
async def visit_foreign_key_constraint(self, constraint):
if not self.dialect.supports_alter:
return
await self.connection.status(DropConstraint(constraint))
async def visit_sequence(self, sequence, drop_ok=False):
if not drop_ok and not await self._can_drop_sequence(sequence):
return
await self.connection.status(DropSequence(sequence))
class GinoSchemaVisitor:
__slots__ = ('_item',)
def __init__(self, item):
self._item = item
async def create(self, bind=None, *args, **kwargs):
if bind is None:
bind = _bind_or_error(self._item)
await getattr(bind, '_run_visitor')(AsyncSchemaGenerator,
self._item, *args, **kwargs)
return self._item
async def drop(self, bind=None, *args, **kwargs):
if bind is None:
bind = _bind_or_error(self._item)
await getattr(bind, '_run_visitor')(AsyncSchemaDropper,
self._item, *args, **kwargs)
async def create_all(self, bind=None, tables=None, checkfirst=True):
await self.create(bind=bind, tables=tables, checkfirst=checkfirst)
async def drop_all(self, bind=None, tables=None, checkfirst=True):
await self.drop(bind=bind, tables=tables, checkfirst=checkfirst)
class AsyncSchemaTypeMixin:
async def create_async(self, bind=None, checkfirst=False):
if bind is None:
bind = _bind_or_error(self)
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await t.create_async(bind=bind, checkfirst=checkfirst)
async def drop_async(self, bind=None, checkfirst=False):
if bind is None:
bind = _bind_or_error(self)
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await t.drop_async(bind=bind, checkfirst=checkfirst)
async def _on_table_create_async(self, target, bind, **kw):
if not self._is_impl_for_variant(bind.dialect, kw):
return
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await getattr(t, '_on_table_create_async')(target, bind, **kw)
async def _on_table_drop_async(self, target, bind, **kw):
if not self._is_impl_for_variant(bind.dialect, kw):
return
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await getattr(t, '_on_table_drop_async')(target, bind, **kw)
async def _on_metadata_create_async(self, target, bind, **kw):
if not self._is_impl_for_variant(bind.dialect, kw):
return
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await getattr(t, '_on_metadata_create_async')(target, bind, **kw)
async def _on_metadata_drop_async(self, target, bind, **kw):
if not self._is_impl_for_variant(bind.dialect, kw):
return
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
await getattr(t, '_on_metadata_drop_async')(target, bind, **kw)
async def _call_portable_instancemethod(fn, args, kw):
m = getattr(fn.target, fn.name + '_async', None)
if m is None:
return fn(*args, **kw)
else:
kw.update(fn.kwargs)
return await m(*args, **kw)
class _Async:
def __init__(self, listener):
self._listener = listener
async def call(self, *args, **kw):
for fn in self._listener.parent_listeners:
await _call_portable_instancemethod(fn, args, kw)
for fn in self._listener.listeners:
await _call_portable_instancemethod(fn, args, kw)
def __call__(self, *args, **kwargs):
return self.call(*args, **kwargs)
def patch_schema(db):
for st in {'Enum'}:
setattr(db, st, type(st, (getattr(db, st), AsyncSchemaTypeMixin), {}))
| [
"[email protected]"
]
| |
5cd707b6becf1983598806138c1b602763026b7a | 1f5f8f95530003c6c66419519d78cb52d21f65c0 | /projects/golem_api/tests/users/edit_user.py | 8ede3ba2a8db79addec2b7d17f38ee7b733cf6f1 | []
| no_license | golemhq/golem-tests | c5d3ab04b1ea3755d8b812229feb60f513d039ac | dff8fd3a606c3d1ef8667aece6fddef8ac441230 | refs/heads/master | 2023-08-17T23:05:26.286718 | 2021-10-04T20:34:17 | 2021-10-04T20:34:17 | 105,579,436 | 4 | 1 | null | 2018-11-19T00:14:24 | 2017-10-02T20:05:55 | Python | UTF-8 | Python | false | false | 3,058 | py | from golem import actions
from projects.golem_api.pages import users
def test_edit_user(data):
username = actions.random_str()
users.create_new_user(username, '123456', '[email protected]')
new_username = actions.random_str()
new_email = '[email protected]'
new_project_permissions = [{'project': "projectname", 'permission': "admin"}]
response = users.edit_user(username, new_username, new_email, False, new_project_permissions)
assert response.status_code == 200
response = users.get_user(new_username)
assert response.json()['username'] == new_username
assert response.json()['email'] == new_email
assert response.json()['is_superuser'] is False
assert response.json()['projects'] == {'projectname': 'admin'}
def test_edit_user_convert_to_superuser(data):
username = actions.random_str()
users.create_new_user(username, '123456', '[email protected]')
response = users.get_user(username)
assert response.json()['is_superuser'] is False
users.edit_user(username, new_is_superuser=True)
response = users.get_user(username)
assert response.json()['is_superuser'] is True
def test_edit_user_invalid_email(data):
username = actions.random_str()
users.create_new_user(username, '123456', '[email protected]')
invalid_email = 'test@test'
response = users.edit_user(username, new_email=invalid_email)
assert response.status_code == 200
assert response.json() == ['{} is not a valid email address'.format(invalid_email)]
def test_edit_user_existing_username(data):
username1 = actions.random_str()
username2 = actions.random_str()
users.create_new_user(username1, '123456')
users.create_new_user(username2, '123456')
response = users.edit_user(username1, new_username=username2)
assert response.status_code == 200
assert response.json() == ['Username {} already exists'.format(username2)]
def test_edit_user_blank_username(data):
username = actions.random_str()
users.create_new_user(username, '123456', '[email protected]')
response = users.edit_user(username, new_username='')
assert response.status_code == 200
assert response.json() == ['Username cannot be blank']
def test_edit_user_doesnt_exist(data):
username = actions.random_str()
response = users.edit_user(username, new_username=actions.random_str())
assert response.status_code == 200
assert response.json() == ['Username {} does not exist'.format(username)]
def test(data):
username = actions.random_str()
users.create_new_user(username, '123456', '[email protected]')
new_username = actions.random_str()
users.edit_user(username, new_username=new_username)
response = users.get_user(new_username)
assert response.json()['username'] == new_username
assert response.json()['email'] == '[email protected]'
users.edit_user(new_username, new_email='[email protected]')
response = users.get_user(new_username)
assert response.json()['username'] == new_username
assert response.json()['email'] == '[email protected]'
| [
"[email protected]"
]
| |
6c942b2bf53f2c5dd3278b4989aed9c2f3790bae | c6ec292a52ea54499a35a7ec7bc042a9fd56b1aa | /Python/1396.py | 42df2b39f50cb44cdc466e7f32d62dd8cd8ccc59 | []
| no_license | arnabs542/Leetcode-38 | ad585353d569d863613e90edb82ea80097e9ca6c | b75b06fa1551f5e4d8a559ef64e1ac29db79c083 | refs/heads/master | 2023-02-01T01:18:45.851097 | 2020-12-19T03:46:26 | 2020-12-19T03:46:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | class UndergroundSystem:
def __init__(self):
self.history = collections.defaultdict(dict)
self.idToStation = dict()
self.stationTostation = collections.defaultdict(dict)
def checkIn(self, id: int, stationName: str, t: int) -> None:
self.history[stationName][id] = t
self.idToStation[id] = stationName
def checkOut(self, id: int, stationName: str, t: int) -> None:
startStation = self.idToStation[id]
startTime = self.history[startStation][id]
self.stationTostation[startStation][stationName] = self.stationTostation[startStation].get(stationName, [])
self.stationTostation[startStation][stationName].append(t - startTime)
def getAverageTime(self, startStation: str, endStation: str) -> float:
return mean(self.stationTostation[startStation][endStation])
# Your UndergroundSystem object will be instantiated and called as such:
# obj = UndergroundSystem()
# obj.checkIn(id,stationName,t)
# obj.checkOut(id,stationName,t)
# param_3 = obj.getAverageTime(startStation,endStation) | [
"[email protected]"
]
| |
ff685150be501c205b1a25081fb6692c47358299 | 2a86db4e93cfb66a10242438c41660b6555cc8f6 | /Day10/college/college/settings.py | f59a76f3a4f1bb5da72ccae53585e8be043aff5d | []
| no_license | Srinivasareddymediboina/Djano-Batch4 | 97c8b5c192c82c155bd8c95d1b1bff58b1b372aa | 9a8a3df4001ff4cc4c7e31aac194e99c87670482 | refs/heads/master | 2023-01-31T11:54:01.186971 | 2020-12-18T06:00:18 | 2020-12-18T06:00:18 | 284,895,371 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,106 | py | """
Django settings for college project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*d$&w9#jkq_xg4ac40o(qt!5-_h%+rzw_cl=@kg_4^=j6u5b(r'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'student',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'college.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'college.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
41f66d5f189dd3871733de89a22fb69901999860 | de8b832a3c804837300b9974dc0151d9294fa573 | /code/experiment/GenderSoundNet/ex16_1_1_1_1_1_1_1_1_1_1_1_1_1/expUtil.py | 71ae408bd5de7553b28a9f4472613859dad14ddd | []
| no_license | YuanGongND/Deep_Speech_Visualization | fcff2ac93e5adffd707b98eb7591f50fe77c1274 | 73a79e3596d9a5ee338eafb9a87b227696de25d1 | refs/heads/master | 2021-07-19T23:00:36.294817 | 2017-10-28T01:04:59 | 2017-10-28T01:04:59 | 105,332,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,429 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Oct 7 00:40:02 2017
@author: Kyle
"""
import numpy as np
import tensorflow as tf
from keras.utils import np_utils
from imblearn.under_sampling import NearMiss, AllKNN, RandomUnderSampler
from imblearn.over_sampling import ADASYN, SMOTE, RandomOverSampler
import sys
sys.path.append("../model/")
import soundNet
import waveCNN
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
import os
import math
import seaborn as sns
#%% slice the matrix using discontinuous row index
def discontSliceRow( matrix, index ):
outputMatrix = np.zeros( [ len( index ), matrix.shape[ 1 ] ] )
outputIndex = 0
for processLine in range( 0, len( matrix ) ):
if processLine in index:
outputMatrix[ outputIndex, : ] = matrix[ processLine, : ]
outputIndex += 1
return outputMatrix
#%% slice the matrix using discontinuous column index
def discontSliceCol( matrix, index ):
outputMatrix = np.zeros( [ matrix.shape[ 0 ], len( index ) ] )
outputIndex = 0
for processCol in range( 0, matrix.shape[1] ):
if processCol in index:
outputMatrix[ :, outputIndex ] = matrix[ :, processCol ]
outputIndex += 1
return outputMatrix
#%%
def iter_loadtxt(filename, delimiter=',', skiprows=0, dtype= float):
def iter_func():
with open(filename, 'r') as infile:
for _ in range(skiprows):
next(infile)
for line in infile:
line = line.rstrip().split(delimiter)
for item in line:
yield dtype(item)
iter_loadtxt.rowlength = len(line)
data = np.fromiter(iter_func(), dtype=dtype)
data = data.reshape((-1, iter_loadtxt.rowlength))
return data
#%%
def processData( dataSet, task = 'nonEmotion', balance = 'imbalance', dataType = 'waveform' ):
if dataType == 'waveform' or dataType == 'toyWaveform':
dataSize = 96000
else:
dataSize = 256 *256
# for speaker and gender task, use all database
if task != 'emotion':
dataSet = dataSet.astype( 'float32' )
np.random.seed(seed= 7 )
np.random.shuffle( dataSet )
#np.savetxt( 'dataSetAfterShuffle.csv', dataSet, delimiter = ',' )
print( dataSet[ 17, -17 ] )
feature = dataSet[ :, 0: dataSize ]
# normalize the data
feature = ( feature - np.mean( feature ) ) /math.sqrt( np.var( feature ) )
emotionLabel = dataSet[ :, dataSize + 0 ]
speakerLabel = dataSet[ :, dataSize + 1 ]
genderLabel = dataSet[ :, dataSize + 2 ]
emotionLabel = np_utils.to_categorical( emotionLabel )
speakerLabel = np_utils.to_categorical( speakerLabel )
genderLabel = np_utils.to_categorical( genderLabel )
if task == 'speaker':
return feature, speakerLabel
elif task == 'gender':
return feature, genderLabel
# for emotion task, only select 4 classes ( 0, 1, 2, 3 ), label 4 means other emotion, should abandon
if task == 'emotion':
dataSet = dataSet.astype( 'float32' )
np.random.seed(seed= 7 )
np.random.shuffle( dataSet )
#np.savetxt( 'dataSetAfterShuffle.csv', dataSet, delimiter = ',' )
print( dataSet[ 17, -17 ] )
# select only label with 0,1,2,3
emotionLabel = dataSet[ :, dataSize ]
emotionIndices = [ i for i, x in enumerate( emotionLabel ) if x != 4]
dataSet = discontSliceRow( dataSet, emotionIndices )
feature = dataSet[ :, 0: dataSize ]
# normalize the data
feature = ( feature - np.mean( feature ) ) /math.sqrt( np.var( feature ) )
emotionLabel = dataSet[ :, dataSize ]
emotionLabel = np_utils.to_categorical( emotionLabel )
# random oversampling
if balance == 'balance':
ros = RandomOverSampler( random_state= 7 )
feature, emotionLabel = ros.fit_sample( feature, np.argmax( emotionLabel, 1 ) )
numSamples = len( emotionLabel )
emotionLabel = np.array( emotionLabel )
emotionLabel.resize( [ numSamples, 1 ] )
dataSet = np.concatenate( ( feature, emotionLabel ), axis = 1 )
np.random.shuffle( dataSet )
feature = dataSet[ :, 0: dataSize ]
emotionLabel = dataSet[ :, dataSize ]
emotionLabel = np_utils.to_categorical( emotionLabel )
assert emotionLabel.shape[ 1 ] == 4
return feature, emotionLabel
#%%
def train( testFeature, testLabel, trainFeature, trainLabel, newFolderName, iteration_num = 100, \
lr_decay = 0.1, batch_size = 32, learningRate = 0.0001, iterationNum = 100, \
modelT = soundNet.soundNet, init = 'lecun_uniform', saveSign = False, denseUnitNum = 64,\
dataType = 'waveform' ):
if dataType == 'waveform' or dataType == 'toyWaveform':
dataSize = 96000
else:
dataSize = 256 *256
os.mkdir( newFolderName + '/weight' )
os.mkdir( newFolderName + '/models' )
result = np.zeros( [ 2, iteration_num ] )
class_num = testLabel.shape[ 1 ]
train_datasize = trainFeature.shape[ 0 ]
tf.set_random_seed( 7 )
with tf.Session() as sess:
# changable learning rate
global_step = tf.Variable(0)
learning_rate = tf.train.exponential_decay( learningRate, global_step, int( iteration_num *(train_datasize/batch_size) ), lr_decay, staircase=False)
# fix random index for reproducing result
tf.set_random_seed( 17 )
input_x = tf.placeholder( tf.float32, shape = ( batch_size, dataSize ), name = 'inputx' )
input_y = tf.placeholder( tf.float32, shape = ( batch_size, class_num ), name = 'inputy' )
prediction = modelT( input_x, numClass = class_num, l2_reg = 0.5, init = init, denseUnitNum = denseUnitNum )
loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits = prediction, labels= input_y ) )
train_step = tf.train.AdamOptimizer( learning_rate ).minimize( loss, global_step = global_step )
#train_step = tf.train.GradientDescentOptimizer( learning_rate ).minimize( loss, global_step = global_step )
correct_prediction = tf.equal( tf.argmax( prediction, 1 ), tf.argmax( input_y, 1 ) )
accuracy = tf.reduce_mean( tf.cast( correct_prediction, tf.float32 ), name="acc_restore" )
# initialize the data
init_op = tf.global_variables_initializer( )
sess.run( init_op )
saver = tf.train.Saver( max_to_keep= 100 )
print( tf.trainable_variables() )
# number of iterations
for iteration in range( 0, iteration_num ):
# each batch
for i in range( 0, 1 *int( train_datasize / batch_size ) ):
start = ( i * batch_size ) % train_datasize
end = min( start + batch_size, train_datasize )
inputTrainFeature = trainFeature[ start: end ]
inputTrainLabel = trainLabel[ start: end ]
_, lossShow = sess.run( [ train_step, loss ], feed_dict = { input_x: inputTrainFeature, input_y: inputTrainLabel } )
#print( 'loss = ' + str( lossShow ) )
# get accuracy on a small subset of test data (just several epoch), a very fast approximation of the performance
testBatchNum = 3
testSubsetResult = [ None ] *( batch_size *testBatchNum )
testSubsetLabel = [ None ] *( batch_size *testBatchNum )
for testBatch in range( 0, testBatchNum ): # 3*32=96 test samples
start = testBatch * batch_size
end = start + batch_size
inputTestFeature = testFeature[ start: end, : ]
inputTestLabel = testLabel[ start: end, : ]
tempTestResult, tempAccuracyTest = sess.run( [ prediction, accuracy ], feed_dict = { input_x: inputTestFeature, input_y: inputTestLabel } )
testSubsetLabel[ start :end ] = np.argmax( inputTestLabel, 1 )
testSubsetResult[ start :end ] = np.argmax( tempTestResult, 1 )
#np.savetxt( newFolderName + '/testResult.csv', testResult, delimiter = ',' )
#np.savetxt( newFolderName + '/testLabel.csv', inputTestLabel, delimiter = ',' )
accuracyTest = accuracy_score( testSubsetLabel, testSubsetResult )
print( confusion_matrix( testSubsetLabel, testSubsetResult ) )
result[ 0, iteration ] = accuracyTest
print( 'Epoch:' + str( iteration ) + ' result on test: ' + str( accuracyTest ) )
# get accuracy on a small subset of training data (just one epoch), a very fast approximation of the training loss/ overfitting
inputTestTrainFeature = trainFeature[ 0: batch_size, : ]
inputTestTrainLabel = trainLabel[ 0: batch_size, : ]
testTrainResult, accuracyTrain = sess.run( [ prediction, accuracy ], feed_dict = { input_x: inputTestTrainFeature, input_y: inputTestTrainLabel } )
print( 'Epoch:' + str( iteration ) + ' result on train: ' + str( accuracyTrain ) )
np.savetxt( newFolderName + '/testTrainResult.csv', testTrainResult, delimiter = ',' )
np.savetxt( newFolderName + '/testTrainLabel.csv', inputTestTrainLabel, delimiter = ',' )
result[ 1, iteration ] = accuracyTrain
print( '-----------------------------' )
#print( sess.run(global_step) )
#print( sess.run(learning_rate) )
# record the accuracy of both test/ training error approximation on the small subset
np.savetxt( newFolderName + '/accuracy.csv', result, delimiter = ',' )
# print variable
# if iteration == 0:
# lastState = printVariable( sess, newFolderName = newFolderName )
# else:
# lastState = printVariable( sess, lastState, iteration + 1, newFolderName = newFolderName )
#np.savetxt( newFolderName + '/weightConv1' + str( iteration + 1 ) + '.csv', lastState, delimiter = ',' )
# save model every 10 epoches
if ( iteration + 1 )%10 == 0 and saveSign == True:
save_path = saver.save( sess, newFolderName + '/models/' + str( iteration + 1 ) + '_.ckpt' )
print("Model saved in file: %s" % save_path)
resultOnTest = result[ 0, : ]
resultOnTrain = result[ 1, : ]
plt.plot( list( range( iteration_num ) ), resultOnTrain )
plt.plot( list( range( iteration_num ) ), resultOnTest )
plt.savefig( newFolderName + '/accuracy.png' )
return resultOnTrain, resultOnTest
#%%
def printVariable( sess, lastState = -1, iteration = 1, newFolderName = -1 ):
    """Snapshot the first kernel slice of every layer to CSV files and,
    when a previous snapshot is supplied, print the mean relative change
    (in percent) of each layer's weights since that snapshot.

    Returns the list of per-layer weight matrices so the caller can pass
    it back in as ``lastState`` on the next call.
    """
    layer_names = [ 'conv1', 'conv2', 'conv3', 'conv4', 'conv5', 'conv6', 'conv7', 'conv8', 'dense1', 'dense2' ]
    snapshot = [ 0 ] * len( layer_names )
    for idx, layer_name in enumerate( layer_names ):
        scoped_vars = tf.get_collection( tf.GraphKeys.GLOBAL_VARIABLES, scope= layer_name )
        weights = scoped_vars[ 0 ].eval( )
        # the first 8 entries are conv layers with kernels shaped
        # [1, size, 1, numFilters]; the dense layers are [size, numFilters]
        if idx <= 7:
            n_filters = weights.shape[ 3 ]
            filter_len = weights.shape[ 1 ]
        else:
            n_filters = weights.shape[ 1 ]
            filter_len = weights.shape[ 0 ]
        snapshot[ idx ] = np.zeros( [ n_filters, filter_len ] )
        for f in range( n_filters ):
            if idx <= 7:
                snapshot[ idx ][ f, : ] = weights[ 0, :, 0, f ]
            else:
                snapshot[ idx ][ f, : ] = weights[ :, f ]
        np.savetxt( newFolderName + '/weight/' + str( iteration ) + '_' + layer_name + '.csv', snapshot[ idx ], delimiter = ',' )
        # report drift versus the previous snapshot, if one was provided
        if lastState != -1:
            rel_change = 100 *np.mean( ( abs(lastState[ idx ] - snapshot[ idx ] ) / snapshot[ idx ] ) )
            print( layer_name + ' : ' + str( rel_change ) )
    return snapshot
#%% load data, divide it into training/test set, and separate out the labels
# normalize the feature to [0, 1]
# for emotion tests, filter out value = 4 (other emotions)
# folder list, i.e., IEMOCAP has 5 sessions, speakers are independent between sessions, always use leave-one-session-out strategy
def loadData( testTask, testFolder = 4, precision = 'original', sampleRate = 16000, dataType = 'toyWaveform' ):
    """Load the five IEMOCAP session folds and split them into
    training/test sets with a leave-one-session-out strategy.

    Parameters
    ----------
    testTask : str
        Label to predict (e.g. 'emotion'); for 'emotion' the training
        set is class-balanced, the test set never is.
    testFolder : int
        Index (0-4) of the session held out as the test set.
    precision, sampleRate, dataType : str / int / str
        Select which preprocessed data directory to read from; dataType
        is one of 'toyWaveform', 'waveform', 'toySpectrogram',
        'spectrogram' (the on-disk folder is named after it).

    Returns
    -------
    (trainFeature, trainLabel, testFeature, testLabel)
    """
    folderList = [ 0, 1, 2, 3, 4 ]
    trainFolderList = folderList.copy( )
    del trainFolderList[ testFolder ]
    # the folder on disk is named after dataType, so build the path directly
    # instead of one branch per type
    dataFileFolder = '../../../processedData/' + dataType + '/' + str( sampleRate ) + '_' + precision + '/session_'
    fold = [ 0, 0, 0, 0, 0 ]
    for i in folderList:
        # plain call — no need to build code strings and eval() them
        fold[ i ] = iter_loadtxt( dataFileFolder + str( i + 1 ) + '.csv' )
    # separate training and testing data
    trainData = np.concatenate( [ fold[ i ] for i in trainFolderList ], axis=0 )
    testData = fold[ testFolder ]
    if testTask == 'emotion':
        # emotion classes are imbalanced, so balance the training set only
        trainFeature, trainLabel = processData( trainData, task = testTask, balance = 'balance', dataType = dataType )
    else:
        trainFeature, trainLabel = processData( trainData, task = testTask, dataType = dataType )
    testFeature, testLabel = processData( testData, task = testTask, dataType = dataType ) # note: don't balance the test set
    plotInputDistribution( trainFeature )
    #plotInputDistribution( testFeature[ 0, : ] )
    return trainFeature, trainLabel , testFeature, testLabel
#%% calculate the number of elements of an high-dimensional tensor
def countElements( inputM ):
    """Return the total number of scalar entries in a tensor-like object,
    i.e. the product of all of its shape dimensions (1 for a 0-d array)."""
    total = 1
    for axis_length in inputM.shape:
        total = total * axis_length
    return total
#%%
def plotInputDistribution( inputM ):
    """Plot a histogram of every scalar value in ``inputM`` and save it
    as 'hist.png' in the current working directory."""
    flattened = np.reshape( inputM, [ -1 ] )
    fig1 = plt.figure( )
    ax1 = fig1.gca()
    # 'bins' is a hist() argument; it was previously passed to savefig(),
    # where it has no effect
    ax1.hist( flattened, bins = 500 )
    fig1.savefig( 'hist.png' )
    # close the figure so repeated calls do not accumulate open figures
    plt.close( fig1 )
"[email protected]"
]
| |
af8b45eb284b3530ad3e6119002d939b6b2c6eed | d87243c4f3bdd058115846b267964a8b513457a5 | /shortstories/migrations/0001_initial.py | cda48a52ee47a215154c75720299e5dfa2e06f0f | [
"MIT"
]
| permissive | evenset/ketabdan-project | 33678b1afafe3cd0f969f624e4aabac10fae718b | ea56ad18f64b35714c6c3a0d85e59a3f8514057a | refs/heads/develop | 2021-07-26T16:29:24.011024 | 2018-09-24T23:20:10 | 2018-09-24T23:20:10 | 125,778,476 | 4 | 0 | MIT | 2018-09-28T04:00:46 | 2018-03-18T23:49:39 | Python | UTF-8 | Python | false | false | 946 | py | # Generated by Django 2.0.3 on 2018-07-13 01:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the shortstories app: creates the ShortStory table.

    Auto-generated by Django 2.0.3; avoid editing the operations by hand
    once this migration has been applied anywhere.
    """

    # this is the app's first migration
    initial = True

    # depends on whichever user model the project configures via AUTH_USER_MODEL
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ShortStory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=500)),
                # NOTE(review): choices 'dr' and 'de' are two characters but
                # max_length=1 — those values cannot be stored; confirm
                # against the model and follow up with a fixing migration
                ('status', models.CharField(choices=[('dr', 'Draft'), ('p', 'Published'), ('b', 'Banned'), ('de', 'Deleted')], max_length=1)),
                ('publication_date', models.DateField()),
                # deleting a user cascades to their stories
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
82a1d4a6d1260f47e4cd6b966110c9fd65ca757c | 1fa6c2650c791e35feaf57b87e832613e98797dd | /LeetCode/DS - Heap/M K Closest Points to Origin.py | 2659259e57c56c802f5c7355b47c50a2a063b30e | []
| no_license | hz336/Algorithm | 415a37313a068478225ca9dd1f6d85656630f09a | 0d2d956d498742820ab39e1afe965425bfc8188f | refs/heads/master | 2021-06-17T05:24:17.030402 | 2021-04-18T20:42:37 | 2021-04-18T20:42:37 | 194,006,383 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,591 | py | """
We have a list of points on the plane. Find the K closest points to the origin (0, 0).
(Here, the distance between two points on a plane is the Euclidean distance.)
You may return the answer in any order. The answer is guaranteed to be unique (except for the order that it is in.)
Example 1:
Input: points = [[1,3],[-2,2]], K = 1
Output: [[-2,2]]
Explanation:
The distance between (1, 3) and the origin is sqrt(10).
The distance between (-2, 2) and the origin is sqrt(8).
Since sqrt(8) < sqrt(10), (-2, 2) is closer to the origin.
We only want the closest K = 1 points from the origin, so the answer is just [[-2,2]].
Example 2:
Input: points = [[3,3],[5,-1],[-2,4]], K = 2
Output: [[3,3],[-2,4]]
(The answer [[-2,4],[3,3]] would also be accepted.)
"""
"""
Quick Select
Time Complexity: O(n)
Space Complexity: O(1)
"""
class Solution:
    """Find the K points closest to the origin via in-place quickselect
    on squared Euclidean distances (average O(n))."""

    def kClosest(self, points: List[List[int]], K: int) -> List[List[int]]:
        # decorate each point with its squared distance so tuples compare
        # by distance first (coordinates break ties deterministically)
        decorated = [(px * px + py * py, px, py) for px, py in points]
        self.quick_select_with_target(decorated, 0, len(decorated) - 1, K)
        return [[px, py] for _, px, py in decorated[:K]]

    def quick_select_with_target(self, vectors, start, end, target):
        # partially sort vectors so its `target` smallest tuples occupy
        # vectors[:target] (in no particular internal order)
        if start >= end:
            return start
        pivot = vectors[(start + end) // 2]
        lo, hi = start, end
        while lo <= hi:
            while lo <= hi and vectors[lo] < pivot:
                lo += 1
            while lo <= hi and vectors[hi] > pivot:
                hi -= 1
            if lo <= hi:
                vectors[lo], vectors[hi] = vectors[hi], vectors[lo]
                lo += 1
                hi -= 1
        # recurse only into the side that still contains the K-th element
        if target - 1 <= hi:
            return self.quick_select_with_target(vectors, start, hi, target)
        if target - 1 >= lo:
            return self.quick_select_with_target(vectors, lo, end, target)
        return target
"""
Priority Queue
Time Complexity: O(nlogk)
Space Complexity: O(k)
"""
import heapq
class Solution:
    """Find the K points closest to the origin with a bounded max-heap
    of size K (O(n log K))."""

    def kClosest(self, points: List[List[int]], K: int) -> List[List[int]]:
        # negate distances so the heap root is always the *farthest*
        # of the kept candidates, ready to be evicted
        best = []
        for point in points:
            d = self.dist(point, [0, 0])
            heapq.heappush(best, [-d, point[0], point[1]])
            if len(best) > K:
                heapq.heappop(best)
        # order survivors by increasing distance, ties broken by coordinates
        best.sort(key=lambda entry: (-entry[0], entry[1], entry[2]))
        return [[px, py] for _, px, py in best]

    def dist(self, a, b):
        # squared Euclidean distance; a missing point compares as
        # infinitely close (original contract preserved)
        if a is None or b is None:
            return float('-inf')
        return (a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2
| [
"[email protected]"
]
| |
63f331aa35178b96b3b7a5bff53d76affbd12d84 | 687a57837c2ce1ec366ce05d1a3a3a113552137e | /src/neurounits/unit_term_parsing/__init__.py | a794123e054901c0ba4e9d7778810a3f939fb2b4 | []
| no_license | mikehulluk/NeuroUnits | ba9974897b2a1807010fdcd141eac7503ba09766 | ee59a8f7dcce382cb28a0f87b56952e0b7c59f17 | refs/heads/master | 2020-04-05T08:07:13.422241 | 2013-07-29T09:06:06 | 2013-07-29T09:06:06 | 2,848,923 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -------------------------------------------------------------------------------
from .unitterm_parsing import parse_term
| [
"[email protected]"
]
| |
43c114108be58675f3315ffc4f23538067730145 | 15581a76b36eab6062e71d4e5641cdfaf768b697 | /LeetCode_30days_challenge/2021/August/Set Matrix Zeroes.py | 51c881a0fe2ad8b202fa896fd86dffc76c4635b2 | []
| no_license | MarianDanaila/Competitive-Programming | dd61298cc02ca3556ebc3394e8d635b57f58b4d2 | 3c5a662e931a5aa1934fba74b249bce65a5d75e2 | refs/heads/master | 2023-05-25T20:03:18.468713 | 2023-05-16T21:45:08 | 2023-05-16T21:45:08 | 254,296,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,805 | py | from typing import List
# Approach 1 with O(M + N) extra memory
class Solution:
def setZeroes(self, matrix: List[List[int]]) -> None:
"""
Do not return anything, modify matrix in-place instead.
"""
rows = set()
cols = set()
n = len(matrix)
m = len(matrix[0])
for i in range(n):
for j in range(m):
if matrix[i][j] == 0:
rows.add(i)
cols.add(j)
for row in rows:
for col in range(m):
matrix[row][col] = 0
for col in cols:
for row in range(n):
matrix[row][col] = 0
# Approach 2 with O(1) extra memory
class Solution:
    """Zero out every row and column that contains a zero, using O(1)
    extra space: the first row and first column are reused as marker
    storage, with two booleans covering those lines themselves."""

    def setZeroes(self, matrix: List[List[int]]) -> None:
        """
        Do not return anything, modify matrix in-place instead.
        """
        n = len(matrix)
        m = len(matrix[0])
        first_row = first_col = False
        # pass 1: for every zero at (i, j), flag row i in column 0 and
        # column j in row 0 with the sentinel 2**31.  This relies on the
        # matrix values fitting in a signed 32-bit int (LeetCode
        # constraint), so the sentinel never collides with real data.
        for i in range(n):
            for j in range(m):
                if matrix[i][j] == 0:
                    # zeros in row 0 / column 0 get dedicated flags,
                    # because those lines double as marker storage
                    if i == 0:
                        first_row = True
                    if j == 0:
                        first_col = True
                    matrix[0][j] = matrix[i][0] = 2 ** 31
        # pass 2: clear each flagged row (row 0 is deferred to the end)
        for row in range(1, n):
            if matrix[row][0] == 2 ** 31:
                for col in range(m):
                    matrix[row][col] = 0
        # pass 3: clear each flagged column (column 0 deferred likewise)
        for col in range(1, m):
            if matrix[0][col] == 2 ** 31:
                for row in range(n):
                    matrix[row][col] = 0
        # finally handle the first row/column themselves; matrix[0][0]
        # carries a sentinel only when one of them contained a zero
        if matrix[0][0] == 2 ** 31:
            if first_row:
                for col in range(m):
                    matrix[0][col] = 0
            if first_col:
                for row in range(n):
                    matrix[row][0] = 0
"[email protected]"
]
| |
2ec4138b6920016a1f4c5a330d3efbf33d001b78 | aa7eca0eeccc7c71678a90fc04c02dce9f47ec46 | /Codes_13TeV/LimitTool_HiggsCombine/ExcitedQuarksShapeInterpolator/inputs/PhMID_JetTID_Pt200_170_DEta1p5_NoDPhi_CSVL_Summer16_35866pb/input_shapes_Bstar_1bTag_f1p0_13TeV_PhMID-JetTID-Pt200_170-DEta1p5-noDPhi-CSVL_mass700_80X_Summer16_BSFUP.py | 6ea3f07571854d8fbfed47972e78c70de2ef12ec | []
| no_license | rockybala/Analyses_codes | 86c055ebe45b8ec96ed7bcddc5dd9c559d643523 | cc727a3414bef37d2e2110b66a4cbab8ba2bacf2 | refs/heads/master | 2021-09-15T10:25:33.040778 | 2018-05-30T11:50:42 | 2018-05-30T11:50:42 | 133,632,693 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 164,354 | py | shapes = {
500 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.719812400163282e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.865716791838332e-05, 5.977569284706053e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.67530355174229e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.947211023705698e-05, 0.0, 0.0, 0.0, 0.0, 6.069732442932561e-05, 0.0, 5.752445992197769e-05, 0.0, 0.0, 3.8352599264915734e-05, 5.801688987934455e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.690822300899203e-05, 0.0, 0.0, 0.0, 0.0, 5.677552670598218e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.8129659228619466e-05, 0.00012040985732351282, 0.0, 0.0, 5.7589072941251475e-05, 0.0, 5.725893625506697e-05, 0.0, 0.0, 0.0, 0.0, 0.00011766651354584125, 0.0, 5.686997602201208e-05, 0.0, 0.0, 6.436145074262042e-05, 0.0, 5.933364725484217e-05, 6.567043563745048e-05, 0.0, 5.799596572317793e-05, 0.0, 6.346529625791008e-05, 6.383377110386828e-05, 0.0, 6.416828408415301e-05, 0.0, 0.0, 0.00021583469945338465, 0.00023772018155735427, 0.00011959896649586162, 0.000457456392787472, 0.00030735880477527033, 0.0003460050602122696, 0.0005814876790653045, 0.0004769161771272102, 0.00034630795902632, 0.0006883028522848998, 0.0006930790758449121, 0.0007716148645209419, 0.0008863692278614754, 0.0013432161305226996, 0.0010744028124541314, 0.001115329914879285, 0.0012124761677348344, 0.001869556668866572, 0.0016050374070382463, 0.002119818329207592, 0.0018229560658316727, 0.0020861958086724496, 
0.0021863606103836183, 0.003003779592413221, 0.003346623577900035, 0.0027386636958225672, 0.003291392558208349, 0.004441424661846416, 0.004435574650707961, 0.004576446099028985, 0.004298394241769288, 0.005032622972828908, 0.0063219886221029505, 0.00559068617303167, 0.006434433852096067, 0.007171822632187213, 0.006842450338705542, 0.007373149665973223, 0.0071419092032475855, 0.009693864875725891, 0.009056570694294722, 0.008369506378826578, 0.0103697171257093, 0.010050799798833027, 0.011491121336799675, 0.011010974770830557, 0.011010642901861292, 0.012441025146544014, 0.012840945489068101, 0.011766248735526907, 0.015012073131416831, 0.013857939346133344, 0.014399787282621306, 0.014243813243360461, 0.014349308188940096, 0.016014628135480782, 0.015860508186153727, 0.016854223076135256, 0.019651929543810794, 0.016788677860631727, 0.01655093130734378, 0.018359175184138488, 0.018177878448449137, 0.01967888897423278, 0.019944340386704448, 0.020365522224732053, 0.019793695214892958, 0.02068874801314998, 0.01881468321432343, 0.01837182121532339, 0.017412059073735983, 0.01830723294946311, 0.019104774621343978, 0.016428316299244128, 0.016396812816781424, 0.015797841113403436, 0.014747944818528123, 0.014750109625343026, 0.014196982520194126, 0.012794454658076883, 0.01043574519232855, 0.010453801781786013, 0.012105837487585515, 0.009014491897138864, 0.009084513332124556, 0.00913236519058094, 0.007276150566006793, 0.007282981231714869, 0.006364203084375915, 0.006787201813837455, 0.006907532396416799, 0.00503936757141968, 0.005108735479819742, 0.004409328920915483, 0.004521833595570128, 0.004986833807658423, 0.004083513460463757, 0.0032512411539124576, 0.003382291468890263, 0.0036790767824282536, 0.002342844852598312, 0.002456352427984255, 0.0032547119198250984, 0.0023598226855600724, 0.002284902171674784, 0.0018112549494812393, 0.0025311436588479996, 0.0024814783735238417, 0.00178864409663016, 0.0016492910400163212, 0.0021164518649710064, 0.001666824297637633, 
0.002389188713045999, 0.0014850773506927208, 0.0016819812275625315, 0.0009993337780878173, 0.0009930066597196466, 0.0015634255969319978, 0.0015675714062095957, 0.001309983008989682, 0.001252994542525587, 0.0013956960111628483, 0.001148923260120426, 0.001661564356820357, 0.0012915090304824127, 0.0011303310304996573, 0.001430589936306895, 0.0009777501691039683, 0.00024384302728111838, 0.0011431935058969337, 0.0010192796501774567, 0.0014231653710203031, 0.0010403804080455826, 0.0010823384924225103, 0.0007261336810477586, 0.0011576003572476151, 0.0005086687436624507, 0.001101170141799678, 0.0009843127867984066, 0.0008946953097327103, 0.0006727279862734313, 0.0012010799332067153, 0.0009411295224190079, 0.0005868390661947163, 0.0007406880955651314, 0.00081976285221144, 0.0009345297973975117, 0.000536954874992754, 0.000828897272072101, 0.0006995129575545692, 0.00031361266580365944, 0.0005284293982215783, 0.0005962570790303502, 0.0007959788790231235, 0.00039597866913170643, 0.0006686961429880265, 0.0005251011809719726, 0.0005090012964276698, 0.00039665731382201695, 0.00039839834949155, 0.0006285056711500112, 0.0009215730493314932, 0.00051322341733364, 0.0004713421460354504, 0.0005068901448018909, 0.0004342172682146316, 0.0007322269867863463, 0.0005044862829217637, 0.00030060408600439165, 0.00029477244618387896, 0.0006133014225451543, 0.0007052866114782542, 0.0005629157353911463, 0.0002471569987817, 0.0006004637005613822, 0.0006327723755521274, 0.0006821241632249611, 0.0003649229590523288, 0.00023894898567837698, 0.0006191149189797099, 0.0005278272930916246, 0.00011531676271880723, 0.00019335381341980228, 0.00024421900108936287, 0.0003627975160911289, 0.00034578394339421686, 3.315336813168086e-05, 0.00030142630505166685, 0.0005567931631857634, 0.00033121758524086225, 0.00024780165881313856, 0.0003871989114387499, 0.0004268643647439306, 0.0003589286441745599, 0.00029108459784930917, 0.00024966753283051985, 0.0004218595709899761, 0.00018696799116905293, 
0.00036313884423774676, 0.00017340983316563316, 0.0001719879706538356, 0.00048055574946748706, 0.0003632467016527582, 0.00024361069620943386, 0.00042320942978790613, 0.00039607531229308605, 0.0002536019579857982, 0.0003468422999773158, 0.0003319508880210285, 0.0003975076824693638, 0.00012120815490902671, 0.00022359573793532698, 0.00017218298925967616, 0.00017818932754962144, 0.00024691078665216635, 0.0003139456288464503, 0.00013047833105958793, 0.00023384647748271625, 0.00026670104957606563, 0.00017807803975824503, 0.0004059275811453719, 6.50366255246124e-07, 6.424237337567764e-05, 0.00040975000052352313, 0.00011607226607425266, 0.00018387268621769193, 0.0003914720891092226, 0.0003277413489605963, 0.00018481650697863697, 0.0002687739314216662, 0.00013544378375303742, 0.0003072253733916108, 6.500285134668229e-05, 0.0001388793455706897, 0.00035390210570929496, 0.00013665687196373673, 0.00031400311329291247, 5.6687305631425626e-05, 0.00011385282396269204, 0.0003541049651753984, 0.00017176126950216541, 0.00035292049382533875, 0.00028815574022954596, 0.000345824538080636, 0.00017983768607430324, 0.00023988977494392968, 0.00011823850886065762, 0.00042408705910058556, 0.0003569169392735686, 0.00011583220810829745, 0.0002480868473120022, 0.0003872743569255816, 0.00031100580769823815, 0.0001677501794239388, 0.00013345213686667204, 0.00025703723488847, 0.00023533298150528565, 6.703822128595248e-05, 0.00023881532636273309, 9.804807714837126e-05, 0.00011914965557164095, 0.00018777593027750634, 0.00018498560971785284, 0.00018543818006945025, 0.00017773881998248275, 0.0001121267634245331, 0.00023028613430021194, 0.00020930310330202135, 0.00010419135697453558, 0.00025700092532335734, 0.00022700512176457253, 7.154407192608631e-05, 0.00036606152490073317, 0.00011365012404897767, 0.0001812504540999238, 0.00028867790961257557, 7.602425742477827e-05, 0.0002440299770947117, 0.00025127807743899203, 0.00012232225225890722, 0.00014787139911390324, 0.0002555689654246515, 
0.0003182993348870066, 6.434834465351824e-05, 0.00012652596765713834, 0.00023124695878719217, 0.00027771141804781776, 6.302966695105218e-05, 0.00012350282597203388, 0.0002488102350508477, 9.533939042892558e-05, 0.00011795335455159166, 0.00018668076267892907, 0.00023506789660744486, 6.734039071765284e-05, 0.00011605125074529228, 0.0002481588054394728, 0.0, 0.00019889855525156847, 6.615331524467158e-05, 0.0, 0.00011516890324053611, 0.00017180854259572702, 0.0, 0.00014596426940734816, 0.0001945012230292008, 0.00011514082202005977, 0.0, 6.505295649514992e-05, 0.00011428313675601414, 0.00012162467781729346, 0.0001241335137728296, 5.4226153047023e-05, 0.0001862533674150071, 0.0001166273488358276, 0.00024142717617861245, 0.00018233651581572486, 0.0, 0.0, 6.14539218582961e-05, 0.00011706905822836899, 0.00013159951709418283, 5.665580543118349e-05, 0.00012249687095214524, 7.068501337183476e-05, 2.6452162092436397e-05, 5.946959728692913e-05, 6.951055963248885e-05, 0.00012265902176583738, 0.00011828183873088938, 0.00020196569920625864, 0.0002825855156019254, 0.00019777013237646932, 0.0001761054935721712, 0.0, 4.925427267161381e-05, 5.8961514100534564e-05, 0.000122941509270686, 0.0001146208635772787, 0.0001448300570598456, 0.0001221524771203374, 8.05362622284905e-05, 0.00021062823147964554, 3.2964780056949784e-05, 0.00018544443680242162, 0.0003126653576833533, 0.0003184850766610826, 0.0, 0.00012800534880519566, 0.00012136633970619051, 0.00020099142673219963, 5.810523061819291e-05, 0.0, 6.48220499983678e-05, 6.148314843699399e-05, 0.00029813838617690055, 0.00012550408019154112, 4.9267327476019515e-05, 0.0001729203720223629, 0.0, 0.00011533034746507663, 0.0, 0.00012225357635201175, 0.00024094790359504426, 0.00025041302997185074, 5.9835553484459876e-05, 5.683631616621789e-05, 0.00012940462325734058, 5.811916865903811e-05, 0.0, 0.00011686520725813348, 5.666039826066887e-05, 0.0, 5.887605670128869e-05, 7.561079450340189e-05, 0.0001065535073015202, 5.7230131350540205e-05, 0.0, 
0.0002945998332921035, 0.00012175000621890804, 5.9503040607338146e-05, 6.503120038724028e-05, 7.043001446430305e-05, 0.0, 0.00011569966565937707, 2.6797177038901694e-05, 5.891588211726051e-05, 0.00010276825223342313, 5.7144400432913065e-05, 0.00011620102485223225, 6.974108434319712e-05, 0.0001253508415184385, 0.00015965696426455171, 0.00013561102884658056, 0.00011668388736455451, 0.00012159435146677092, 5.8883196670699574e-05, 0.0, 0.0001246932919332829, 0.0, 4.449460267196749e-05, 6.447798666793732e-05, 0.0, 5.81092992041142e-05, 0.0, 0.00010703717897238704, 6.215796956826642e-05, 0.0, 0.0, 2.188760187142439e-05, 5.657753358774538e-05, 0.0, 0.00013990228152341508, 6.52433822715051e-05, 0.00018827570534307004, 5.810635888151562e-05, 0.00011825730185277024, 5.528832749086041e-05, 0.0, 0.0001902230422516708, 0.00011444877492906737, 0.00013858546146662177, 5.744445579545825e-05, 0.0, 0.0, 0.00011471011034576499, 5.8031614285535884e-05, 6.151828985068668e-05, 8.344025816705685e-05, 5.6647440327356535e-05, 0.0, 0.00013930881501566955, 5.813957996823986e-05, 0.0, 5.7211708747902217e-05, 5.7403325468875815e-05, 6.438565711936221e-05, 0.0, 5.669965384667973e-05, 0.00017392075410523332, 7.646771619529983e-05, 6.0208752220794487e-05, 0.0, 5.8141768115289965e-05, 0.0, 5.712674710072238e-05, 0.00012165265646837683, 0.00011940060868644981, 0.00011761921765908119, 0.0, 0.0, 6.513439089487138e-05, 7.253166132633305e-05, 5.6663697576142855e-05, 5.418130956595743e-06, 5.681025214380337e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00014195902578452104, 0.0, 0.00011833240544162539, 0.0, 6.923905844928761e-05, 0.00012085222771880894, 0.0, 5.984506964480799e-05, 7.316774541685911e-05, 5.6989606124017005e-05, 0.0, 0.00012201733624679501, 7.644119630891653e-05, 5.7168726473946645e-05, 0.0, 1.6999813796360844e-06, 0.0, 0.0, 5.727979203163828e-05, 1.660947350636924e-05, 0.0, 7.801602397543544e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 6.771848999165175e-05, 5.988005150611161e-05, 3.3433997990856754e-05, 
9.079643531088355e-06, 0.0, 0.00019203987670261144, 6.18904913845896e-05, 0.0, 0.0, 0.0, 8.410568280227298e-05, 0.0, 0.00011852960079791683, 0.0, 0.00012212874940076282, 5.424981238700225e-05, 7.482142615337632e-05, 5.7417331889316325e-05, 0.0, 0.0, 0.0, 0.0, 5.951958277110495e-05, 0.0, 0.0, 0.0, 0.0, 0.00012319375020120664, 0.0, 0.0, 0.0, 8.12215112480486e-05, 6.153573234579181e-05, 5.4547200945330116e-05, 0.0, 2.6956336245298742e-05, 4.931121577961301e-05, 0.0, 5.8839108926619735e-05, 5.727344412587313e-05, 6.02219950690873e-05, 0.0, 0.0, 5.739424807759765e-05, 5.889964196337302e-05, 0.00012002121049693646, 0.0, 5.931117885948133e-05, 5.7772655059775474e-05, 0.0, 0.0, 0.0001303595898923221, 0.0, 9.076994534057319e-06, 0.00011481320398230273, 0.0, 0.0, 7.140742670144697e-05, 0.0, 4.590228211603663e-05, 0.0, 0.00012732289765074592, 0.0, 1.1467879249111036e-05, 0.0, 5.4265494107528e-05, 0.0, 6.512910287283363e-05, 0.0, 6.24218065384927e-05, 5.74743775667085e-05, 5.757775611822672e-05, 0.0, 7.74435727966503e-05, 0.0, 0.0, 0.0, 0.0, 0.00012344755246582032, 5.097392547186171e-07, 0.0, 0.0, 6.022516332367027e-05, 2.2001082083575605e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 6.18849925254663e-05, 0.0, 6.440421078289122e-05, 0.00011877369315999567, 0.00011692250935900809, 6.477479969800461e-05, 0.0, 0.0, 7.067619810233862e-05, 0.0, 0.0, 0.0, 6.47756829344441e-05, 0.0, 0.0, 0.0, 0.00012984779141458205, 6.021626257968e-05, 5.916390630607e-05, 5.8956773115259335e-05, 0.0, 0.0, 0.0, 5.670183629543022e-05, 5.749975209487026e-05, 0.0, 5.83930061450793e-05, 5.756619426831874e-05, 0.0, 0.0, 6.475348805746453e-05, 6.441121399311147e-05, 0.0, 2.739825362577589e-05, 6.024498770801223e-05, 5.812270160479609e-05, 0.0, 0.0, 0.0, 5.715838975845475e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.195902483399483e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011405866933779093, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.847578534350864e-05, 0.0, 0.0, 0.0, 0.0, 5.701236513265794e-05, 
0.00013053388948078198, 0.0, 0.0, 0.0, 4.958033507357742e-05, 5.981530172764719e-05, 0.00013817606143286707, 0.0, 0.0, 0.0, 0.0, 0.0, 6.481511516774286e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.192627100783857e-05, 0.0, 7.524204044076281e-05, 0.0, 4.4394707918373124e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.1292325085121636e-06, 6.0348457432324196e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.481269339040876e-05, 0.0, 6.0261564061576694e-05, 5.7591078742714075e-05, 6.551951189856428e-07, 0.0, 0.0, 0.0, 5.84873357968174e-05, 0.0, 6.549627850648084e-05, 5.7153301176903336e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.0288836123508466e-05, 0.0, 0.0, 0.0, 0.0, 7.258102569585143e-05, 0.0, 0.0, 2.698522377516985e-05, 0.0, 0.0, 0.0, 6.552308900614423e-05, 0.0, 0.0, 5.808876823062064e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 8.181179810461193e-05, 0.0, 0.0, 0.0, 5.9894832895299035e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.894568422421896e-05, 0.0, 0.0, 0.0, 6.120101422672115e-05, 0.0, 0.0, 0.0001251450531263357, 5.702523189317652e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0001442537424339251, 0.0, 5.701697505704214e-05, 3.379423594472922e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.482243748274126e-05, 0.0, 5.927384360043892e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011511144158727242, 0.0, 0.0, 0.0, 0.0, 0.0, 6.194795303785327e-05, 0.0, 0.0, 0.0, 5.9216057144097495e-05, 0.0, 0.0, 0.0, 0.0, 5.093051155171071e-07, 0.0, 0.00010419111764595197, 7.272960315987335e-05, 0.0, 0.0, 4.488832098519644e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.3595687242280485e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.7812036008377745e-05, 5.847775125687397e-05, 0.0, 0.0, 5.13611733658839e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.327695036091241e-05, 0.0, 7.281435966826725e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.103663822703146e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.705572349438774e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.706720556810118e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.782012189552383e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.629364726915835e-05, 0.0, 6.026346729364631e-05, 0.0, 6.267194479645736e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.484162935582655e-05, 0.0, 0.0, 6.499001877596136e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.622530186364026e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.887506519715661e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.843484306081333e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.157049767171026e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.026723386968829e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
1000 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.569725232378688e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.179146554374504e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.890784851151843e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.797377707236985e-05, 0.0, 0.0, 3.6171102141786894e-05, 0.0, 0.0, 0.0, 4.11157026803288e-05, 4.158179585179559e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.847968352756976e-05, 3.789548002768657e-05, 4.1089112224669715e-05, 0.0, 0.0, 0.0, 0.0, 3.0755455630321896e-05, 0.0, 7.92642856714733e-05, 0.0, 0.0, 6.514547113225895e-05, 0.0, 0.0, 4.7487605162535624e-05, 3.7423427039982325e-05, 0.0, 0.00010872851798950443, 0.0001209798598169567, 0.00011056179827127914, 3.7824297904430876e-05, 1.7070779643211753e-05, 3.594517541382806e-05, 0.0, 0.0, 0.0, 8.362015114361684e-05, 7.21311589962697e-05, 6.758942360635078e-05, 8.565353776781808e-05, 3.7680061136970234e-05, 8.580901953366821e-05, 0.0, 0.00010938675023939395, 3.9911987636160644e-05, 3.629474117880568e-05, 3.682504635161159e-05, 6.447743858819212e-05, 0.00020220876550849112, 0.00014383771317187633, 0.00020386410035482053, 0.0001699109976319828, 7.440483382720406e-05, 0.00018317315851864565, 0.00022733281019234403, 7.838676772574377e-05, 0.00011417638897396618, 0.00032848853753683627, 8.464981388464723e-05, 0.00014812509505710583, 0.0001982845671240706, 0.00023103243769877248, 0.0002426768457940102, 0.00015442059605186447, 0.00028358552466436354, 0.0002802115117674317, 0.00023894817940382733, 0.0002774145907398534, 0.00035576165707239526, 0.00019766270587862961, 0.00019720327019377153, 0.00019175230576521082, 0.00023324439502903476, 0.00040973538520414456, 0.0005079115071925255, 0.00033450048159825216, 0.0003546062096288647, 0.00041398704110092913, 0.0006605489894113859, 
0.00042212171966663837, 0.00043644465544381193, 0.00032385853368136185, 0.000906659663463969, 0.0003026308200578551, 0.0006164222072266879, 0.0007757231523249794, 0.000896074555936948, 0.0009943663595710379, 0.0006537444128266749, 0.0005153177652362787, 0.0007954750693322233, 0.0008010795475696435, 0.0006589618814619282, 0.0009738165182765509, 0.0011356444517867054, 0.0009614028298700672, 0.001040037073123727, 0.0008653693194097644, 0.00107311665305098, 0.0014199047421723978, 0.0007946199097473535, 0.0013993320488823544, 0.001103421031877307, 0.001776344587500019, 0.0016155125586994406, 0.001393706877478451, 0.0012358925757961566, 0.0016186074771206873, 0.0014241315188804717, 0.001687940642256638, 0.002044329202230176, 0.001660895779404806, 0.001772433210454119, 0.0017193302276000614, 0.0022150403487964346, 0.0021352694030541248, 0.002032976288714203, 0.0026497072084983742, 0.0023746252062450506, 0.0019669900458030324, 0.0021013269233499587, 0.0019312783793461036, 0.002694450152093216, 0.002353686249173691, 0.002651356343292914, 0.0031599160257511096, 0.002165495958725077, 0.002362769970061775, 0.0025763655716625924, 0.003167188647009788, 0.0036313619612508116, 0.0034880340345028508, 0.003209739273353498, 0.003422947549425942, 0.00360506215770111, 0.0030608249288162177, 0.0042089614187084574, 0.003648438825764384, 0.004312177037569463, 0.003683621210517778, 0.004107891149402648, 0.0045603096919938566, 0.004134766359881938, 0.004495319037866057, 0.00529680697271113, 0.004874303693169375, 0.005305818451806809, 0.004430437904822951, 0.004645665370585721, 0.005306837419129412, 0.005375234178991222, 0.004676667419783302, 0.0060485660165806275, 0.005892228459293214, 0.005864561748359533, 0.006567094855773171, 0.007211577575949132, 0.0074121603877484385, 0.007177233448728596, 0.007704455313401321, 0.008158367133858849, 0.007640580089397644, 0.00767318535878118, 0.008532802873032862, 0.008854880793963684, 0.00882606916646, 0.008983530157335417, 0.009629244568479255, 
0.010804983741594124, 0.010092899553125213, 0.011848657335964661, 0.011746080730509357, 0.010863322253531536, 0.012601406703180766, 0.011094703236816912, 0.012917288258127504, 0.014027304670786883, 0.015773718620162094, 0.014339499577528817, 0.014911590545661255, 0.016351359779878914, 0.01615945862021638, 0.01735456280604506, 0.016750543914314433, 0.017808101412344745, 0.01814462769175646, 0.018378091260590805, 0.018517882288155534, 0.019291042173209347, 0.020345101736288482, 0.01766388067813905, 0.018058360460751984, 0.019040799974199753, 0.01732025069268005, 0.016677776420703415, 0.016646648843493376, 0.015928490447173316, 0.016537963488921746, 0.015245030015424342, 0.013990701039342102, 0.013144216788085096, 0.012822373073781545, 0.012277725562063989, 0.009182481104972771, 0.009070467994366182, 0.008541610895651974, 0.008266244559813386, 0.0071919362331139365, 0.007157719318845622, 0.005461793325809906, 0.004881893925573669, 0.005066270565482726, 0.005446520610549211, 0.0043814398564702445, 0.0036492084220018355, 0.0037872888144706196, 0.00332047204035651, 0.0031342154289051715, 0.0028024967038766165, 0.0027913506167167403, 0.002062547402816751, 0.0022577251384688956, 0.002671474945316415, 0.0021720981845749407, 0.0012922949866353817, 0.0010479756668243386, 0.0014188881969507063, 0.0016563017911367986, 0.0015073085703567913, 0.0013096677686951007, 0.001217598342302707, 0.0009192254176842021, 0.0008969372450963904, 0.0013706366822224241, 0.0010643283231653662, 0.0009700012881059649, 0.0012655137115479642, 0.000986513803107153, 0.0007792276690721114, 0.0007615312206144985, 0.0010824837597457123, 0.0006604434700586318, 0.000784815771801195, 0.0007062562450952187, 0.000619741222636618, 0.0007193015226415651, 0.0008113637880399595, 0.0005818601435784814, 0.00047504786282697125, 0.0003987374937619961, 0.0006530724904411985, 0.0007181514985979017, 0.0004452262192431839, 0.0008357501318695596, 0.0004425116759737811, 0.000683166462414429, 0.0001806816196597976, 
0.0003515472278135334, 0.0005351316037798692, 0.0005157871789238049, 0.0005587095451030656, 0.0004873596650320682, 0.00015983393893474515, 0.0003472465507004744, 0.00042765672046595534, 0.0005150864019448609, 0.00022961542968398153, 0.0004003981335358654, 0.00040579515356477713, 0.00023716340695824256, 0.0005452180217909552, 0.0004099678279100991, 0.0003508768060775348, 0.00040079356783685494, 0.00017004448961754654, 0.0004319126676937027, 0.0002751630109979407, 0.00034473322609701455, 0.00037762901595401403, 0.00019358003101119933, 0.0002712819891949864, 0.0003047707988675511, 0.0003811019137533771, 0.00034603542126316266, 0.0002613167288838441, 0.000334602657398661, 0.00020025703922670626, 0.00018220580563992333, 0.0003336391351251846, 0.00023478961642903336, 0.0004093946587911348, 0.0001241551288032248, 0.0001536065726917149, 0.0003065682610157375, 0.00019184308189502518, 0.00025775534541965905, 0.00016673023942788805, 0.00012543971108339673, 0.00026145033934173504, 0.00018456826231649765, 0.0001985630560745278, 0.00021341781412847493, 0.00019612185479114526, 0.00019265278759702812, 4.582396408971071e-05, 0.0001933711379711726, 0.0003625183445097361, 7.386375754531744e-05, 0.00023264410891073498, 0.00011144475936226366, 7.532188203944065e-05, 0.00020490128608862773, 0.0003546465165472945, 0.0001202213868146692, 0.00030583457505701736, 0.00015418810069154232, 0.000199574520149742, 9.315899005841246e-05, 7.327489742479592e-05, 0.0002501246481338328, 0.00013939102549734671, 0.0001554608620661787, 0.00011083494280342073, 6.983621838260403e-05, 0.000235493394706933, 0.0001223351305856877, 0.0002730448574236322, 4.134488739728547e-05, 0.00013034607138743884, 0.0002777433909385923, 3.76171753675161e-05, 0.00014529338265610825, 0.0002622054293009015, 0.00011311566673782313, 0.0001737341391213069, 4.139993095685897e-05, 0.00012025565164441206, 0.0002697137051819607, 4.703618939604897e-05, 0.00012331039478314819, 0.0002138908346401846, 0.0001178918654450548, 
0.0001958469463376877, 0.00015304636971078244, 4.155490263352257e-05, 0.00011884515960761677, 4.110292083258377e-05, 0.00015713484972224522, 0.00023133019814778659, 0.0001790176231699911, 7.282642043020328e-05, 0.00011436923559545409, 3.872192323680361e-06, 3.8458628362309014e-05, 6.328277680436036e-05, 7.693750891077223e-05, 3.9527199390128665e-05, 5.1873977258931604e-05, 0.00010143320269837233, 0.00012495302676328405, 0.0001327227579068683, 0.00015551246334646762, 0.0002051372171464385, 3.707096857568238e-05, 0.00010926314411135494, 7.534015310501233e-05, 4.138052782238298e-05, 3.760937923020714e-05, 7.688397916426844e-05, 0.00020198245703636686, 4.111046357074845e-05, 0.00017771412480824386, 3.8426581597803054e-05, 8.032618604909234e-05, 5.222943044039117e-05, 7.331967338267928e-05, 7.734639640269425e-05, 3.7032669105018964e-05, 4.7617812831917715e-05, 0.00010904561575503496, 7.345720659095953e-05, 6.5377834856662e-05, 0.00012286484668775561, 7.411741996143157e-05, 0.00011584769125728297, 7.523736519757968e-05, 3.639883228183746e-05, 3.666414447679233e-05, 5.529217375211086e-05, 8.29237971315547e-05, 0.00011138960391215893, 3.6495923644870876e-05, 3.846903089081624e-05, 9.6454712492457e-05, 0.0001454657204013997, 0.0, 4.301896390759738e-05, 3.594683731730675e-05, 8.054344455177815e-05, 5.7852275179424435e-05, 0.0, 4.426596084196706e-05, 0.00014216934636955558, 4.510719334659548e-05, 1.7227653458906414e-05, 3.672561186921812e-05, 0.00011561285277760076, 4.134005306815636e-05, 7.379867674691224e-05, 0.0, 5.455658894525764e-05, 4.770167149418316e-05, 9.757471696476871e-05, 4.181638093233352e-05, 8.756437135133314e-05, 3.6485037354360945e-05, 3.6505256631535376e-05, 8.483077378264001e-05, 6.162139328754809e-05, 4.140872752715312e-05, 9.747021120859175e-05, 7.692230496211568e-05, 0.0, 0.0, 4.531979522864298e-05, 0.0, 0.0, 3.7116336895201706e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.72982643171999e-05, 7.472385347715352e-05, 6.824635924243712e-05, 7.471151919153343e-05, 
0.0001257987217255701, 4.788816997347226e-05, 7.856824758561702e-05, 4.606845477316125e-05, 0.0, 0.0, 0.0, 3.844980546483105e-05, 0.0, 3.6694888045699156e-05, 0.0, 4.410817215663462e-05, 4.3642125057739514e-05, 4.593602245674629e-05, 4.788816997347226e-05, 1.722929561699724e-05, 3.9527189517434734e-05, 0.0, 7.362726045305451e-05, 3.705441864975298e-05, 4.596516335833802e-05, 2.4837550624065165e-05, 3.822042000309774e-05, 0.0, 0.0, 4.9430791564068564e-05, 3.787962448123045e-05, 0.0, 3.730081805403053e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.86916469754096e-05, 3.847070924878482e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 4.714527937310874e-05, 0.00018071649001476835, 0.0, 3.8536885916216565e-05, 0.0, 0.0, 0.0, 3.916084346366928e-05, 0.0, 0.0, 4.227255862819378e-05, 0.0, 3.651878880401931e-05, 0.0, 0.0, 3.2349540283492056e-05, 4.275273026120878e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.8257431762845876e-05, 9.096388212579554e-05, 3.7209531835029625e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.183140059070333e-05, 0.0, 4.202578077064881e-05, 7.880863451928947e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.09273681155317e-06, 0.0, 0.0, 0.0, 0.0, 3.752448393507875e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0390582929044375e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.962920406384265e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 3.561384451454437e-05, 0.0, 3.931269865994809e-05, 8.314555100087793e-05, 3.931269865994809e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.3395014819498805e-05, 4.227255862819378e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.579172316222407e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.814438380532507e-05, 3.814438380532507e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.227255862819378e-05, 0.0, 3.848892107819291e-05, 3.80915122384168e-05, 0.0, 0.0, 0.0, 
0.0, 3.814438380532507e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5339339871732e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.615668374794095e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 3.8615008543307035e-05, 0.0, 0.0, 0.0, 0.0, 3.5660792465092944e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.8615008543307035e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.930132860743545e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 4.227255862819378e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1167106947983689e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0390582929044375e-05, 0.0, 3.788031556980574e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.814438380532507e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.788816997347226e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0390582929044375e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.667602926067538e-05, 0.0, 3.75807977812716e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.418596337283539e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.156173027702007e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.579172316222407e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
1500 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.350199889218441e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.999037130845309e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.279433983190236e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.002089305363262e-05, 5.7812366431008564e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00013930338106616294, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010599333185304911, 0.0, 0.0, 0.0, 6.355944173604208e-05, 0.0, 0.0, 0.0, 0.00010125221568098062, 5.2892184938244875e-05, 6.175658065895123e-05, 5.200690943978667e-05, 0.0, 0.00010541696638697754, 7.151248621261196e-05, 0.00010900292578359963, 6.60129752541092e-05, 4.922511295854295e-05, 0.0, 0.0, 0.00012495253916761117, 0.00010663266989306221, 0.00015081764216559417, 7.237341220542173e-05, 5.7243075651294224e-05, 0.0002482847805951941, 0.00010044943541349184, 0.00010141788504787125, 0.0, 5.3774897984476556e-05, 5.790715646506808e-05, 0.00015835456344303326, 0.00010875106032581951, 4.9621899785541144e-05, 7.272771779696185e-05, 5.236285019874114e-05, 0.00016012748232795222, 8.805244884883592e-05, 0.0001560428920822369, 0.0001570003964441011, 0.00016133917863895566, 0.00015680323251091098, 0.00022098236957967241, 5.1319393986183536e-05, 0.00021622786573312805, 0.0002947469159865834, 0.00016226909627765744, 0.00024216370731535458, 0.00024055375873617655, 0.0002709809705408336, 0.00010164696579312348, 0.0003147247705053129, 0.0002741274962884258, 0.00027044846067878105, 0.00017037583407218207, 0.00023660215106772247, 0.00010819322399733422, 0.0003024071827027, 0.00021862597328534564, 0.00010274723061942022, 0.00026026728740746226, 0.00043949259785006897, 0.00021789355075298113, 0.00035565777150018167, 0.0006238488732143084, 0.0002828031233152238, 0.00042874950654122555, 0.00032437253974708163, 0.0001618715725904067, 0.0004044471268316288, 
0.0005310559125843976, 0.0003952478653831234, 0.00031412789714210045, 0.00048208216004458533, 0.000535875475395711, 0.00032534791090023764, 0.0005240254047250111, 0.00035582590810701904, 0.00042930018125040246, 0.0005134307121034315, 0.0005722798491890884, 0.00046338534949388454, 0.0008149822758630057, 0.0009155726751266066, 0.0005077768578996726, 0.0006815315522408704, 0.0006213014231313575, 0.0006012363707229683, 0.0005836789613852355, 0.0006647927366884056, 0.0005980095520458355, 0.0005415042266749115, 0.0009128323498027219, 0.0007475668154477867, 0.0009539048409271945, 0.0012469837371232371, 0.0009571027813058908, 0.0009498203500334233, 0.0014500572564213599, 0.0010418476052296752, 0.001627633490177446, 0.0010332630673770167, 0.0008215824242175958, 0.0017105748776107964, 0.0013621719837772345, 0.0010693525948593438, 0.0009888959333148632, 0.0013530240541665462, 0.001666632572461881, 0.0014087065798532327, 0.001494815837143513, 0.001818124946797596, 0.0011835563962159519, 0.0021133909766924276, 0.0017287521768454726, 0.0023750998939269996, 0.002169059063229896, 0.0018771403990377807, 0.001926249932567948, 0.002121440338739198, 0.0017771065031307477, 0.0022056813820714506, 0.0021849471612018136, 0.0022492321080911076, 0.0019642524486178058, 0.002342223143380363, 0.0027596589472801067, 0.0026268505670942956, 0.0024217290730538404, 0.0022033205149396364, 0.0028283386943021055, 0.0030135892696330647, 0.0029035189727960146, 0.0036048371932977316, 0.0027552225518001124, 0.00260548751465274, 0.0031955161912586476, 0.0038446473524078257, 0.004130977006098405, 0.003952172119966178, 0.0037879621669006528, 0.003311664707570288, 0.00428139373005199, 0.004275682450424504, 0.004414714236392643, 0.003761094486875471, 0.004334076224432751, 0.003949886230434892, 0.004422285649353363, 0.0040058060085642385, 0.005048791855900866, 0.005282014583705223, 0.005031584099171956, 0.004804510594364951, 0.006798807203366696, 0.005496819315631512, 0.005988846174761653, 0.006591011154989392, 
0.005641121258595055, 0.007575525866270585, 0.00539164402284871, 0.00617731707777606, 0.006664231153390349, 0.006817505504196559, 0.007899628864193155, 0.006902281060988405, 0.007562238139850542, 0.007981566399582689, 0.00905237688487067, 0.009808591955884639, 0.009154750187889579, 0.010459932704271986, 0.011045353570176905, 0.010243178024854938, 0.010532888294032212, 0.011914200363617686, 0.010721237325328448, 0.011592916841401183, 0.011605159120430121, 0.013517576480516241, 0.015533023131391257, 0.014906802793504424, 0.014403795822443983, 0.015298011008837908, 0.016788352696762644, 0.015435692138242474, 0.018485686609601783, 0.018395028887838494, 0.01716585299318691, 0.0211224140846384, 0.02191991293524476, 0.019883722658182727, 0.020925524495283458, 0.018804734050728338, 0.019995990884963163, 0.020539969008165033, 0.020182774659496437, 0.019120165610964423, 0.017951210241401082, 0.01752549431400559, 0.017070210188273333, 0.016427103729018157, 0.015151861384651464, 0.01383033640073917, 0.01351054819151694, 0.012862544608699376, 0.011024080066954356, 0.009939642204066554, 0.00988894674856904, 0.007814487162369757, 0.0076452465567288875, 0.006484790944692519, 0.0062629181829892645, 0.005718488074676088, 0.005645239462915211, 0.004281312128988518, 0.003847511867661913, 0.0027549131036113616, 0.003018602436266236, 0.002870209312711349, 0.0025402585918246454, 0.0022118423948328434, 0.0020911814456866097, 0.0019374998843785928, 0.001531385963097068, 0.0018503114000361922, 0.0017404697451399746, 0.0012755038386837265, 0.0019158889819536676, 0.0013621706590846456, 0.0017080696189867997, 0.0011446586529145625, 0.0010084403153012126, 0.0011455362617546628, 0.0011708629268912581, 0.0007275714418695699, 0.001042456301474243, 0.0007025364078064056, 0.0013295826915223747, 0.0005934944035914355, 0.0005232201572175757, 0.0007875045749006671, 0.0007430202717790314, 0.0004911552776413193, 0.0005077683136324746, 0.0006492040192415637, 0.000544391162351065, 0.00039713515491443047, 
0.0005708744828215979, 0.00026595945834435305, 0.00022660343764185776, 0.00023007042308534548, 0.0007426552527361791, 0.0004562908589824575, 0.0003443203899786741, 0.0003396937024082945, 0.0003655384548163816, 0.0002666330148498014, 0.00031088064508178506, 0.0005710788828880545, 0.00042297765534511655, 0.00022465209922388067, 0.0004962218956204496, 0.0004784847917330419, 0.00023104889070742793, 0.0002104911023319657, 0.00013354298846094735, 0.00026392221361199033, 0.0003168210965271375, 0.0006310660633767758, 0.0005636291429416178, 0.00037561933230006853, 0.00018381119748125033, 0.00039527985670914363, 0.0002593263748202731, 0.00020681302712444782, 0.00028554450839317954, 0.00016801937154322586, 0.00015840947195084031, 0.00024782133689299165, 0.0002169422890049418, 0.00016723103042495536, 0.0001697601500742083, 0.0, 0.00015829538280162726, 0.00015717331850291556, 6.94656788556236e-05, 0.0006125119553352069, 0.00016658658403914656, 0.00016519969713327026, 0.0001409562165679044, 0.0002128044792344702, 0.0001587102930791064, 0.00016573541937485076, 0.00010330057127243217, 1.4652493029518053e-05, 5.191389118206472e-05, 5.763435672472009e-05, 0.00022794150962583565, 0.00017910034225550791, 5.092222217040171e-05, 0.00011595263567439613, 0.00034649192542222956, 0.00011483511672712985, 0.0002666432149827354, 0.0, 0.0001034937942451705, 0.00017142215907212736, 0.00010092480962832242, 0.00010865824077198587, 0.0001642981444746268, 9.59780012970652e-07, 0.00010894198992451341, 0.00015988640483544238, 5.280464345644486e-05, 0.00020857562996583122, 5.0037248867440336e-05, 0.00017577097581334235, 0.000184367237195412, 9.111179326542572e-05, 0.0, 0.00017269517209133632, 7.526499258496479e-05, 4.443002384280828e-05, 2.3632853167383516e-05, 5.746764830208009e-05, 0.00012154731751607769, 0.0, 9.090642451751355e-05, 5.094116527442199e-05, 0.00010239040811196131, 0.00017532468688016527, 5.3035247598174246e-05, 5.805865576092257e-05, 2.994936000071893e-05, 0.00015796285184451603, 
5.093315088425956e-05, 0.00010702872813913654, 5.501102659441525e-05, 6.289200365448841e-05, 5.153748392261281e-05, 0.00010784633668429154, 4.958400115850757e-05, 8.770501510010034e-05, 0.00011239827847534633, 5.153748392261281e-05, 0.0, 0.0, 0.00011863696789841663, 1.5337307610868297e-06, 5.763180255182226e-05, 6.289200365448841e-05, 0.0001711314056075364, 0.00015428835330698798, 0.0, 0.0, 4.246849357122589e-05, 0.00011571543290770862, 6.759422354041737e-05, 0.00010414016143523308, 9.460282187940072e-05, 0.0, 6.58805018555618e-05, 6.242174606478218e-05, 5.238887626844732e-05, 0.0, 0.00011090019190596636, 5.362666902345074e-05, 0.0, 0.00015988895486867587, 9.753365455621658e-05, 5.1228884365051674e-05, 0.0, 0.0, 0.0, 0.000106328719178555, 0.0, 0.0, 0.0, 0.0, 5.109338073220732e-05, 0.0, 4.8913338278432966e-05, 0.0, 0.0, 0.00011205213630188488, 5.1228884365051674e-05, 0.0, 4.0195594306638845e-05, 0.0, 6.997457607228028e-05, 0.0, 0.0, 0.00010701136638689418, 0.0, 7.151248621261196e-05, 0.0, 0.0, 6.759422354041737e-05, 5.494348383104224e-05, 0.0, 7.33801620142757e-05, 0.00010983612430465942, 5.886509963135232e-05, 5.286278918176585e-05, 0.0, 0.0, 0.00016763420061436597, 0.00010264239775967163, 5.3035247598174246e-05, 0.0, 5.547388660394571e-05, 3.051614628350751e-06, 0.0, 0.0, 0.0, 0.0, 6.289200365448841e-05, 0.0001167482212052618, 0.0, 0.0, 0.0, 3.4853456827689175e-05, 9.145248764061561e-05, 0.0, 5.3035247598174246e-05, 4.8913338278432966e-05, 0.0, 0.0, 5.805865576092257e-05, 4.8913338278432966e-05, 0.0, 5.1614821131814485e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 5.805865576092257e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.177747268340119e-05, 0.0, 0.0, 0.0, 5.1228884365051674e-05, 0.0, 5.1228884365051674e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.202619199628284e-05, 0.0, 3.051614628350751e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.242174606478218e-05, 0.0, 0.0, 0.0, 5.1614821131814485e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010628100540737105, 5.1228884365051674e-05, 
0.0, 0.0, 6.475102755768133e-05, 0.0, 0.0, 0.0, 5.811857326258112e-05, 0.0, 0.0, 0.0, 0.00011487473331486457, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1109475406011871e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.44281784139871e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.279776833545047e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.202619199628284e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.238887626844732e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.811857326258112e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.1614821131814485e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.695223335736152e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
2000 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.489781837941068e-05, 0.0, 0.0, 5.2478180986745645e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011868807143030399, 0.0, 0.0, 0.0, 5.956603142989399e-05, 5.3663279971668236e-05, 0.0, 5.661897675644882e-05, 5.310369057858382e-05, 0.0, 0.0, 0.00010658677491843884, 0.00012261014906095362, 0.0, 0.00017718613408014192, 0.00011856564095598046, 4.102681860133824e-05, 5.302299592099911e-05, 5.31667424758063e-05, 0.0, 0.0, 5.014261911086313e-05, 0.00011212806288239496, 0.0, 0.0, 2.99723536350873e-06, 5.403295600852979e-05, 5.5355364781969436e-05, 5.30692033115874e-05, 5.4482298443936533e-05, 0.0, 0.0, 0.00013501616202134505, 0.00010560313014448526, 0.0, 4.1989522744654814e-05, 0.00016656672120091097, 0.00018208044416237893, 0.00010976320238874832, 0.00011148544157186957, 0.00017084272288701743, 6.671695744728217e-05, 0.00010395435059893056, 0.0, 5.318743618897657e-05, 5.781096913585344e-05, 0.00024364587117571654, 0.00014281012200584688, 0.00016737241627726922, 0.00011566956290090555, 0.00022961627778110067, 0.00011473488896370896, 0.00014941805283688706, 0.00011322388936458741, 0.00048801141544130126, 0.0002882601637984111, 0.00022654950872546596, 0.0004120226394698338, 0.00021549056922573836, 0.00039320203543727973, 0.00012319786675065852, 0.00011091315453261954, 0.00010924727341567077, 0.0002132875242335736, 5.326308087700208e-05, 0.00016382547730962855, 0.000325213764220015, 0.00021062759398475823, 5.3139967151606245e-05, 0.0004140530451811534, 5.47577162746987e-05, 0.00015065098954416455, 0.000303095831101663, 0.0002300240907280627, 0.00036596166755688427, 0.000287172509269638, 0.00022148239700569333, 0.00035956392099135585, 0.00010826328363374998, 0.0006031231207538205, 0.00010804603685220365, 
0.0003532521152339723, 0.000227388256918055, 0.00033901843947946076, 0.00026949500457564576, 0.000328425747805533, 0.0004181272780902031, 0.0003410961992254949, 0.00048637951422669123, 0.0003697532374678605, 0.0005203580094741436, 0.0005092227758596288, 0.00037782547681979365, 0.00024127724941874114, 0.0003632231431401669, 0.0007799374038939351, 0.0003926646618513386, 0.0008014621668437279, 0.00047436869098057936, 0.0006530466124514729, 0.0006469664085130163, 0.0008130540015906851, 0.0003512731765935254, 0.0006495043141792342, 0.0006697766576188956, 0.0005144185687863072, 0.0006965990652591759, 0.0009086091941117371, 0.0011133289663741242, 0.0009635621713661504, 0.001240531536781147, 0.0009606994982001355, 0.0009015991192881764, 0.00073704563457857, 0.00099047333399721, 0.0013406930592047505, 0.0012473275713677978, 0.0007558639656174577, 0.0018387696153138104, 0.0014436090170007352, 0.0015489379199301882, 0.001447839382927101, 0.0008014155163546719, 0.001479535738414691, 0.0012566911146572252, 0.0012857021987225654, 0.0017441413171385375, 0.0017816316417193208, 0.001603490200872571, 0.0015605450162060478, 0.0016712275768841936, 0.0019668073485365084, 0.002613081143408065, 0.001961992282048935, 0.002158288664497394, 0.0020246888075354658, 0.001668945815956609, 0.0019564429303693532, 0.002069959481198201, 0.003232134648509945, 0.002224970938496679, 0.001999328908962759, 0.002679256431581971, 0.002789845150093037, 0.003392581591329102, 0.00231122758452338, 0.002922838658935031, 0.002027670758750064, 0.0035612030852746244, 0.00368612768307684, 0.003297337095394675, 0.0035931845391106447, 0.00303076602657624, 0.003245338144004103, 0.003571726396522833, 0.0034780810057515447, 0.003296412961112633, 0.004149169397191483, 0.0032845269360425805, 0.00429331875894768, 0.004388791420151086, 0.005073299349721274, 0.004330776395714785, 0.0038972502620216076, 0.004290025299363004, 0.005257010923903983, 0.0048182027858814985, 0.005131859459686312, 0.005239822762275008, 
0.004933062567729336, 0.004194288970894307, 0.005302956135496642, 0.005447031462601994, 0.005839517721509976, 0.005503207310448876, 0.005367077503298562, 0.005791842004072708, 0.0063496549984880125, 0.0069854399017297356, 0.007281108753086211, 0.007226417494816803, 0.007376909591279914, 0.006534651138805562, 0.007999799909690509, 0.008207822558225617, 0.008027297504687473, 0.008083613195763732, 0.007308109320134812, 0.008461487785482386, 0.009386604866690114, 0.010597439416507665, 0.008376782619989625, 0.010417226953717535, 0.00977308739166084, 0.010871807503028273, 0.011015819186310966, 0.01271081091197633, 0.012698125442562502, 0.013071836773786843, 0.01366865304066234, 0.014517753420547843, 0.01369384473770374, 0.015377447249771827, 0.01603787270320977, 0.015972869413865325, 0.01735702297787209, 0.01742689090661359, 0.018017678761334883, 0.01679808474335417, 0.02067929459731049, 0.0219159607460054, 0.02295515093677684, 0.021547079418084154, 0.02070661035281415, 0.020202895906510317, 0.020077274257317104, 0.018984233599457202, 0.018368376139925726, 0.01940778280628443, 0.017609062906419656, 0.017646577259790627, 0.01608355078392774, 0.015419445245506265, 0.012058300671911208, 0.013357622648682245, 0.009988109128883456, 0.008861236800343167, 0.00793551531929579, 0.00899991452515561, 0.007373177552155436, 0.005591189483381681, 0.006219789994833197, 0.004718241719851704, 0.003916685007295237, 0.0032715284429295642, 0.004054992558853455, 0.0028615308243408167, 0.0020651182211645688, 0.0025224034164626156, 0.0026679977527638175, 0.0022959673544746015, 0.0021132459396280504, 0.0017717257188459967, 0.0017605032996887101, 0.0014045400441998097, 0.0009908891836003493, 0.0012330169114825645, 0.0010514166237024674, 0.0010394815669118988, 0.0014284344030467207, 0.0008359905100828377, 0.000811817980106289, 0.0009275753779767281, 0.0007937594783787074, 0.0008432206323408549, 0.0008070768941506642, 0.0007492512317343251, 0.0008596310513036785, 0.0009102699948172473, 
0.0006032061391415373, 0.0005351919990915941, 0.0004254740265708603, 0.0007096783037662426, 0.0005907018056753541, 0.000642502086595712, 0.00046711849061184307, 0.0006641985146862073, 0.0006579816605332886, 0.00044392708211568393, 0.00038316227653212797, 0.0005658868843935916, 0.000274058986441438, 0.00037681061220723894, 0.00022753389086938865, 0.0006182546560659691, 0.0002073419545806727, 0.00010906829545903592, 0.00034317404014987433, 0.0004919924014911358, 0.00012294587563735587, 5.490092164053772e-05, 0.0003821659205822846, 0.00011555641381738608, 0.0001718432595216399, 0.00043646746867571554, 0.0, 0.00012608395986927074, 0.00017414664094850282, 0.0002125621957189881, 0.00011296603988070436, 0.00017483346386800325, 7.300541090068932e-05, 0.00011337314928200779, 8.550935200487053e-05, 0.0002542800179246059, 5.7737800387357296e-05, 0.0002461789332006694, 0.00012226949766494107, 0.00025422424840143624, 5.4537158093153386e-05, 0.00017527136692159536, 0.00013066382563127867, 0.0002766753645080401, 5.900567760739204e-05, 5.320089149969762e-05, 8.621466329249595e-05, 0.0, 5.6199118972514005e-05, 0.00020139917205095785, 0.0001183424681552327, 0.00011211477669322648, 0.00016872526693016823, 0.00012381639162237627, 0.00018145064903039908, 0.00012611256170623835, 5.7040850469290054e-05, 5.507392622393642e-05, 0.00010346903262686492, 5.8441508402664516e-05, 0.00018274440184797429, 6.242607982941321e-05, 0.00012443153406304302, 0.0002310509426762623, 6.060311184959193e-05, 9.949851181870226e-05, 0.0001138428065983071, 9.348421116684418e-05, 6.139449924260426e-05, 6.0053696809109046e-05, 8.666642078367314e-05, 5.533848306859368e-05, 4.7052281275664757e-05, 0.0, 5.507392622393642e-05, 0.0, 0.0001371960035364997, 0.0, 6.373066995607597e-05, 0.00012161847439568487, 6.689607746602116e-05, 6.20125505739749e-05, 3.185611108856179e-05, 0.0, 5.900567760739204e-05, 0.00010426303802151076, 0.0001739254976063857, 5.481865753494532e-05, 6.181873727475017e-05, 0.0, 
5.507392622393642e-05, 5.4490412896027985e-05, 0.00011879584921331439, 6.242984172694779e-07, 6.181873727475017e-05, 5.391457092174331e-05, 6.997896718804513e-05, 5.4490412896027985e-05, 0.0, 0.0, 5.4490412896027985e-05, 0.0, 0.0, 0.00017727162840738806, 0.0, 0.0, 5.490092164053772e-05, 6.242607982941321e-05, 0.0, 5.641177917755513e-05, 5.481865753494532e-05, 6.181873727475017e-05, 5.4358548821503106e-05, 5.741440615456715e-05, 6.638864515974381e-05, 0.0, 0.00011917765803035604, 0.0, 2.0977964219727932e-05, 0.0, 0.0, 0.0, 0.0, 6.139449924260426e-05, 0.0, 0.0, 0.0, 0.0, 5.7431020655889874e-05, 0.0, 0.0, 6.679583573939249e-05, 0.00012453789122501198, 5.502567584499331e-05, 0.0, 0.0, 0.0, 5.4490412896027985e-05, 7.047548100688972e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 5.8242134386791885e-05, 0.0, 5.6523734262913574e-05, 5.507392622393642e-05, 7.480841577244735e-05, 5.7431020655889874e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 6.950355297409696e-05, 0.0, 0.0001240072960308971, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.4358548821503106e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 6.622025421229874e-05, 0.0, 5.741440615456715e-05, 0.0, 0.0001112304333300694, 0.0, 0.0, 0.0, 0.0, 0.0, 6.561166692300892e-05, 0.00013549718430597588, 0.0, 6.782977726307505e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.679583573939249e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.900567760739204e-05, 5.2027444989745866e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.7040850469290054e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.490092164053772e-05, 0.0, 0.0, 3.7072431989289053e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 5.641177917755513e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.507392622393642e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.881626461224015e-05, 0.0, 5.6199118972514005e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
2500 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.2527043716421672e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.6875973031081096e-05, 5.8756313606839456e-05, 7.142975082848527e-05, 0.0, 0.00011672177690525046, 5.3665072994390557e-05, 0.0, 5.670820798612232e-05, 5.6171407453593124e-05, 8.915952522288577e-06, 6.0737529624749755e-05, 5.6458577408130776e-05, 6.027656529843115e-05, 0.00012380149642893888, 0.0, 6.0778414779927276e-05, 6.050365407162018e-05, 0.0, 0.0, 0.0, 0.0, 0.000173618538778203, 0.00017462641022580153, 0.00011581276391035723, 0.0, 6.447548285441203e-05, 0.0, 0.0, 5.6156518089437856e-05, 6.086925721448854e-05, 6.713834175429685e-05, 5.643281967380287e-05, 0.00012143362872807678, 0.0001982032855353145, 0.0002498930463178145, 5.9207028510390715e-05, 0.00036320581673364574, 0.00016098106975892063, 0.00012270774278262846, 6.412481668703782e-05, 0.0, 6.525069933047745e-05, 5.7218618852007725e-05, 0.0001184456363233619, 0.00014512193188375637, 0.0002492234231347525, 0.00037166051709743074, 0.00017728582379607368, 5.612607713068669e-05, 0.000186154950566585, 0.00017931598859864442, 0.00018936975473306277, 0.0, 0.00012086404995288832, 5.834434134821663e-05, 5.995040165736012e-05, 0.00017433340138979717, 0.00022555229468749387, 0.00012212427880970453, 0.00029377034041483136, 0.00017406864771925956, 5.743996396640779e-05, 0.0002214157696269485, 0.00031909863777935713, 0.00023889763177986027, 0.00034715688274853713, 0.00018222431424852155, 0.0004344090414684013, 9.292725718085857e-05, 5.747718737679596e-05, 0.0003219444454129963, 0.0004358864819097431, 0.00042574720581071675, 0.0007979118144508739, 0.00018273635255653555, 0.00018822954107662368, 0.0003244185037127495, 0.0002307909789598117, 0.00020313796708065288, 0.00034282816369788987, 0.00029363505495958803, 0.00024010718485890616, 
0.0003623870055843908, 7.919655391168982e-05, 0.00036118193662780567, 0.00023322646341847428, 0.00017507064998726666, 0.00017268102823284515, 0.000207539003426738, 0.0004209221515372751, 0.0006570838453403908, 0.0003620150831183635, 0.00023239059875329734, 0.00024588489874007296, 0.0005090212034397195, 0.0003616160481590024, 0.0005085450554246054, 0.0006634756069872493, 0.0004521576483114761, 0.00042247809009150056, 0.000422355824173286, 0.00029434901728404725, 0.0004576608958089761, 0.0006115075016611165, 0.00047749463690949717, 0.0007999524883744241, 0.0005438846842357071, 0.0006741050894312666, 0.00032712777933972765, 0.0007592064621856883, 0.0008594193622591454, 0.0002546392735273039, 0.0011078852457094633, 0.0004357573253323032, 0.0004989599060571529, 0.0008059851047072862, 0.0008691612308409392, 0.00048648272277432346, 0.0008360889751792587, 0.000784278350848889, 0.0004874058979783783, 0.000988328429989765, 0.0006881877963136015, 0.0008727873104091709, 0.000859611123418894, 0.0016069851117896758, 0.0010252753137217855, 0.0011388107320713804, 0.0008450171919496094, 0.0011351144993594776, 0.0012823658094193997, 0.001534781806553682, 0.0008682835893900568, 0.0011202440412340442, 0.001325017812725723, 0.001449384585596138, 0.0014279091362785715, 0.0016083265396207806, 0.001984354314983521, 0.0012893768301094045, 0.0013517383348916228, 0.0016366206251928943, 0.0017676722377990288, 0.0023309375763213695, 0.002011276916984793, 0.0020648992652950045, 0.0017747310429600434, 0.0018889952091189679, 0.002242135884936509, 0.00204578533838532, 0.001750052926039107, 0.0024336464878501017, 0.002508162838494402, 0.0021703079301829413, 0.0022287206370854695, 0.002614095054977752, 0.0020588727432135174, 0.003608578263339309, 0.002395564896063737, 0.002685269816788471, 0.0033453210085257223, 0.0028305213121101996, 0.0029280362593987223, 0.003038560771304696, 0.0031848374859145853, 0.0032125073262378825, 0.0030969481316617323, 0.0033260667753286953, 0.0034707467960782877, 
0.0036474687284900158, 0.004008416832847727, 0.00433190579009362, 0.003253016646162978, 0.0037849932670908693, 0.004338935786066316, 0.004204512667392577, 0.004966309601929855, 0.0037368478501926742, 0.004286849050596937, 0.005152128866587596, 0.005068396068784041, 0.004800605798993025, 0.004049559954915841, 0.004538405065758753, 0.006320882179228076, 0.006765899373260682, 0.005396653500719586, 0.005331441686868054, 0.00638344410177711, 0.0058109274126558206, 0.006308419435165834, 0.007198361855437565, 0.005171591135365657, 0.006924204755028043, 0.0067749498905756746, 0.007003528638973082, 0.007343566258816849, 0.0071910736848157025, 0.008921632762774167, 0.008910081940326794, 0.00739271639615767, 0.008012149956502203, 0.009762632804274687, 0.011021958880853082, 0.00993151780642936, 0.008010651878709614, 0.011383211697779552, 0.012025207838783556, 0.011783219521269345, 0.011283384536117, 0.012414911945266477, 0.012078488216488192, 0.013801692087059668, 0.012845939508255931, 0.013813873941535798, 0.015441506653996007, 0.016458966298087695, 0.01716852663397405, 0.018253016394918484, 0.01945562715210805, 0.018371022154410343, 0.020104707029301422, 0.021259428051139125, 0.022174237233112997, 0.020466960411499126, 0.02019675238598072, 0.021861495765178658, 0.02143624776858428, 0.021500235191932258, 0.021739847859455853, 0.020669254099692533, 0.018655620369494103, 0.01917889273738968, 0.015829643983852656, 0.016920263453634815, 0.013666120479013133, 0.011800714454898928, 0.010815465699439324, 0.011573674781985179, 0.009065574548073085, 0.007677637052541463, 0.006237556588220954, 0.007404155305988882, 0.006116782930559059, 0.004552880851854787, 0.003499067336241598, 0.00469235250377001, 0.003525492564227228, 0.0027825055365594403, 0.0019124655562219141, 0.0030537692526228823, 0.0022047392039012867, 0.002340398347558483, 0.0019355529350282158, 0.0013112786000118073, 0.0012999815200307816, 0.0014016181203289086, 0.0012181008206224293, 0.001145575281844973, 
0.001370529404984136, 0.0013545599732783307, 0.0008412124400115146, 0.00104811352075027, 0.0009090864029211522, 0.0008498278416288916, 0.0009763590432774868, 0.0009277283023606866, 0.0005816009325774155, 0.0006924246860764817, 0.0005767754281604063, 0.0003672385144490293, 0.00038952498396949045, 0.0005023407612603902, 0.0006947590613646016, 0.0003066833319238427, 0.00051549329840016, 0.00044366267741161535, 0.00045933646867286773, 0.0005410586829190373, 0.0005742700329430706, 6.675684940915698e-05, 0.0005025792334718723, 0.00035871168723516195, 0.00026957235354826, 0.00048452421736505343, 0.0007867678525356498, 0.00033833725445723504, 0.0004453786246908676, 0.0004680208461353548, 0.00019184367452710287, 0.00031066076585971043, 0.0002993857082870633, 0.00024138105652845996, 0.00018498274209042623, 0.00027115112823192824, 0.00030113880972384717, 0.0001240558794842102, 0.0002447051590157659, 0.00017278499408372, 0.00018455657732451702, 0.00029797336553086555, 0.0004715599094893488, 0.00012521676511832535, 0.0001887652984880013, 0.000116839264376364, 0.00019061246261717533, 0.00012409717149991988, 0.00023277245900423757, 6.309915158361301e-05, 0.00011985437793101983, 0.00029721916729675885, 0.00011970765709784098, 0.0, 0.00014272542812671657, 6.610737015073997e-05, 5.811652109173043e-05, 5.959008337310618e-05, 6.044503152856235e-05, 6.675684940915698e-05, 1.7031735898641056e-05, 7.153694126549259e-05, 7.296185774345524e-05, 0.0, 0.00025158589776941184, 0.0001392606211481434, 0.00018640130029049814, 5.812963585143696e-05, 0.0, 0.0, 0.0, 9.368397448762179e-05, 0.0, 0.00012665171893218218, 7.176237229842829e-05, 0.0, 6.610737015073997e-05, 0.0, 0.0, 6.310193901108859e-05, 0.0, 0.0, 0.0, 0.0, 0.00011968798062997765, 0.0, 6.695651405115771e-05, 0.0, 5.959008337310618e-05, 6.309915158361301e-05, 0.0, 0.00011786072669705777, 0.0, 0.0, 6.309915158361301e-05, 6.610737015073997e-05, 5.889464185946684e-05, 0.0, 0.0, 0.0, 0.00013258102731630658, 0.0, 6.603921668330136e-05, 0.0, 
6.09980242446104e-05, 0.0, 6.675684940915698e-05, 0.0, 7.32285158675476e-05, 0.0, 5.862166441221555e-05, 0.0, 0.0, 0.0, 0.0, 0.00012537851382137254, 5.862166441221555e-05, 0.0, 0.0, 0.0, 0.0, 0.00018091340961393502, 0.0, 0.0, 7.081424009619922e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 1.3425713688983638e-05, 0.0, 6.139749203432493e-05, 0.0, 5.827064765225512e-05, 0.00011098227328767746, 0.0, 0.0, 5.563681435586601e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.8486797335525857e-06, 0.0, 0.0, 0.0, 0.0, 8.046794979162247e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.917755276332753e-05, 0.0, 1.7031735898641056e-05, 0.0, 5.053723743081158e-05, 0.0, 5.917755276332753e-05, 0.0, 0.0, 0.0, 7.483371051598439e-05, 0.0, 0.0, 0.0, 0.0, 2.733468679578698e-05, 7.350562251636985e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.889464185946684e-05, 0.0, 0.0, 0.0, 5.889464185946684e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.862166441221555e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.075295374269102e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.917755276332753e-05, 0.0, 6.09980242446104e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.695651405115771e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 6.309915158361301e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
3000 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.201530330693707e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.05048786859084e-05, 7.489154650165801e-05, 0.0, 0.0, 0.0, 5.975489547814596e-05, 0.0, 0.0, 0.0, 0.0, 6.197172745067125e-05, 6.861405014881978e-05, 5.973173314065296e-05, 6.202000510520979e-05, 6.076458102435491e-05, 0.0, 0.0, 6.106723914805931e-05, 5.220001378938466e-05, 0.0, 0.00018983789313918486, 6.412606895996446e-05, 0.0001307374704037849, 0.0, 0.0, 6.517211898544153e-05, 6.789322492577661e-05, 0.0, 0.00012677374216356175, 0.0, 6.143864215201276e-05, 6.002650122696514e-05, 7.31457390200607e-05, 6.116139717473565e-05, 0.0001322213442677969, 0.0, 4.4324447975649114e-05, 0.00013162854661931983, 7.329449259012801e-05, 6.246478643340065e-05, 0.00019536535257732297, 0.0002611875403366195, 0.0, 0.00012874901497768353, 6.0612356033132983e-05, 0.00025367759182491303, 0.0002537726042801436, 6.310348398381106e-05, 0.00010789604651085489, 0.00019173423628469116, 7.868222610618037e-05, 0.00017858812549295279, 0.00033985609558593216, 0.00012230606707960387, 0.00019740174388654117, 0.0003818694900905442, 0.00012593905823260612, 6.25987315360109e-05, 0.00021355788441361587, 6.91384739831628e-05, 8.014071123600952e-05, 0.00020840053413047766, 0.00012601815390448357, 0.0002513654788624687, 7.173393010153702e-05, 0.00020811317271911844, 0.00023150037627779838, 0.00013209268197487625, 0.00019954705286540393, 0.0003710185403987342, 0.000209255431399802, 0.0003639648469700897, 0.00012629036019451515, 0.00017528971879688995, 0.00029402894214093574, 0.00024856277743642895, 0.0001689562256378318, 0.00019560726815449482, 0.00018747431916553138, 6.52983156416771e-05, 0.0004413927914884534, 0.0005369867268509912, 0.0005466224936056262, 0.00025091603967804504, 0.00019096333666592113, 0.00031279279861552565, 0.00026781103142943275, 
0.00041908253897422043, 0.0003390971286722281, 0.00018910040900055862, 0.0001248938626356833, 0.000327195358802384, 0.0003240212983144307, 0.0003793276872029269, 0.00033433193668323786, 0.00045147629833530137, 0.0002975769982131985, 0.0005187878868907436, 0.0003936078736910237, 0.0006385207276417299, 0.00045014729574980493, 0.0005850838479596883, 0.0006670739136531703, 0.0004957888966075155, 0.00043576621833802445, 0.0003301955650505711, 0.0005588796276348486, 0.0005095773619038764, 0.0004130582610677143, 0.0009933158668976004, 0.0005874764041499013, 0.00037663816094195615, 0.0003925502180845557, 0.0009262716412428994, 0.00065824472745373, 0.0005041370248835766, 0.0007919977349951894, 0.0006105630050958559, 0.0005459835568152206, 0.000775368582820492, 0.0008583396692028007, 0.000589820799864349, 0.0009145855584348367, 0.0007370361251168968, 0.0008486882786117453, 0.0017053662746920716, 0.0007735698886526122, 0.001312931751116208, 0.00112035554628683, 0.0007901117035817204, 0.0012288323112676295, 0.0007815990563081542, 0.0012226426159930243, 0.000996887007188174, 0.001116873539983034, 0.0012875604375109308, 0.0012294791380954274, 0.00090233123669617, 0.0007020812283706284, 0.0011301926261737397, 0.0014602881205520392, 0.0018586450155613583, 0.001887792219833496, 0.0016391319887173958, 0.0018976911701126299, 0.0021888747968136896, 0.0007350949415608644, 0.002602159955045817, 0.0020363945902238416, 0.002381598100062964, 0.0021082393958570243, 0.0021235563801299683, 0.0016383868879585148, 0.001556628601878327, 0.002427179861627185, 0.001832753037607377, 0.0028588389639662396, 0.0021842679524068185, 0.0021207495579170727, 0.003622601145544412, 0.0028866484553640766, 0.003362753310121237, 0.0032750020328669747, 0.0021101786264335043, 0.003085868618511978, 0.0019380375403307937, 0.001968611200868099, 0.0028754477270344085, 0.0037972575130724166, 0.0034384426731823474, 0.0037720272045908324, 0.0035507986805079278, 0.0034568397405671768, 0.0037124403922978748, 
0.0037137006109435313, 0.003217321328617286, 0.0031632359816046226, 0.004256098028585215, 0.003460609459818652, 0.003556687929408172, 0.00461622402082767, 0.003999928044255363, 0.005230794813382454, 0.004981469944265037, 0.004686624090307074, 0.0047617553699312535, 0.005353925643256887, 0.004540795575834777, 0.004646934233695288, 0.005824879944099751, 0.0050865880531305785, 0.005088071692637044, 0.004943554018040405, 0.007102999756572936, 0.004763245258972287, 0.004703779062696494, 0.005335691376247557, 0.006733658513133097, 0.0063252401799458735, 0.00605694578607719, 0.008087765166512833, 0.0069304126099404275, 0.007772266163190958, 0.007744841955599244, 0.00800064290491285, 0.00805984162110913, 0.008793858830433725, 0.006970918968243535, 0.008344353557182018, 0.008602231226832601, 0.00801161583770751, 0.010167795256998027, 0.010052323231736795, 0.009765195865447157, 0.011305476778563897, 0.010684059308413036, 0.010862282285319052, 0.01205146872107442, 0.01385839165112004, 0.014519059948079277, 0.013324588905893128, 0.012395198121947294, 0.014466692598119382, 0.017905384002777542, 0.015873331589171612, 0.01676752624433685, 0.019629018135727247, 0.01857719896959243, 0.02014353981690467, 0.019834447836422006, 0.021889577281374084, 0.021221199558571643, 0.02146242659327245, 0.023092870166556555, 0.020290667609613674, 0.021753202437842945, 0.021982922829496977, 0.021068848404870628, 0.01965523743305423, 0.019389370983365253, 0.0175620520723032, 0.014852360125664913, 0.013809634032326677, 0.012526025878595791, 0.011721735777820251, 0.009231585605884943, 0.008930238048639062, 0.008487371655959043, 0.006928364637462463, 0.006281445316553035, 0.004889419612181383, 0.0045915899179438825, 0.00373849126466825, 0.0031022764590437828, 0.002294592704759404, 0.0027145453351838673, 0.0023036856213176178, 0.0029380768127551145, 0.0016217153170679365, 0.00157442946366536, 0.001648158816447552, 0.0016672656747209527, 0.0013901289706302357, 0.001304102252440039, 0.001002898903204317, 
0.0010541386027705807, 0.0011503307357700584, 0.0011985806579989453, 0.0008635305326150602, 0.0006808122654942132, 0.0005237018178024546, 0.0005703919733819182, 0.0009945849911300161, 0.0008044871173527986, 0.00021278145786271218, 0.00047115561397513957, 0.0007283670051215818, 0.00021901923549390084, 0.00026503000807642606, 0.0004578826962210554, 0.00037817332004901766, 0.0004903553559560586, 0.0001937495940041286, 0.0002625111136389599, 0.0004327647231233327, 6.421861089606109e-05, 0.00018917751263329249, 0.00031491590221687616, 0.0002306286833843223, 0.00039938861504732934, 0.00033105241529944796, 0.0002993351680553633, 0.0006488561297912632, 0.00012741025725932222, 0.00015327941464723222, 0.0002141330173620403, 0.0005832178150568373, 0.00025048083771455937, 0.00019217598072967786, 0.0005114064444335936, 0.00013919587367027962, 0.0003881052756825893, 0.00022900802283245107, 0.0001949778228447145, 0.00013016826474830236, 0.00010937437391293774, 0.00010937437391293774, 0.0, 0.00012915335009934151, 0.00033071357334708406, 0.00019927769792551904, 0.0002018876793293152, 8.839906104204087e-05, 0.0002582243039913618, 6.535999561839488e-05, 6.269561396916301e-05, 9.348720749473519e-05, 0.00013114800623552215, 0.0, 0.0, 0.00025192684330504784, 8.685108062421706e-05, 6.20313470339611e-05, 0.0, 0.00012593460543922633, 0.00020893268199895094, 0.00014746745491277938, 0.0, 0.0, 4.220289211059398e-05, 0.0, 0.00011489700949158109, 2.3881107205065802e-05, 0.0, 0.0001270416714283733, 0.0001910181958615517, 0.0, 0.0, 7.824956301611295e-05, 0.0, 0.0, 7.03738409640132e-05, 0.0001248640601677117, 9.854747638492937e-06, 6.20313470339611e-05, 0.0, 6.535999561839488e-05, 0.0, 6.337099335801882e-05, 7.127778731479931e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00018212795952129936, 0.0, 0.0001264327226389968, 0.0, 0.0, 0.0, 0.0, 0.0001264833341041006, 0.0, 0.0, 0.0, 0.00012453001278014926, 7.676077740852631e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 6.617956226700172e-06, 7.603972271038571e-05, 0.0, 0.0, 0.0, 
0.0, 0.0, 2.926859415744966e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00012549544869214677, 0.0, 0.0, 8.685108062421706e-05, 0.0, 0.0, 6.717148180234375e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00014019217642410487, 0.0, 0.0001330694549331762, 0.0, 0.0, 0.0, 0.0, 0.0, 6.240502037664436e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.3798835147409706e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
3500 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.130675171338496e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010509075959489036, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00028795172607081637, 0.0, 0.0, 9.519253116751466e-05, 0.0, 0.0, 9.53770007759026e-05, 0.0, 0.0, 0.00019774159642445974, 0.0, 0.0, 0.00010498129895154257, 9.752656632661217e-05, 9.407685842804524e-05, 0.0, 0.00018725658380279008, 0.00018882862141813184, 9.692259053936547e-05, 0.0001596374237845921, 0.0, 9.408847278253293e-05, 0.0, 9.475751635187158e-05, 0.0, 0.0, 9.589348241165058e-05, 0.00018669502438176423, 3.566662833794227e-05, 0.0, 6.633279701057805e-07, 0.00011623939510149584, 0.00023202662293291142, 0.0, 9.275087487529994e-05, 0.0002012744345300422, 0.0001862394911993472, 9.341462496041126e-05, 0.00014612149516530557, 0.0002481984618631296, 0.00019741993653446107, 9.596265973787467e-05, 0.00020721650266771114, 9.372943561018971e-05, 0.0, 0.0003230492290234467, 0.00019072271030862772, 0.00038787036019044893, 9.405223051713915e-05, 0.0002864002948839085, 6.641705948261748e-07, 0.00019925774423960464, 0.00010009530537879838, 0.0003137715026215061, 0.0002931734498298973, 0.0002754375379722506, 0.00018757045512819546, 0.0, 0.0005997279276006298, 9.596148558240835e-05, 0.00019285943842623884, 9.351223641817808e-05, 0.00010335847911228242, 0.0003994379050135511, 0.0001251665284658218, 0.0004034893284500921, 2.710057957955393e-05, 0.0005473171109119394, 0.0003825009472429626, 0.00020703472383226687, 0.0003021568545949467, 0.0003654194509344699, 0.0003011753780406489, 0.0004010661063986986, 0.00022909886627763977, 0.0004982643397184118, 0.00044343957680651196, 0.00016534177436347544, 0.0006932489408309185, 0.0008139911047302091, 0.0005571404477898215, 0.00019284086720061322, 0.0004665353713831101, 0.0003088587383041298, 0.0004159473919173677, 0.00016993732101236737, 0.0005979168319323449, 0.0003074926866960963, 
0.0003800735765089214, 0.00043721060378064744, 0.00020708137694279535, 0.0005680210725266104, 0.00029553074305185687, 0.0, 0.0005573798189508889, 0.0002868345171446118, 0.00013304537582981628, 0.00040763394069214334, 0.0002851634199464281, 0.0007785456212358768, 0.0010787536125878238, 0.000657086048346664, 0.00041112576163343566, 0.0006443692792916423, 0.00045959118112419385, 0.0003122378011821397, 0.000609336667997784, 0.0003880694186805059, 0.0009093141334931765, 0.0005626546341063053, 0.0008767530020403775, 0.0008983568364044369, 0.0005313404948972548, 0.0008518689686885723, 0.0010572265679912617, 0.0006354757169890242, 0.0003274066373745109, 0.000385668466444457, 0.0008420542814226254, 0.0007626820763925989, 0.00046753175971183015, 0.000597552021828959, 0.001512642413138702, 0.0015533461561963759, 0.00042917272684336866, 0.0007589333893737912, 0.0007793175110394363, 0.0013606323326172186, 0.0010708106074122238, 0.0017255764462804062, 0.0014505471230252798, 0.0011778632144526457, 0.0009922272000244719, 0.0015684539363044984, 0.001202647835974969, 0.0008013912424211175, 0.0017305482901869961, 0.001546878751333817, 0.0016280335591928121, 0.0012209017265165832, 0.0016428836513145927, 0.0017125437902664284, 0.0012839198245495028, 0.0023549063987276125, 0.00194616284198128, 0.0013626947758323272, 0.0017155356949486767, 0.0016421110570177533, 0.0022854458662050514, 0.002232510397715493, 0.001215801821387133, 0.0020357009633157454, 0.0027416934048078026, 0.0033997017131347463, 0.0020350033584146886, 0.002268660609320711, 0.0015326216859995597, 0.0024364336487005247, 0.0027935535034442727, 0.0033879617240121567, 0.0029156368659942265, 0.0020645156777836385, 0.002438835657676493, 0.002441985368853415, 0.0034505417055020766, 0.002901555767425855, 0.0031212521532832578, 0.001882580914493433, 0.003287713582730159, 0.0029367140524931253, 0.0039670292120274, 0.0033618625963067715, 0.003451895585031776, 0.0031243478533087288, 0.0031430316413050786, 0.003943271506875921, 
0.004323357216324662, 0.002625114483083962, 0.0039257578039402765, 0.0033942955882596795, 0.0050165582516125545, 0.003618134394602263, 0.0040988395915684825, 0.005175196048356265, 0.004432317278058638, 0.003930083705786328, 0.004502927543613821, 0.0037867850760546357, 0.005004235568270545, 0.005730496751084424, 0.004906496362388982, 0.005118748289481785, 0.004481453962229492, 0.005523704490355952, 0.005273490394942305, 0.005143007593848476, 0.005774214159838988, 0.007580190510290576, 0.006498938607978545, 0.007267638465967134, 0.006711962346597876, 0.007160055766872183, 0.007741573325960483, 0.006958570688851483, 0.008027616384043763, 0.006491124055410961, 0.006965507912454632, 0.007033286428133173, 0.0075345224379212965, 0.007969065791006133, 0.007304825691680206, 0.008879706955007347, 0.009719468580466434, 0.00930794305803523, 0.008589718734557227, 0.011319061589399954, 0.010844771664793588, 0.014224548429327051, 0.012721626927571053, 0.010222910123882495, 0.012362371675419223, 0.015389424623274844, 0.01565643696955999, 0.015230812814505454, 0.01906925165094261, 0.017090268032596414, 0.016921746977907524, 0.01635859947247497, 0.018109406171415685, 0.021331014488283843, 0.019025690796250223, 0.02007964655849747, 0.023200653235011234, 0.02345841135757303, 0.025712153617200847, 0.018961324532910873, 0.021425620734273135, 0.019705622622336192, 0.02305949657795824, 0.016904155311048913, 0.01779913605910252, 0.017958248840870876, 0.015605664608547521, 0.01320737285223122, 0.011833960385302314, 0.011730064847479736, 0.009846339992453983, 0.008205800581666416, 0.008087698075809476, 0.0068239880499773855, 0.006234452418040672, 0.004989127788162423, 0.004247616987260091, 0.0040220996082628545, 0.003894180536491223, 0.0031874275553651146, 0.0019197823866261643, 0.0018729511175749163, 0.0021220417810383875, 0.001538804162468958, 0.001154480847665234, 0.0015677334745103637, 0.0012899480169906263, 0.0007547088562829989, 0.0013821809033973298, 0.0013596833018923067, 
0.0008751287536453, 0.0007717716835195772, 0.0011809427111326815, 0.0008204830103034985, 0.0007205160401171691, 0.0005120452067340448, 0.00018562536875194573, 0.0005563956418390186, 0.0007167694665782006, 0.0006414536555761893, 0.00038011999478835667, 0.00039504726806279, 0.0005938407903695269, 0.00010751386423073721, 0.0002915600232343677, 0.0004151473223826165, 0.00014006710927265412, 0.0005997265577525856, 0.0002265689134943577, 0.0005981214089530937, 9.644529634230595e-05, 0.0005115322964878402, 0.0, 0.00010964734385155819, 0.0003086815386750042, 0.00043743733320119406, 0.0, 0.000289606522077276, 0.0002056294162931427, 0.00020578968851429552, 9.542344840922448e-05, 0.000321123105257978, 0.0003137907783404116, 0.00010234259001819252, 0.0, 9.758414886760633e-05, 0.0, 0.00017721446262301672, 0.0001949324490405443, 0.0, 2.0693489615605736e-05, 2.7891006362537966e-05, 0.00010333056356607064, 9.542344840922448e-05, 0.0, 0.00010333056356607064, 9.878813766740087e-05, 0.0, 0.0, 0.0, 9.111030571924056e-05, 0.0, 7.48718628201953e-05, 0.0003174288794835497, 0.00010333056356607064, 9.644529634230595e-05, 0.0, 0.0, 9.841572490737047e-05, 0.0, 0.0, 9.988976946441886e-05, 7.177537757825609e-06, 0.00010825679153279412, 0.0, 0.0, 0.0, 0.0, 9.644529634230595e-05, 0.0, 0.0, 0.0, 0.0, 9.599826600239085e-05, 0.0, 0.0, 0.0, 0.0, 0.00012590698025677054, 0.0, 0.0, 0.0, 0.0, 0.00010054394039785078, 0.0, 0.0, 0.0, 2.0693489615605736e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011714821100168613, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 8.275929130372281e-05, 0.0, 0.0, 0.0, 0.0, 0.00010751386423073721, 0.0, 0.0, 0.0, 0.0, 0.00018407002371348348, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.7891006362537966e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
4000 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.621755382180972e-05, 9.569969132817099e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 9.47644518875752e-05, 9.450075595656066e-05, 0.0, 0.0, 0.00014955302499536656, 0.00010635460487071522, 0.0, 0.0, 0.0, 0.00019114807020996472, 2.033070560719565e-05, 0.0, 0.00039001051620217854, 0.0, 0.0002003645936609027, 0.0, 9.78542017186367e-05, 0.0002144610362504868, 0.00010837978811874046, 2.05770726399772e-05, 9.875589590479039e-05, 0.0002881475392209586, 0.00028302978957978607, 0.00019536564660850995, 0.0, 0.00010751338545552207, 0.0, 0.00031121589623286764, 9.65296476779989e-05, 0.0001920639223148698, 0.00020080608037931436, 0.00019089679531774864, 0.00019114720646425841, 9.455581035679811e-05, 0.0003034850153967957, 5.5441090740482904e-05, 0.00011233983685723907, 0.00019665690888529336, 0.00012959301651282567, 0.0001081040560790792, 0.00019494565964735337, 0.0, 0.0003335000284746822, 0.00019209441629371892, 0.0, 9.483558885732267e-05, 9.495395957324931e-05, 9.511398724242518e-05, 0.0003910025844771258, 0.0005565886825981236, 0.0004879506042873521, 0.0003013116434288953, 0.00030479944859100513, 0.0003011883155636976, 0.0001926934051639695, 0.0002127049097899794, 0.00040715560559345177, 0.0003938995500219534, 0.0, 0.0006020362402107839, 0.0002971297998146788, 0.00039338460736434365, 3.6816787537304577e-05, 0.0006057752451567875, 9.564851439507168e-05, 0.0002798708649838969, 0.00026103688985764506, 0.0002093828499180797, 0.0, 0.00017508829607575997, 0.00040562782720978194, 0.0005115008938492251, 0.0009770949051408349, 0.00039074946943101367, 0.00041897266081822527, 0.00029529576724687774, 0.000304916317140486, 0.0005075254854588185, 0.0003155323151835445, 0.0005068836848448636, 0.00024500158225942574, 0.00040352359245254805, 0.0005016966289992993, 0.0004961844292265483, 0.0003052976984239863, 0.00061399615146455, 0.0004047815066342354, 0.00044710632308526895, 
0.0004598069150592829, 0.0007046767197672535, 0.0004910685760705134, 0.00028755487822686084, 0.0009063546575516925, 0.000757967576597371, 0.0003975044866126721, 0.0003972784481167449, 0.0006910043012111491, 0.0005710569113830357, 0.0007733719930654477, 0.0009308601747564303, 0.001047818929332363, 0.000819104398990387, 0.000428241853980168, 0.0008067676317295384, 0.0006075866325654186, 0.0003988093059416286, 0.0007849998879805377, 0.0005264773430969021, 0.001047753585091972, 0.0006904959680859007, 0.0012772909271046183, 0.0006884634617764523, 0.0006855307822243766, 0.0006330309662446013, 0.0002089718947326783, 0.0009087757743206614, 0.0010985739038770548, 0.0004912328004171972, 0.001109477003482061, 0.0005913803972027513, 0.0006905740807410806, 0.000691812992517229, 0.0007674237894818121, 0.000913156317001492, 0.0006428587399994034, 0.0007074621870076438, 0.0009524964038331039, 0.0015144888673407552, 0.0010432540708286365, 0.0011437630513068816, 0.0010782219520031919, 0.0005991071283040168, 0.0005143839643544293, 0.0018435561528506164, 0.0005412927601015926, 0.0007664485079168962, 0.001201903727615488, 0.0012340206470926175, 0.001624081673641535, 0.0011020679430299146, 0.0011140858003543263, 0.0012166947344154349, 0.0015091606829576109, 0.0014541391801653225, 0.0016066535385377177, 0.0014035292397909698, 0.001871127967312781, 0.0016419141919525536, 0.001497712972907691, 0.0019317672721789582, 0.0014225972900036336, 0.00242090661588317, 0.0008777204358707573, 0.0018442246169189838, 0.0018919669215483625, 0.0014845352177621784, 0.00167809777643167, 0.0017907063320889767, 0.001542008105977318, 0.0020116258954188883, 0.0020343813136059824, 0.002240610890829425, 0.0031326034852097603, 0.002694222646141388, 0.001342211556557883, 0.0016775350648810848, 0.0021947383327750956, 0.0026101049303777677, 0.0017580580963399786, 0.0030101561899825973, 0.003203121186839962, 0.0028024206908972707, 0.0041416660695913375, 0.002483239613416952, 0.0033653659785814736, 
0.0036947781615072834, 0.002451093101260841, 0.0024388982136207956, 0.002903712375202084, 0.002943686226057166, 0.002564200526394788, 0.002988640059113263, 0.0028498278621925982, 0.0040575384395291755, 0.0029562383288780155, 0.0034598669692515413, 0.003961566227042121, 0.004271448744006482, 0.003955781083626173, 0.004339845386615257, 0.0031808491658151163, 0.004809050981584108, 0.004856006301212548, 0.00537094925925561, 0.004108931459271707, 0.004116473536562627, 0.003034624678784447, 0.00330908761312401, 0.0047685573811387895, 0.005215684133687721, 0.005454226362676177, 0.005555572725839617, 0.0045580236463982834, 0.006067946678827122, 0.006071828276922991, 0.007707547534453191, 0.006764152156248426, 0.006551684532425637, 0.0044448912853013375, 0.005897324004384265, 0.0064342601813598375, 0.005571513115295543, 0.005497291071210074, 0.00482605490488364, 0.006626363835977198, 0.0067786318390524875, 0.006765342472940054, 0.008500413216593712, 0.009570339679725108, 0.006957510420281293, 0.007219568163678402, 0.009341518270240516, 0.0084434630823017, 0.00840591733328956, 0.01012551996861784, 0.00947606611705493, 0.00980826241527133, 0.011165141725872284, 0.011606261915671162, 0.01092772732345131, 0.011930719052358955, 0.011491925417951286, 0.011857526292722128, 0.01299989383140089, 0.01608102468204649, 0.016009266791798663, 0.017783235234265653, 0.015121600586998789, 0.020152358417347034, 0.01902240838737558, 0.01968017102198333, 0.01994237477595897, 0.020898072296488008, 0.021904855493504776, 0.022626347239141004, 0.01982896962143547, 0.023663579950880097, 0.023064657776819715, 0.020089459703929776, 0.0195614373826434, 0.02186077111438749, 0.016034738727786286, 0.016980977406639605, 0.015834057702763582, 0.012672390301161074, 0.011890190601652096, 0.011007750734350343, 0.010671504595396149, 0.007619813803621315, 0.00574297039341567, 0.004034841906266755, 0.0048849912036502486, 0.004507992190322197, 0.0038260678093008237, 0.0038857747194541116, 0.0024989222311107852, 
0.002563215706072941, 0.0022220756590550757, 0.0020508358946362965, 0.002264615510632821, 0.0024208989548342966, 0.001346622367892596, 0.001552919993256032, 0.0012403534052863702, 0.0011651693738090301, 0.0014025035605417983, 0.001049285794866657, 0.0005078774430570624, 0.0006716646593050901, 0.0007352141112266563, 0.0005257397793720294, 0.000637895056304325, 0.00030244551621632325, 0.0010415890695401457, 0.0001123332179363374, 0.0005194830308004327, 0.00019925693367795345, 0.000603937194293744, 0.0007236784493313366, 0.00031428746984993533, 0.0004017444265141787, 0.0003592010448452857, 0.00032376332356065486, 0.0002150348638327707, 0.0002005311463655774, 0.00026854539372850143, 0.0, 0.0003260291914274535, 0.00020079751803057347, 9.834980398325718e-05, 0.0, 0.0002940191886471017, 0.0, 0.0, 0.00021076878623510774, 0.00030233069436862473, 9.849739183655404e-05, 0.0, 0.00011014747187233996, 0.0003121509009599563, 9.334210681131781e-05, 0.00018779333821786952, 9.849739183655404e-05, 0.00019552179681054768, 9.834980398325718e-05, 9.776089840527384e-05, 0.0, 0.00018529955414708647, 0.00010303661962529963, 0.00010586170650569711, 0.00011014747187233996, 0.00011771347116390817, 0.00010876223030729308, 0.0, 0.0, 0.00010300682039843168, 0.0, 0.000213541222181581, 0.00021791744613388004, 0.0, 0.00010586170650569711, 0.00010300682039843168, 0.00010233662883865163, 0.00010140887206466384, 0.0, 1.553100215675363e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.880777697841115e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010120801363378389, 0.0, 0.0, 0.0, 9.928241463257287e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.334210681131781e-05, 9.752432596714571e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.715284226532821e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 8.478652720797812e-05, 
0.0, 0.0, 0.00011199823481893081, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010140887206466384, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011090860083333078, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
4500 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.600325197683399e-05, 0.0, 0.0, 0.00010742822522682623, 0.0, 0.0, 0.0, 0.0, 0.0, 9.90041907769529e-05, 0.0, 0.0, 0.0, 9.39088020009381e-05, 0.00020290489359851203, 0.0, 0.0, 0.00020448908921353456, 9.536108632130073e-05, 0.0002985088385903463, 0.00021522579172188566, 9.916483360131202e-05, 0.0, 0.00019106353325019037, 0.00020384128772475002, 0.00019255396517081448, 0.0001406364793848756, 0.00010310145881200793, 0.00048656494276996603, 0.00010853608504862874, 0.00038415651883429256, 9.749450381194338e-05, 0.00028133292472599936, 0.0004900563975685605, 0.0002878990415829557, 0.0005124934351337689, 0.00021836487316675241, 9.101683415124945e-05, 0.0002998205291531841, 0.0002880674779102642, 0.0001957334554537863, 0.00011727981710232256, 0.00020019839903606765, 9.7555738477974e-05, 0.0003411002027483729, 0.0005751654544104435, 0.00019656298451521933, 0.0007797644294263603, 0.00020524553368695528, 0.00032087434508972493, 0.0004012030070394718, 0.0003869937681804387, 0.0003973131255851144, 0.0004686666410907124, 0.0001949818786832044, 0.0006561008185565666, 0.0008383632861538465, 0.0005167838836522296, 0.0001936612639984955, 0.0003127982737167165, 0.00036968445854467956, 0.0005641990032326442, 0.0002889461845794551, 0.00027062801763831873, 0.00038864947759993337, 0.0006011999660325296, 0.0006324999166948731, 0.0009422798349016681, 0.0006052159934881137, 0.0005053745060388149, 0.0007117676610847683, 0.00039073074852931356, 0.0005854449035563841, 0.0007378389393985092, 0.00047964331099220164, 0.0006659498730124979, 0.0004069927770777126, 0.000804852431766664, 0.0011950732452702196, 0.000598493730390449, 0.0005999196566177803, 0.00041833359257880217, 0.0005886467472974277, 0.00032983347299403557, 3.695780940592446e-05, 0.0005622759153841673, 9.651718300687281e-05, 0.0010062944770939718, 0.0005021623400059357, 0.0003890812186758341, 0.0008325283258814498, 
0.00020722235614159233, 0.0006562340071916877, 0.0007035111394795726, 0.0011973418019293903, 0.0014202769302210465, 0.0006088437094434794, 0.000502484954779188, 0.0007150950984635959, 0.0007411174235672274, 0.001267750879516116, 0.0008321418786078504, 0.0014648721308583297, 0.0005700390038214556, 0.0009357024291168516, 0.0011565734442685663, 0.00048802829162054027, 0.0010225429382153095, 0.0009981286121388093, 0.0013713518906472949, 0.0009157952506419267, 0.0003931793411480896, 0.0008560153718285164, 0.001071024913262585, 0.000510659312322282, 0.0005937348431498631, 0.0008439958052244936, 0.0010293751906835019, 0.0007653763974254681, 0.0012299806815768398, 0.0011627965779503209, 0.0016589411599070413, 0.0006231601413331115, 0.0012742661444136287, 0.0010120597014817276, 0.0011013946838874208, 0.0008143570185706414, 0.0010639017033509499, 0.0009527638311349837, 0.0020252575478371393, 0.0007971468236501317, 0.0011202851684006252, 0.0018436835322257553, 0.001904661142119231, 0.001132314125183174, 0.0006568510662026144, 0.0017211234744690467, 0.0013439581161608686, 0.0011292619719351022, 0.0014709333530059426, 0.0018388152770235147, 0.0010834588214940636, 0.0013847776364855726, 0.0019324012484842308, 0.001023155233091543, 0.0017835593238512517, 0.0014176643892279339, 0.0017154211504683285, 0.0018974075053896885, 0.001231592340011779, 0.0013202951424573016, 0.0013090052479610861, 0.0008316296996056064, 0.0019349367347770866, 0.001749346622664688, 0.0019703067754669686, 0.0011014756051317765, 0.0016958899934070217, 0.002000849712031091, 0.0024221603709524728, 0.0013021777596275868, 0.0027104407849698983, 0.0019315688367760812, 0.002326074264459234, 0.0021536915765343854, 0.0017073750102893713, 0.001668777233822069, 0.0020366160734911105, 0.002666499996921354, 0.002475542983508817, 0.0021359811474710804, 0.0024579361225910185, 0.0029236326364049, 0.00237891114193419, 0.002651247308996345, 0.003634330507592018, 0.002511205500641609, 0.0015594522184250459, 
0.0025101673335510537, 0.0034180194597960866, 0.0024929609361292126, 0.0026706139998425873, 0.002890522038377384, 0.0029068416162919696, 0.0033532443512340453, 0.0031695934490781072, 0.0029760972209194763, 0.0041191418345222705, 0.004216670990509565, 0.0023886548330634264, 0.004479027607252451, 0.004199737460628418, 0.0034810844511395274, 0.003981521587500334, 0.004311114644305934, 0.0028534023864827487, 0.002952024117359704, 0.00335678389617487, 0.003000128344857603, 0.004443168172552336, 0.0031792305340629023, 0.004081001967348967, 0.004346216788726106, 0.0036477338827108664, 0.004422503703797469, 0.00345801250631965, 0.004360237706173894, 0.005277067119271416, 0.004128898782374132, 0.006161510911359968, 0.004114797496045432, 0.004111251046561573, 0.0060276831917356125, 0.004666016717874349, 0.0073140973272536915, 0.005868958794658419, 0.005135486426365317, 0.006348961521638242, 0.005165858130262203, 0.006770908641336475, 0.005939597793528168, 0.004678394630261823, 0.0056754763755858546, 0.005022201103174992, 0.007021132595060445, 0.0066421963595549695, 0.006718857777040482, 0.008687703689292368, 0.006379921492601604, 0.008609335468768912, 0.008285920044706473, 0.008924232896181465, 0.007270530213074135, 0.008825011298605736, 0.009614002821251548, 0.010350145314426663, 0.012222434230350408, 0.011603174051104484, 0.011163539149244096, 0.013509668625580899, 0.01263889306688197, 0.013876664420545519, 0.012936021514706292, 0.013134321095362647, 0.01512216223404556, 0.015024064696075727, 0.019407508295204354, 0.016219729384502217, 0.017934465466210737, 0.018097320989475887, 0.020501826443891046, 0.021727741868620472, 0.02041622778590264, 0.021014070625021298, 0.022349466693547728, 0.021791999412636702, 0.023641815974257623, 0.02002860342580693, 0.018515810314022815, 0.017895723799066614, 0.014676311034001022, 0.013646937569453278, 0.015436493488929346, 0.013152682760924897, 0.009582704803861255, 0.01026360874331255, 0.006838518479086456, 0.008956646135362572, 
0.007212502776328122, 0.004041494724473259, 0.0034778790860815366, 0.004636915449895674, 0.0030505693458998456, 0.0032913550654783924, 0.002334506368593778, 0.002760655593182355, 0.0019070238767453823, 0.001779794552716666, 0.0016052733897234028, 0.0011193439410942636, 0.0010810938774141413, 0.0010190506203488357, 0.0012394104921797616, 0.0006181872132584792, 0.00035409876450915207, 0.0005193803370600735, 0.0005144608846712376, 0.0007147831512093308, 0.0004988388036939222, 0.0003209499843586597, 0.0008291636730156702, 0.0004905030524574143, 0.0004179676517980131, 0.0005113303993824478, 0.00031821927211152446, 0.00021283688887763645, 0.0002148454549688712, 0.00010268413096969014, 0.00020367060742095555, 0.0002007883922383021, 0.0005900070803659383, 0.00021957655142374624, 0.00025072104629968476, 0.000425348780914674, 0.0, 0.0002975501427901093, 0.000101753225955164, 0.00021090475607778538, 0.00011201694137366643, 0.00010520556651747229, 9.785548195242693e-05, 0.0, 0.000213907990638463, 0.0002102119542297783, 0.00012223134109355026, 0.00028424363841002466, 9.809286014192746e-05, 7.378325224888272e-06, 0.0, 0.00011117047892044313, 0.00010914902722577163, 0.0, 0.0, 0.0, 9.96195495441576e-05, 0.00011237854092302302, 0.0, 9.37825351702083e-05, 0.0, 0.0, 0.0, 0.000101753225955164, 3.776422982489137e-05, 0.0, 9.785548195242693e-05, 0.0, 0.00010031400848249276, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011052149502866404, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00010116885132482329, 0.0, 0.00010268413096969014, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
5000 : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.511415670772774e-05, 0.0, 0.0, 0.0, 2.0611103580595476e-05, 0.0, 0.00014059299724787763, 0.00025144680536734053, 6.479237404181693e-05, 0.00018045122676399082, 0.00014285659700274717, 0.000202618820214788, 0.00038357618644584444, 0.0002532167877747989, 0.0, 0.0001649127820000293, 0.0007565034394209913, 0.0004960093334627375, 0.00020299881661146903, 0.0005995053745416768, 0.000480859904094521, 0.0004807691917087049, 0.00031837103571006687, 0.0005002156901471695, 0.0006028247964050345, 0.0006118668423150621, 0.0007189528137710243, 0.0002934746342358125, 0.0005123995128316648, 0.00046200398876682894, 0.0002077089005264198, 0.000306263636366874, 0.00031921461173155225, 0.0007173246001955316, 0.0002956196195566542, 0.0005154624386637192, 0.000777793378247366, 0.00032288268628104477, 0.0003036434250817211, 0.00039127561529588587, 0.00038691601245018515, 0.0005084188001340376, 0.00028597565148762703, 0.0008137203382327823, 0.0005177500993258021, 0.0010603027840981158, 0.00032501971275773794, 0.0007350652018229537, 0.0010892905538598767, 0.00061756217889608, 0.0010924862602539607, 0.0006753711665459149, 0.0007291036280770608, 0.000824751480597376, 0.00029414698755615714, 0.0005118033677487267, 0.001199512740118141, 0.0009086694718409405, 0.0007456778749226272, 0.001467859398171193, 0.0005191126288607235, 0.0006410130514607214, 0.0006681000402817493, 0.0008190012617937742, 0.0006693044991823081, 0.0008209601822446577, 0.0008169372477251771, 0.0010768353007878768, 0.0009607928333142013, 0.0011561684446775994, 0.0008838956488582244, 0.0008110906623762096, 0.0011141854496068597, 0.0011895231922757778, 0.00047534009229256203, 0.0012101088809469578, 0.001004251256736085, 0.0011497002090694651, 0.0009034825179533313, 0.0013626383160344755, 0.0007133892436908125, 0.0016070337084027539, 0.001612662547150809, 0.0008871422426882541, 0.0007999277535108098, 
0.0008239129441528804, 0.0009376688572130804, 0.0011030363073907979, 0.0013562993885969084, 0.0016115135235971378, 0.0009533580437143796, 0.00041138386550553977, 0.001102539601766024, 0.0012873013108646292, 0.0017681740597346486, 0.0012052321683355004, 0.002067008806494274, 0.0014595590919525036, 0.0007087662307600116, 0.001405842117901863, 0.0012072890532464047, 0.001149491005166133, 0.002042817853611206, 0.001305554535805122, 0.0006032864093656477, 0.0015159998959078677, 0.001293021845330596, 0.0009299473648491446, 0.0012501215243312033, 0.0006213078137646104, 0.000914677508028349, 0.0011065633967010883, 0.0020610058098838556, 0.0010803037589219688, 0.0009266278200692749, 0.0011096514282307077, 0.0005129758068979236, 0.0017775755749838622, 0.00252372624826969, 0.0012938918484021499, 0.001819824684302965, 0.0008710662379238607, 0.0008687297794065351, 0.0013221433482573542, 0.0014024760492222217, 0.000656485044485412, 0.001428590276767709, 0.0021032180442493746, 0.0015080033163901968, 0.0017357566734565704, 0.0013209631039096485, 0.0014528048296210709, 0.0011878990964034352, 0.0011527833239386067, 0.0014141029679202177, 0.0011317938545256838, 0.0006694542114938583, 0.001955766659019882, 0.0009491742729390176, 0.0017791196522069278, 0.0011703269518556353, 0.001821149724301743, 0.0012504731884718809, 0.0015775417351089906, 0.0012563025040509199, 0.0026605448635498054, 0.0007139225169778903, 0.0011549081816806172, 0.0017684207531741241, 0.0015675086748213984, 0.0022748144115864676, 0.0017973183022161167, 0.001682874671271579, 0.001269759035029232, 0.001569271297602704, 0.0018131741635076149, 0.000967715614183512, 0.001981779480443014, 0.002482711220730525, 0.0024210248317113534, 0.002352670467752135, 0.00172083042974492, 0.0011708240262299449, 0.0021111594342531816, 0.0026293582403058425, 0.0017345868770123805, 0.002529848228063672, 0.0033764280832680026, 0.0023466350211825628, 0.0018699002566872054, 0.002080409410459651, 0.0018330602113927983, 0.0019253005723689465, 
0.002935038822198301, 0.0035426114532520177, 0.0027759607641038363, 0.0027279746495061656, 0.0023898878667382166, 0.0028842445569687123, 0.0022604501421674343, 0.002450843148344333, 0.0025947651088498083, 0.0027823947060051377, 0.0021694149775909164, 0.002613269452224207, 0.0023205668873290553, 0.0028451831644704167, 0.0031203504597713525, 0.0032658750057595796, 0.0028644655693654338, 0.0033803328950195027, 0.0038968961739642106, 0.002394086694786292, 0.0026964999099582014, 0.003880287203204019, 0.004006091761514702, 0.002395798430131652, 0.00440871979220828, 0.005198491323086528, 0.0034943005932297857, 0.004980706863888511, 0.005485714269879127, 0.004301657543639124, 0.004083061343796215, 0.0048436244697737585, 0.0034435967945529895, 0.004345747696474154, 0.004367771877084638, 0.0038983092221855416, 0.00409077582991897, 0.004624943211704584, 0.004776293754526027, 0.004742839813302709, 0.006706413391662328, 0.00574167266435919, 0.0052338199869500475, 0.004694707673888846, 0.006924431883899091, 0.005946637914693334, 0.005118976631508661, 0.006085528164865788, 0.005655676830939434, 0.006556393297166188, 0.005815337021634475, 0.007210485157164012, 0.0075983172356486025, 0.007164543881659075, 0.00642914569900526, 0.006486654401616411, 0.007534059433199521, 0.00963764475198659, 0.008588286822861682, 0.00920161563416983, 0.007279965928038703, 0.00825113469725057, 0.008459098618810149, 0.009579195492226028, 0.008735202537932113, 0.009101868393059603, 0.010779717131074222, 0.01330076822437792, 0.011191664445870523, 0.012334497924000124, 0.014024028647310092, 0.01367861161543573, 0.015046081410805268, 0.016473110647871263, 0.014983999722283677, 0.01654923923537804, 0.01807869431018259, 0.018372962339773886, 0.019820674984138056, 0.01782385986428776, 0.018060872399282515, 0.019604184589977806, 0.019953338283815326, 0.019172930532817125, 0.018335721586650533, 0.017638698430692306, 0.016772019021621544, 0.01782804885901488, 0.017490225117582497, 0.016150026071219348, 
0.01109037435670235, 0.010578189118080134, 0.009826184914662415, 0.008641835155457932, 0.006069083902231605, 0.00571110332783822, 0.005907298730878041, 0.0044861483447453995, 0.0038751942804480487, 0.004066008398594875, 0.0027458405645175168, 0.003368694422169384, 0.0030228988074382296, 0.002140421676586118, 0.0014501697454379727, 0.0008963698310730641, 0.0013295464868553497, 0.0012277269962702258, 0.00044640151986470757, 0.0010796921263585256, 0.0005073499488752833, 0.0012515735370868214, 0.0003493012243809824, 0.0004983666570579658, 0.0007578176627667178, 0.0006204403919398075, 0.0006317698526788942, 0.00019899991227007388, 0.0005331044612485606, 0.000475998064381009, 0.00015024999446717105, 0.00044550453661878163, 0.00020082817246875866, 0.00010235648209668799, 0.00020170633412379288, 0.00014981769709465696, 0.0004122654534583445, 0.0003017039254397252, 0.0, 0.0, 0.00019259488647820777, 0.0, 0.00010396936183731541, 9.941779412908304e-05, 0.0, 0.0, 0.0002921227062500465, 0.00010235648209668799, 0.0, 0.0, 0.0002142916328654318, 9.926882699888648e-05, 0.0, 8.557881610613288e-05, 0.0, 0.0, 5.768690466548556e-05, 0.0, 0.0, 0.0001032929061783846, 0.0, 0.00010215374667479705, 0.0001119449840896995, 0.0, 0.00010021016005877893, 0.0, 0.0, 9.941779412908304e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011117674052547301, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.926882699888648e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
}
binxcenters = [0.0017857142857142857, 0.005357142857142857, 0.008928571428571428, 0.0125, 0.01607142857142857, 0.019642857142857142, 0.023214285714285715, 0.026785714285714284, 0.030357142857142857, 0.033928571428571426, 0.0375, 0.04107142857142857, 0.044642857142857144, 0.048214285714285716, 0.05178571428571428, 0.055357142857142855, 0.05892857142857143, 0.0625, 0.06607142857142856, 0.06964285714285713, 0.0732142857142857, 0.07678571428571428, 0.08035714285714285, 0.08392857142857142, 0.0875, 0.09107142857142857, 0.09464285714285714, 0.0982142857142857, 0.10178571428571427, 0.10535714285714284, 0.10892857142857142, 0.11249999999999999, 0.11607142857142856, 0.11964285714285713, 0.1232142857142857, 0.12678571428571428, 0.13035714285714284, 0.13392857142857142, 0.13749999999999998, 0.14107142857142857, 0.14464285714285713, 0.14821428571428572, 0.15178571428571427, 0.15535714285714283, 0.15892857142857142, 0.16249999999999998, 0.16607142857142856, 0.16964285714285712, 0.1732142857142857, 0.17678571428571427, 0.18035714285714285, 0.1839285714285714, 0.1875, 0.19107142857142856, 0.19464285714285712, 0.1982142857142857, 0.20178571428571426, 0.20535714285714285, 0.2089285714285714, 0.2125, 0.21607142857142855, 0.21964285714285714, 0.2232142857142857, 0.22678571428571428, 0.23035714285714284, 0.2339285714285714, 0.2375, 0.24107142857142855, 0.24464285714285713, 0.2482142857142857, 0.2517857142857143, 0.25535714285714284, 0.2589285714285714, 0.2625, 0.26607142857142857, 0.26964285714285713, 0.2732142857142857, 0.27678571428571425, 0.28035714285714286, 0.2839285714285714, 0.2875, 0.29107142857142854, 0.29464285714285715, 0.2982142857142857, 0.30178571428571427, 0.3053571428571428, 0.3089285714285714, 0.3125, 0.31607142857142856, 0.3196428571428571, 0.3232142857142857, 0.3267857142857143, 0.33035714285714285, 0.3339285714285714, 0.33749999999999997, 0.3410714285714286, 0.34464285714285714, 0.3482142857142857, 0.35178571428571426, 0.3553571428571428, 0.35892857142857143, 
0.3625, 0.36607142857142855, 0.3696428571428571, 0.3732142857142857, 0.3767857142857143, 0.38035714285714284, 0.3839285714285714, 0.38749999999999996, 0.39107142857142857, 0.39464285714285713, 0.3982142857142857, 0.40178571428571425, 0.40535714285714286, 0.4089285714285714, 0.4125, 0.41607142857142854, 0.41964285714285715, 0.4232142857142857, 0.42678571428571427, 0.4303571428571428, 0.4339285714285714, 0.4375, 0.44107142857142856, 0.4446428571428571, 0.4482142857142857, 0.4517857142857143, 0.45535714285714285, 0.4589285714285714, 0.46249999999999997, 0.4660714285714285, 0.46964285714285714, 0.4732142857142857, 0.47678571428571426, 0.4803571428571428, 0.48392857142857143, 0.4875, 0.49107142857142855, 0.4946428571428571, 0.49821428571428567, 0.5017857142857143, 0.5053571428571428, 0.5089285714285714, 0.5125, 0.5160714285714285, 0.5196428571428571, 0.5232142857142857, 0.5267857142857143, 0.5303571428571429, 0.5339285714285714, 0.5375, 0.5410714285714285, 0.5446428571428571, 0.5482142857142857, 0.5517857142857142, 0.5553571428571429, 0.5589285714285714, 0.5625, 0.5660714285714286, 0.5696428571428571, 0.5732142857142857, 0.5767857142857142, 0.5803571428571428, 0.5839285714285714, 0.5875, 0.5910714285714286, 0.5946428571428571, 0.5982142857142857, 0.6017857142857143, 0.6053571428571428, 0.6089285714285714, 0.6124999999999999, 0.6160714285714285, 0.6196428571428572, 0.6232142857142857, 0.6267857142857143, 0.6303571428571428, 0.6339285714285714, 0.6375, 0.6410714285714285, 0.6446428571428571, 0.6482142857142857, 0.6517857142857143, 0.6553571428571429, 0.6589285714285714, 0.6625, 0.6660714285714285, 0.6696428571428571, 0.6732142857142857, 0.6767857142857142, 0.6803571428571429, 0.6839285714285714, 0.6875, 0.6910714285714286, 0.6946428571428571, 0.6982142857142857, 0.7017857142857142, 0.7053571428571428, 0.7089285714285714, 0.7125, 0.7160714285714286, 0.7196428571428571, 0.7232142857142857, 0.7267857142857143, 0.7303571428571428, 0.7339285714285714, 0.7374999999999999, 
0.7410714285714285, 0.7446428571428572, 0.7482142857142857, 0.7517857142857143, 0.7553571428571428, 0.7589285714285714, 0.7625, 0.7660714285714285, 0.7696428571428571, 0.7732142857142856, 0.7767857142857143, 0.7803571428571429, 0.7839285714285714, 0.7875, 0.7910714285714285, 0.7946428571428571, 0.7982142857142857, 0.8017857142857142, 0.8053571428571428, 0.8089285714285714, 0.8125, 0.8160714285714286, 0.8196428571428571, 0.8232142857142857, 0.8267857142857142, 0.8303571428571428, 0.8339285714285714, 0.8375, 0.8410714285714286, 0.8446428571428571, 0.8482142857142857, 0.8517857142857143, 0.8553571428571428, 0.8589285714285714, 0.8624999999999999, 0.8660714285714285, 0.8696428571428572, 0.8732142857142857, 0.8767857142857143, 0.8803571428571428, 0.8839285714285714, 0.8875, 0.8910714285714285, 0.8946428571428571, 0.8982142857142856, 0.9017857142857143, 0.9053571428571429, 0.9089285714285714, 0.9125, 0.9160714285714285, 0.9196428571428571, 0.9232142857142857, 0.9267857142857142, 0.9303571428571428, 0.9339285714285714, 0.9375, 0.9410714285714286, 0.9446428571428571, 0.9482142857142857, 0.9517857142857142, 0.9553571428571428, 0.9589285714285714, 0.9624999999999999, 0.9660714285714286, 0.9696428571428571, 0.9732142857142857, 0.9767857142857143, 0.9803571428571428, 0.9839285714285714, 0.9874999999999999, 0.9910714285714285, 0.994642857142857, 0.9982142857142857, 1.0017857142857143, 1.0053571428571428, 1.0089285714285714, 1.0125, 1.0160714285714285, 1.019642857142857, 1.0232142857142856, 1.0267857142857142, 1.0303571428571427, 1.0339285714285713, 1.0374999999999999, 1.0410714285714284, 1.0446428571428572, 1.0482142857142858, 1.0517857142857143, 1.0553571428571429, 1.0589285714285714, 1.0625, 1.0660714285714286, 1.0696428571428571, 1.0732142857142857, 1.0767857142857142, 1.0803571428571428, 1.0839285714285714, 1.0875, 1.0910714285714285, 1.094642857142857, 1.0982142857142856, 1.1017857142857141, 1.105357142857143, 1.1089285714285715, 1.1125, 1.1160714285714286, 
1.1196428571428572, 1.1232142857142857, 1.1267857142857143, 1.1303571428571428, 1.1339285714285714, 1.1375, 1.1410714285714285, 1.144642857142857, 1.1482142857142856, 1.1517857142857142, 1.1553571428571427, 1.1589285714285713, 1.1624999999999999, 1.1660714285714284, 1.1696428571428572, 1.1732142857142858, 1.1767857142857143, 1.1803571428571429, 1.1839285714285714, 1.1875, 1.1910714285714286, 1.1946428571428571, 1.1982142857142857, 1.2017857142857142, 1.2053571428571428, 1.2089285714285714, 1.2125, 1.2160714285714285, 1.219642857142857, 1.2232142857142856, 1.2267857142857141, 1.2303571428571427, 1.2339285714285715, 1.2375, 1.2410714285714286, 1.2446428571428572, 1.2482142857142857, 1.2517857142857143, 1.2553571428571428, 1.2589285714285714, 1.2625, 1.2660714285714285, 1.269642857142857, 1.2732142857142856, 1.2767857142857142, 1.2803571428571427, 1.2839285714285713, 1.2874999999999999, 1.2910714285714284, 1.2946428571428572, 1.2982142857142858, 1.3017857142857143, 1.3053571428571429, 1.3089285714285714, 1.3125, 1.3160714285714286, 1.3196428571428571, 1.3232142857142857, 1.3267857142857142, 1.3303571428571428, 1.3339285714285714, 1.3375, 1.3410714285714285, 1.344642857142857, 1.3482142857142856, 1.3517857142857141, 1.3553571428571427, 1.3589285714285715, 1.3625, 1.3660714285714286, 1.3696428571428572, 1.3732142857142857, 1.3767857142857143, 1.3803571428571428, 1.3839285714285714, 1.3875, 1.3910714285714285, 1.394642857142857, 1.3982142857142856, 1.4017857142857142, 1.4053571428571427, 1.4089285714285713, 1.4124999999999999, 1.4160714285714284, 1.419642857142857, 1.4232142857142858, 1.4267857142857143, 1.4303571428571429, 1.4339285714285714, 1.4375, 1.4410714285714286, 1.4446428571428571, 1.4482142857142857, 1.4517857142857142, 1.4553571428571428, 1.4589285714285714, 1.4625, 1.4660714285714285, 1.469642857142857, 1.4732142857142856, 1.4767857142857141, 1.4803571428571427, 1.4839285714285715, 1.4875, 1.4910714285714286, 1.4946428571428572, 1.4982142857142857, 
1.5017857142857143, 1.5053571428571428, 1.5089285714285714, 1.5125, 1.5160714285714285, 1.519642857142857, 1.5232142857142856, 1.5267857142857142, 1.5303571428571427, 1.5339285714285713, 1.5374999999999999, 1.5410714285714284, 1.544642857142857, 1.5482142857142858, 1.5517857142857143, 1.5553571428571429, 1.5589285714285714, 1.5625, 1.5660714285714286, 1.5696428571428571, 1.5732142857142857, 1.5767857142857142, 1.5803571428571428, 1.5839285714285714, 1.5875, 1.5910714285714285, 1.594642857142857, 1.5982142857142856, 1.6017857142857141, 1.6053571428571427, 1.6089285714285713, 1.6125, 1.6160714285714286, 1.6196428571428572, 1.6232142857142857, 1.6267857142857143, 1.6303571428571428, 1.6339285714285714, 1.6375, 1.6410714285714285, 1.644642857142857, 1.6482142857142856, 1.6517857142857142, 1.6553571428571427, 1.6589285714285713, 1.6624999999999999, 1.6660714285714284, 1.669642857142857, 1.6732142857142858, 1.6767857142857143, 1.6803571428571429, 1.6839285714285714, 1.6875, 1.6910714285714286, 1.6946428571428571, 1.6982142857142857, 1.7017857142857142, 1.7053571428571428, 1.7089285714285714, 1.7125, 1.7160714285714285, 1.719642857142857, 1.7232142857142856, 1.7267857142857141, 1.7303571428571427, 1.7339285714285713, 1.7375, 1.7410714285714286, 1.7446428571428572, 1.7482142857142857, 1.7517857142857143, 1.7553571428571428, 1.7589285714285714, 1.7625, 1.7660714285714285, 1.769642857142857, 1.7732142857142856, 1.7767857142857142, 1.7803571428571427, 1.7839285714285713, 1.7874999999999999, 1.7910714285714284, 1.794642857142857, 1.7982142857142855, 1.8017857142857143, 1.8053571428571429, 1.8089285714285714, 1.8125, 1.8160714285714286, 1.8196428571428571, 1.8232142857142857, 1.8267857142857142, 1.8303571428571428, 1.8339285714285714, 1.8375, 1.8410714285714285, 1.844642857142857, 1.8482142857142856, 1.8517857142857141, 1.8553571428571427, 1.8589285714285713, 1.8625, 1.8660714285714286, 1.8696428571428572, 1.8732142857142857, 1.8767857142857143, 1.8803571428571428, 
1.8839285714285714, 1.8875, 1.8910714285714285, 1.894642857142857, 1.8982142857142856, 1.9017857142857142, 1.9053571428571427, 1.9089285714285713, 1.9124999999999999, 1.9160714285714284, 1.919642857142857, 1.9232142857142855, 1.9267857142857143, 1.9303571428571429, 1.9339285714285714, 1.9375, 1.9410714285714286, 1.9446428571428571, 1.9482142857142857, 1.9517857142857142, 1.9553571428571428, 1.9589285714285714, 1.9625, 1.9660714285714285, 1.969642857142857, 1.9732142857142856, 1.9767857142857141, 1.9803571428571427, 1.9839285714285713, 1.9874999999999998, 1.9910714285714286, 1.9946428571428572, 1.9982142857142857, 2.0017857142857145, 2.005357142857143, 2.0089285714285716, 2.0125, 2.0160714285714287, 2.0196428571428573, 2.023214285714286, 2.0267857142857144, 2.030357142857143, 2.0339285714285715, 2.0375, 2.0410714285714286, 2.044642857142857, 2.0482142857142858, 2.0517857142857143, 2.055357142857143, 2.0589285714285714, 2.0625, 2.0660714285714286, 2.069642857142857, 2.0732142857142857, 2.0767857142857142, 2.080357142857143, 2.083928571428572, 2.0875000000000004, 2.091071428571429, 2.0946428571428575, 2.098214285714286, 2.1017857142857146, 2.105357142857143, 2.1089285714285717, 2.1125000000000003, 2.116071428571429, 2.1196428571428574, 2.123214285714286, 2.1267857142857145, 2.130357142857143, 2.1339285714285716, 2.1375, 2.1410714285714287, 2.1446428571428573, 2.148214285714286, 2.1517857142857144, 2.155357142857143, 2.1589285714285715, 2.1625, 2.1660714285714286, 2.169642857142857, 2.1732142857142858, 2.1767857142857143, 2.180357142857143, 2.1839285714285714, 2.1875, 2.1910714285714286, 2.194642857142857, 2.1982142857142857, 2.2017857142857142, 2.205357142857143, 2.208928571428572, 2.2125000000000004, 2.216071428571429, 2.2196428571428575, 2.223214285714286, 2.2267857142857146, 2.230357142857143, 2.2339285714285717, 2.2375000000000003, 2.241071428571429, 2.2446428571428574, 2.248214285714286, 2.2517857142857145, 2.255357142857143, 2.2589285714285716, 2.2625, 
2.2660714285714287, 2.2696428571428573, 2.273214285714286, 2.2767857142857144, 2.280357142857143, 2.2839285714285715, 2.2875, 2.2910714285714286, 2.294642857142857, 2.2982142857142858, 2.3017857142857143, 2.305357142857143, 2.3089285714285714, 2.3125, 2.3160714285714286, 2.319642857142857, 2.3232142857142857, 2.3267857142857142, 2.330357142857143, 2.3339285714285714, 2.3375000000000004, 2.341071428571429, 2.3446428571428575, 2.348214285714286, 2.3517857142857146, 2.355357142857143, 2.3589285714285717, 2.3625000000000003, 2.366071428571429, 2.3696428571428574, 2.373214285714286, 2.3767857142857145, 2.380357142857143, 2.3839285714285716, 2.3875, 2.3910714285714287, 2.3946428571428573, 2.398214285714286, 2.4017857142857144, 2.405357142857143, 2.4089285714285715, 2.4125, 2.4160714285714286, 2.419642857142857, 2.4232142857142858, 2.4267857142857143, 2.430357142857143, 2.4339285714285714, 2.4375, 2.4410714285714286, 2.444642857142857, 2.4482142857142857, 2.4517857142857142, 2.455357142857143, 2.4589285714285714, 2.4625000000000004, 2.466071428571429, 2.4696428571428575, 2.473214285714286, 2.4767857142857146, 2.480357142857143, 2.4839285714285717, 2.4875000000000003, 2.491071428571429, 2.4946428571428574, 2.498214285714286, 2.5017857142857145, 2.505357142857143, 2.5089285714285716, 2.5125, 2.5160714285714287, 2.5196428571428573, 2.523214285714286, 2.5267857142857144, 2.530357142857143, 2.5339285714285715, 2.5375, 2.5410714285714286, 2.544642857142857, 2.5482142857142858, 2.5517857142857143, 2.555357142857143, 2.5589285714285714, 2.5625, 2.5660714285714286, 2.569642857142857, 2.5732142857142857, 2.5767857142857142, 2.580357142857143, 2.5839285714285714, 2.5875000000000004, 2.591071428571429, 2.5946428571428575, 2.598214285714286, 2.6017857142857146, 2.605357142857143, 2.6089285714285717, 2.6125000000000003, 2.616071428571429, 2.6196428571428574, 2.623214285714286, 2.6267857142857145, 2.630357142857143, 2.6339285714285716, 2.6375, 2.6410714285714287, 2.6446428571428573, 
2.648214285714286, 2.6517857142857144, 2.655357142857143, 2.6589285714285715, 2.6625, 2.6660714285714286, 2.669642857142857, 2.6732142857142858, 2.6767857142857143, 2.680357142857143, 2.6839285714285714, 2.6875, 2.6910714285714286, 2.694642857142857, 2.6982142857142857, 2.7017857142857142, 2.705357142857143, 2.7089285714285714, 2.7125, 2.716071428571429, 2.7196428571428575, 2.723214285714286, 2.7267857142857146, 2.730357142857143, 2.7339285714285717, 2.7375000000000003, 2.741071428571429, 2.7446428571428574, 2.748214285714286, 2.7517857142857145, 2.755357142857143, 2.7589285714285716, 2.7625, 2.7660714285714287, 2.7696428571428573, 2.773214285714286, 2.7767857142857144, 2.780357142857143, 2.7839285714285715, 2.7875, 2.7910714285714286, 2.794642857142857, 2.7982142857142858, 2.8017857142857143, 2.805357142857143, 2.8089285714285714, 2.8125, 2.8160714285714286, 2.819642857142857, 2.8232142857142857, 2.8267857142857142, 2.830357142857143, 2.8339285714285714, 2.8375, 2.841071428571429, 2.8446428571428575, 2.848214285714286, 2.8517857142857146, 2.855357142857143, 2.8589285714285717, 2.8625000000000003, 2.866071428571429, 2.8696428571428574, 2.873214285714286, 2.8767857142857145, 2.880357142857143, 2.8839285714285716, 2.8875, 2.8910714285714287, 2.8946428571428573, 2.898214285714286, 2.9017857142857144, 2.905357142857143, 2.9089285714285715, 2.9125, 2.9160714285714286, 2.919642857142857, 2.9232142857142858, 2.9267857142857143, 2.930357142857143, 2.9339285714285714, 2.9375, 2.9410714285714286, 2.944642857142857, 2.9482142857142857, 2.9517857142857142, 2.955357142857143, 2.9589285714285714, 2.9625, 2.966071428571429, 2.9696428571428575, 2.973214285714286, 2.9767857142857146, 2.980357142857143, 2.9839285714285717, 2.9875000000000003, 2.991071428571429, 2.9946428571428574, 2.998214285714286, 3.0017857142857145, 3.005357142857143, 3.0089285714285716, 3.0125, 3.0160714285714287, 3.0196428571428573, 3.023214285714286, 3.0267857142857144, 3.030357142857143, 3.0339285714285715, 
3.0375, 3.0410714285714286, 3.044642857142857, 3.0482142857142858, 3.0517857142857143, 3.055357142857143, 3.0589285714285714, 3.0625, 3.0660714285714286, 3.069642857142857, 3.0732142857142857, 3.0767857142857142, 3.080357142857143, 3.0839285714285714, 3.0875, 3.0910714285714285, 3.0946428571428575, 3.098214285714286, 3.1017857142857146, 3.105357142857143, 3.1089285714285717, 3.1125000000000003, 3.116071428571429, 3.1196428571428574, 3.123214285714286, 3.1267857142857145, 3.130357142857143, 3.1339285714285716, 3.1375, 3.1410714285714287, 3.1446428571428573, 3.148214285714286, 3.1517857142857144, 3.155357142857143, 3.1589285714285715, 3.1625, 3.1660714285714286, 3.169642857142857, 3.1732142857142858, 3.1767857142857143, 3.180357142857143, 3.1839285714285714, 3.1875, 3.1910714285714286, 3.194642857142857, 3.1982142857142857, 3.2017857142857142, 3.205357142857143, 3.2089285714285714, 3.2125, 3.2160714285714285, 3.2196428571428575, 3.223214285714286, 3.2267857142857146, 3.230357142857143, 3.2339285714285717, 3.2375000000000003, 3.241071428571429, 3.2446428571428574, 3.248214285714286, 3.2517857142857145, 3.255357142857143, 3.2589285714285716, 3.2625, 3.2660714285714287, 3.2696428571428573, 3.273214285714286, 3.2767857142857144, 3.280357142857143, 3.2839285714285715, 3.2875, 3.2910714285714286, 3.294642857142857, 3.2982142857142858, 3.3017857142857143, 3.305357142857143, 3.3089285714285714, 3.3125, 3.3160714285714286, 3.319642857142857, 3.3232142857142857, 3.3267857142857142, 3.330357142857143, 3.3339285714285714, 3.3375, 3.3410714285714285, 3.3446428571428575, 3.348214285714286, 3.3517857142857146, 3.355357142857143, 3.3589285714285717, 3.3625000000000003, 3.366071428571429, 3.3696428571428574, 3.373214285714286, 3.3767857142857145, 3.380357142857143, 3.3839285714285716, 3.3875, 3.3910714285714287, 3.3946428571428573, 3.398214285714286, 3.4017857142857144, 3.405357142857143, 3.4089285714285715, 3.4125, 3.4160714285714286, 3.419642857142857, 3.4232142857142858, 
3.4267857142857143, 3.430357142857143, 3.4339285714285714, 3.4375, 3.4410714285714286, 3.444642857142857, 3.4482142857142857, 3.4517857142857142, 3.455357142857143, 3.4589285714285714, 3.4625, 3.4660714285714285, 3.469642857142857, 3.473214285714286, 3.4767857142857146, 3.480357142857143, 3.4839285714285717, 3.4875000000000003, 3.491071428571429, 3.4946428571428574, 3.498214285714286, 3.5017857142857145, 3.505357142857143, 3.5089285714285716, 3.5125, 3.5160714285714287, 3.5196428571428573, 3.523214285714286, 3.5267857142857144, 3.530357142857143, 3.5339285714285715, 3.5375, 3.5410714285714286, 3.544642857142857, 3.5482142857142858, 3.5517857142857143, 3.555357142857143, 3.5589285714285714, 3.5625, 3.5660714285714286, 3.569642857142857, 3.5732142857142857, 3.5767857142857142, 3.580357142857143, 3.5839285714285714, 3.5875, 3.5910714285714285, 3.594642857142857, 3.598214285714286, 3.6017857142857146, 3.605357142857143, 3.6089285714285717, 3.6125000000000003, 3.616071428571429, 3.6196428571428574, 3.623214285714286, 3.6267857142857145, 3.630357142857143, 3.6339285714285716, 3.6375, 3.6410714285714287, 3.6446428571428573, 3.648214285714286, 3.6517857142857144, 3.655357142857143, 3.6589285714285715, 3.6625, 3.6660714285714286, 3.669642857142857, 3.6732142857142858, 3.6767857142857143, 3.680357142857143, 3.6839285714285714, 3.6875, 3.6910714285714286, 3.694642857142857, 3.6982142857142857, 3.7017857142857142, 3.705357142857143, 3.7089285714285714, 3.7125, 3.7160714285714285, 3.719642857142857, 3.723214285714286, 3.7267857142857146, 3.730357142857143, 3.7339285714285717, 3.7375000000000003, 3.741071428571429, 3.7446428571428574, 3.748214285714286, 3.7517857142857145, 3.755357142857143, 3.7589285714285716, 3.7625, 3.7660714285714287, 3.7696428571428573, 3.773214285714286, 3.7767857142857144, 3.780357142857143, 3.7839285714285715, 3.7875, 3.7910714285714286, 3.794642857142857, 3.7982142857142858, 3.8017857142857143, 3.805357142857143, 3.8089285714285714, 3.8125, 
3.8160714285714286, 3.819642857142857, 3.8232142857142857, 3.8267857142857142, 3.830357142857143, 3.8339285714285714, 3.8375, 3.8410714285714285, 3.844642857142857, 3.848214285714286, 3.8517857142857146, 3.855357142857143, 3.8589285714285717, 3.8625000000000003, 3.866071428571429, 3.8696428571428574, 3.873214285714286, 3.8767857142857145, 3.880357142857143, 3.8839285714285716, 3.8875, 3.8910714285714287, 3.8946428571428573, 3.898214285714286, 3.9017857142857144, 3.905357142857143, 3.9089285714285715, 3.9125, 3.9160714285714286, 3.919642857142857, 3.9232142857142858, 3.9267857142857143, 3.930357142857143, 3.9339285714285714, 3.9375, 3.9410714285714286, 3.944642857142857, 3.9482142857142857, 3.9517857142857142, 3.955357142857143, 3.9589285714285714, 3.9625, 3.9660714285714285, 3.969642857142857, 3.9732142857142856, 3.9767857142857146, 3.980357142857143, 3.9839285714285717, 3.9875000000000003, 3.991071428571429, 3.9946428571428574, 3.998214285714286, 4.001785714285714, 4.005357142857142, 4.008928571428571, 4.012499999999999, 4.016071428571428, 4.019642857142856, 4.023214285714285, 4.0267857142857135, 4.0303571428571425, 4.033928571428571, 4.0375, 4.041071428571429, 4.044642857142857, 4.048214285714286, 4.051785714285714, 4.055357142857143, 4.058928571428571, 4.0625, 4.066071428571428, 4.069642857142857, 4.073214285714285, 4.076785714285714, 4.080357142857142, 4.083928571428571, 4.0874999999999995, 4.0910714285714285, 4.094642857142857, 4.098214285714286, 4.101785714285714, 4.105357142857143, 4.108928571428571, 4.1125, 4.116071428571428, 4.119642857142857, 4.123214285714285, 4.126785714285714, 4.130357142857142, 4.133928571428571, 4.137499999999999, 4.141071428571428, 4.144642857142856, 4.148214285714285, 4.1517857142857135, 4.1553571428571425, 4.158928571428571, 4.1625, 4.166071428571429, 4.169642857142857, 4.173214285714286, 4.176785714285714, 4.180357142857143, 4.183928571428571, 4.1875, 4.191071428571428, 4.194642857142857, 4.198214285714285, 4.201785714285714, 
4.205357142857142, 4.208928571428571, 4.2124999999999995, 4.2160714285714285, 4.219642857142857, 4.223214285714286, 4.226785714285714, 4.230357142857143, 4.233928571428571, 4.2375, 4.241071428571428, 4.244642857142857, 4.248214285714285, 4.251785714285714, 4.255357142857142, 4.258928571428571, 4.262499999999999, 4.266071428571428, 4.269642857142856, 4.273214285714285, 4.2767857142857135, 4.2803571428571425, 4.283928571428571, 4.2875, 4.291071428571429, 4.294642857142857, 4.298214285714286, 4.301785714285714, 4.305357142857143, 4.308928571428571, 4.3125, 4.316071428571428, 4.319642857142857, 4.323214285714285, 4.326785714285714, 4.330357142857142, 4.333928571428571, 4.3374999999999995, 4.3410714285714285, 4.344642857142857, 4.348214285714286, 4.351785714285714, 4.355357142857143, 4.358928571428571, 4.3625, 4.366071428571428, 4.369642857142857, 4.373214285714285, 4.376785714285714, 4.380357142857142, 4.383928571428571, 4.387499999999999, 4.391071428571428, 4.394642857142856, 4.398214285714285, 4.4017857142857135, 4.4053571428571425, 4.408928571428571, 4.4125, 4.416071428571429, 4.419642857142857, 4.423214285714286, 4.426785714285714, 4.430357142857143, 4.433928571428571, 4.4375, 4.441071428571428, 4.444642857142857, 4.448214285714285, 4.451785714285714, 4.455357142857142, 4.458928571428571, 4.4624999999999995, 4.4660714285714285, 4.469642857142857, 4.473214285714286, 4.476785714285714, 4.480357142857143, 4.483928571428571, 4.4875, 4.491071428571428, 4.494642857142857, 4.498214285714285, 4.501785714285714, 4.505357142857142, 4.508928571428571, 4.512499999999999, 4.516071428571428, 4.519642857142856, 4.523214285714285, 4.5267857142857135, 4.5303571428571425, 4.533928571428571, 4.5375, 4.541071428571428, 4.544642857142857, 4.548214285714286, 4.551785714285714, 4.555357142857143, 4.558928571428571, 4.5625, 4.566071428571428, 4.569642857142857, 4.573214285714285, 4.576785714285714, 4.580357142857142, 4.583928571428571, 4.5874999999999995, 4.5910714285714285, 
4.594642857142857, 4.598214285714286, 4.601785714285714, 4.605357142857143, 4.608928571428571, 4.6125, 4.616071428571428, 4.619642857142857, 4.623214285714285, 4.626785714285714, 4.630357142857142, 4.633928571428571, 4.637499999999999, 4.641071428571428, 4.644642857142856, 4.648214285714285, 4.6517857142857135, 4.6553571428571425, 4.658928571428571, 4.6625, 4.666071428571428, 4.669642857142857, 4.673214285714286, 4.676785714285714, 4.680357142857143, 4.683928571428571, 4.6875, 4.691071428571428, 4.694642857142857, 4.698214285714285, 4.701785714285714, 4.705357142857142, 4.708928571428571, 4.7124999999999995, 4.7160714285714285, 4.719642857142857, 4.723214285714286, 4.726785714285714, 4.730357142857143, 4.733928571428571, 4.7375, 4.741071428571428, 4.744642857142857, 4.748214285714285, 4.751785714285714, 4.755357142857142, 4.758928571428571, 4.762499999999999, 4.766071428571428, 4.769642857142856, 4.773214285714285, 4.7767857142857135, 4.7803571428571425, 4.783928571428571, 4.7875, 4.791071428571428, 4.794642857142857, 4.798214285714286, 4.801785714285714, 4.805357142857143, 4.808928571428571, 4.8125, 4.816071428571428, 4.819642857142857, 4.823214285714285, 4.826785714285714, 4.830357142857142, 4.833928571428571, 4.8374999999999995, 4.8410714285714285, 4.844642857142857, 4.848214285714286, 4.851785714285714, 4.855357142857143, 4.858928571428571, 4.8625, 4.866071428571428, 4.869642857142857, 4.873214285714285, 4.876785714285714, 4.880357142857142, 4.883928571428571, 4.887499999999999, 4.891071428571428, 4.894642857142856, 4.898214285714285, 4.9017857142857135, 4.9053571428571425, 4.908928571428571, 4.9125, 4.916071428571428, 4.919642857142857, 4.923214285714286, 4.926785714285714, 4.930357142857143, 4.933928571428571, 4.9375, 4.941071428571428, 4.944642857142857, 4.948214285714285, 4.951785714285714, 4.955357142857142, 4.958928571428571, 4.9624999999999995, 4.9660714285714285, 4.969642857142857, 4.973214285714286, 4.976785714285714, 4.980357142857143, 
4.983928571428571, 4.9875, 4.991071428571428, 4.994642857142857, 4.998214285714285]
| [
"[email protected]"
]
| |
c6672d7dd3e2446b3f16cf09954a42762c8fceef | 88b7c57a0d9a7a3b28ebd9d6c12ecbbebc50e8a5 | /config/settings/dev.py | fb7a3dc769830e8f5120adf3b2bf7efddccd22d8 | []
| no_license | largerbigsuper/beep | 71438a4c2feae1afd6ecd25899e95f441bf2165b | a5d84437d79f065cec168f68210c4344a60d08d1 | refs/heads/master | 2022-09-23T02:09:37.117676 | 2020-01-03T06:21:57 | 2020-01-03T06:21:57 | 209,052,138 | 0 | 0 | null | 2022-09-13T23:03:25 | 2019-09-17T12:47:26 | Python | UTF-8 | Python | false | false | 4,456 | py | from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="1ReGaeINNTOIuHNczpQnKUf51jXoc7ZbELmcmgEJM5cun2L31vbVXfrQKPVimrLN",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
# DATABASES = {"default": env.db("DATABASE_URL")}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': env('DB_NAME'),
'USER': env('DB_USER'),
'PASSWORD':env('DB_PASSWORD'),
'HOST': env('DB_HOST'),
'PORT': env('DB_PORT'),
'ATOMIC_REQUESTS': True,
'CONN_MAX_AGE': 10,
'OPTIONS': {
'init_command': 'SET CHARACTER SET utf8mb4',
'charset': 'utf8mb4',
}
}
}
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://redis:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
# Mimicing memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
"IGNORE_EXCEPTIONS": True,
},
}
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
SESSION_COOKIE_AGE = 365 * 24 * 60 * 60
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env(
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend"
)
# django-debug-toolbar
# ------------------------------------------------------------------------------
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
INSTALLED_APPS += ["debug_toolbar"] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
DEBUG_TOOLBAR_CONFIG = {
"DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
"SHOW_TEMPLATE_CONTEXT": True,
}
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
# INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"]
# if env("USE_DOCKER") == "yes":
# import socket
# hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
# INTERNAL_IPS += [ip[:-1] + "1" for ip in ips]
# django-extensions
# ------------------------------------------------------------------------------
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
INSTALLED_APPS += ["django_extensions"] # noqa F405
# Your stuff...
# ------------------------------------------------------------------------------
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = False
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
CSRF_TRUSTED_ORIGINS = ['servicewechat.com', 'lhxq.top', 'beep.lhxq.top', 'test.beepcrypto.com', '127.0.0.1', '127.0.0.1:8080', '127.0.0.1:7788', '192.168.0.102:7788']
# 小程序
MINI_PRAGRAM_APP_ID = 'wx300f2f1d32b30613'
MINI_PRAGRAM_APP_SECRET = '2d6b9fef49827381af8dd26b4b66f5e5'
MINI_PRAGRAM_LOGIN_URL = 'https://api.weixin.qq.com/sns/jscode2session?appid={}&secret={}&grant_type=authorization_code&js_code='.format(MINI_PRAGRAM_APP_ID, MINI_PRAGRAM_APP_SECRET)
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": ['redis://redis:6379/0'],
},
},
}
CELERY_BROKER_URL = 'redis://redis:6379/2' # Broker配置,使用Redis作为消息中间件
CELERY_RESULT_BACKEND = 'redis://redis:6379/2' # BACKEND配置,这里使用redis
CELERY_RESULT_SERIALIZER = 'json' # 结果序列化方案
CELERY_TIMEZONE = "Asia/Shanghai"
CELERY_ENABLE_UTC = False
| [
"[email protected]"
]
| |
62acac9f1ebfd24d7c48ad282623209de9fb6711 | 2626f6e6803c8c4341d01f57228a0fe117e3680b | /students/MikeShand/Lesson 04/json_save/test/test_savables.py | 3121cade9de7f296765a3f5f547a6ab833befd8f | []
| no_license | kmsnyde/SP_Online_Course2_2018 | 9e59362da253cdec558e1c2f39221c174d6216f3 | 7fe8635b47d4792a8575e589797260ad0a2b027e | refs/heads/master | 2020-03-19T17:15:03.945523 | 2018-09-05T22:28:55 | 2018-09-05T22:28:55 | 136,750,231 | 0 | 0 | null | 2018-06-09T19:01:52 | 2018-06-09T19:01:51 | null | UTF-8 | Python | false | false | 2,565 | py | #!/usr/bin/env python
"""
tests for the savable objects
"""
import pytest
import json
from json_save.saveables import *
# The simple, almost json <-> python ones:
# Type, default, example
basics = [(String, "This is a string"),
(Int, 23),
(Float, 3.1458),
(Bool, True),
(Bool, False),
(List, [2, 3, 4]),
(Tuple, (1, 2, 3.4, "this")),
(List, [[1, 2, 3], [4, 5, 6]]),
(List, [{"3": 34}, {"4": 5}]), # list with dicts in it.
(Dict, {"this": {"3": 34}, "that": {"4": 5}}) # dict with dicts
]
@pytest.mark.parametrize(('Type', 'val'), basics)
def test_basics(Type, val):
js = json.dumps(Type.to_json_compat(val))
val2 = Type.to_python(json.loads(js))
assert val == val2
assert type(val) == type(val2)
nested = [(List, [(1, 2), (3, 4), (5, 6)]), # tuple in list
(Tuple, ((1, 2), (3, 4), (5, 6))), # tuple in tuple
]
# This maybe should be fixed in the future??
@pytest.mark.xfail(reason="nested not-standard types not supported")
@pytest.mark.parametrize(('Type', 'val'), nested)
def test_nested(Type, val):
print("original value:", val)
js = json.dumps(Type.to_json_compat(val))
print("js is:", js)
val2 = Type.to_python(json.loads(js))
print("new value is:", val2)
assert val == val2
assert type(val) == type(val2)
dicts = [{"this": 14, "that": 1.23},
{34: 15, 23: 5},
{3.4: "float_key", 1.2: "float_key"},
{(1, 2, 3): "tuple_key"},
{(3, 4, 5): "tuple_int", ("this", "that"): "tuple_str"},
{4: "int_key", 1.23: "float_key", (1, 2, 3): "tuple_key"},
]
@pytest.mark.parametrize('val', dicts)
def test_dicts(val):
js = json.dumps(Dict.to_json_compat(val))
val2 = Dict.to_python(json.loads(js))
assert val == val2
assert type(val) == type(val2)
# check that the types of the keys is the same
for k1, k2 in zip(val.keys(), val2.keys()):
assert type(k1) is type(k2)
# These are dicts that can't be saved
# -- mixing string and non-string keys
bad_dicts = [{"this": "string_key", 4: "int_key"},
{3: "int_key", "this": "string_key"},
{None: "none_key", "this": "string_key"},
{"this": "string_key", None: "none_key"},
]
@pytest.mark.parametrize("val", bad_dicts)
def test_bad_dicts(val):
with pytest.raises(TypeError):
Dict.to_json_compat(val)
| [
"[email protected]"
]
| |
591897413cfedd971e1f5fd6e91cc59b6dcc9383 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/cast/helpers.py | c6a92c21fb462790c9a937b728dc7784cd8ac5ec | [
"Apache-2.0"
]
| permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 12,912 | py | """Helpers to deal with Cast devices."""
from __future__ import annotations
import asyncio
import configparser
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING
from urllib.parse import urlparse
import aiohttp
import attr
import pychromecast
from pychromecast import dial
from pychromecast.const import CAST_TYPE_GROUP
from pychromecast.models import CastInfo
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from .const import DOMAIN
if TYPE_CHECKING:
from homeassistant.components import zeroconf
_LOGGER = logging.getLogger(__name__)
_PLS_SECTION_PLAYLIST = "playlist"
@attr.s(slots=True, frozen=True)
class ChromecastInfo:
"""Class to hold all data about a chromecast for creating connections.
This also has the same attributes as the mDNS fields by zeroconf.
"""
cast_info: CastInfo = attr.ib()
is_dynamic_group = attr.ib(type=bool | None, default=None)
@property
def friendly_name(self) -> str:
"""Return the Friendly Name."""
return self.cast_info.friendly_name
@property
def is_audio_group(self) -> bool:
"""Return if the cast is an audio group."""
return self.cast_info.cast_type == CAST_TYPE_GROUP
@property
def uuid(self) -> bool:
"""Return the UUID."""
return self.cast_info.uuid
def fill_out_missing_chromecast_info(self, hass: HomeAssistant) -> ChromecastInfo:
"""Return a new ChromecastInfo object with missing attributes filled in.
Uses blocking HTTP / HTTPS.
"""
cast_info = self.cast_info
if self.cast_info.cast_type is None or self.cast_info.manufacturer is None:
unknown_models = hass.data[DOMAIN]["unknown_models"]
if self.cast_info.model_name not in unknown_models:
# Manufacturer and cast type is not available in mDNS data,
# get it over HTTP
cast_info = dial.get_cast_type(
cast_info,
zconf=ChromeCastZeroconf.get_zeroconf(),
)
unknown_models[self.cast_info.model_name] = (
cast_info.cast_type,
cast_info.manufacturer,
)
report_issue = (
"create a bug report at "
"https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue"
"+label%3A%22integration%3A+cast%22"
)
_LOGGER.info(
(
"Fetched cast details for unknown model '%s' manufacturer:"
" '%s', type: '%s'. Please %s"
),
cast_info.model_name,
cast_info.manufacturer,
cast_info.cast_type,
report_issue,
)
else:
cast_type, manufacturer = unknown_models[self.cast_info.model_name]
cast_info = CastInfo(
cast_info.services,
cast_info.uuid,
cast_info.model_name,
cast_info.friendly_name,
cast_info.host,
cast_info.port,
cast_type,
manufacturer,
)
if not self.is_audio_group or self.is_dynamic_group is not None:
# We have all information, no need to check HTTP API.
return ChromecastInfo(cast_info=cast_info)
# Fill out missing group information via HTTP API.
is_dynamic_group = False
http_group_status = None
http_group_status = dial.get_multizone_status(
None,
services=self.cast_info.services,
zconf=ChromeCastZeroconf.get_zeroconf(),
)
if http_group_status is not None:
is_dynamic_group = any(
g.uuid == self.cast_info.uuid for g in http_group_status.dynamic_groups
)
return ChromecastInfo(
cast_info=cast_info,
is_dynamic_group=is_dynamic_group,
)
class ChromeCastZeroconf:
"""Class to hold a zeroconf instance."""
__zconf: zeroconf.HaZeroconf | None = None
@classmethod
def set_zeroconf(cls, zconf: zeroconf.HaZeroconf) -> None:
"""Set zeroconf."""
cls.__zconf = zconf
@classmethod
def get_zeroconf(cls) -> zeroconf.HaZeroconf | None:
"""Get zeroconf."""
return cls.__zconf
class CastStatusListener(
pychromecast.controllers.media.MediaStatusListener,
pychromecast.controllers.multizone.MultiZoneManagerListener,
pychromecast.controllers.receiver.CastStatusListener,
pychromecast.socket_client.ConnectionStatusListener,
):
"""Helper class to handle pychromecast status callbacks.
Necessary because a CastDevice entity or dynamic group can create a new
socket client and therefore callbacks from multiple chromecast connections can
potentially arrive. This class allows invalidating past chromecast objects.
"""
def __init__(self, cast_device, chromecast, mz_mgr, mz_only=False):
"""Initialize the status listener."""
self._cast_device = cast_device
self._uuid = chromecast.uuid
self._valid = True
self._mz_mgr = mz_mgr
if cast_device._cast_info.is_audio_group:
self._mz_mgr.add_multizone(chromecast)
if mz_only:
return
chromecast.register_status_listener(self)
chromecast.socket_client.media_controller.register_status_listener(self)
chromecast.register_connection_listener(self)
if not cast_device._cast_info.is_audio_group:
self._mz_mgr.register_listener(chromecast.uuid, self)
def new_cast_status(self, status):
"""Handle reception of a new CastStatus."""
if self._valid:
self._cast_device.new_cast_status(status)
def new_media_status(self, status):
"""Handle reception of a new MediaStatus."""
if self._valid:
self._cast_device.new_media_status(status)
def load_media_failed(self, item, error_code):
"""Handle reception of a new MediaStatus."""
if self._valid:
self._cast_device.load_media_failed(item, error_code)
def new_connection_status(self, status):
"""Handle reception of a new ConnectionStatus."""
if self._valid:
self._cast_device.new_connection_status(status)
def added_to_multizone(self, group_uuid):
"""Handle the cast added to a group."""
def removed_from_multizone(self, group_uuid):
"""Handle the cast removed from a group."""
if self._valid:
self._cast_device.multizone_new_media_status(group_uuid, None)
def multizone_new_cast_status(self, group_uuid, cast_status):
"""Handle reception of a new CastStatus for a group."""
def multizone_new_media_status(self, group_uuid, media_status):
"""Handle reception of a new MediaStatus for a group."""
if self._valid:
self._cast_device.multizone_new_media_status(group_uuid, media_status)
def invalidate(self):
"""Invalidate this status listener.
All following callbacks won't be forwarded.
"""
# pylint: disable=protected-access
if self._cast_device._cast_info.is_audio_group:
self._mz_mgr.remove_multizone(self._uuid)
else:
self._mz_mgr.deregister_listener(self._uuid, self)
self._valid = False
class PlaylistError(Exception):
"""Exception wrapper for pls and m3u helpers."""
class PlaylistSupported(PlaylistError):
"""The playlist is supported by cast devices and should not be parsed."""
@dataclass
class PlaylistItem:
"""Playlist item."""
length: str | None
title: str | None
url: str
def _is_url(url):
"""Validate the URL can be parsed and at least has scheme + netloc."""
result = urlparse(url)
return all([result.scheme, result.netloc])
async def _fetch_playlist(hass, url, supported_content_types):
"""Fetch a playlist from the given url."""
try:
session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False)
async with session.get(url, timeout=5) as resp:
charset = resp.charset or "utf-8"
if resp.content_type in supported_content_types:
raise PlaylistSupported
try:
playlist_data = (await resp.content.read(64 * 1024)).decode(charset)
except ValueError as err:
raise PlaylistError(f"Could not decode playlist {url}") from err
except asyncio.TimeoutError as err:
raise PlaylistError(f"Timeout while fetching playlist {url}") from err
except aiohttp.client_exceptions.ClientError as err:
raise PlaylistError(f"Error while fetching playlist {url}") from err
return playlist_data
async def parse_m3u(hass, url):
"""Very simple m3u parser.
Based on https://github.com/dvndrsn/M3uParser/blob/master/m3uparser.py
"""
# From Mozilla gecko source: https://github.com/mozilla/gecko-dev/blob/c4c1adbae87bf2d128c39832d72498550ee1b4b8/dom/media/DecoderTraits.cpp#L47-L52
hls_content_types = (
# https://tools.ietf.org/html/draft-pantos-http-live-streaming-19#section-10
"application/vnd.apple.mpegurl",
# Additional informal types used by Mozilla gecko not included as they
# don't reliably indicate HLS streams
)
m3u_data = await _fetch_playlist(hass, url, hls_content_types)
m3u_lines = m3u_data.splitlines()
playlist = []
length = None
title = None
for line in m3u_lines:
line = line.strip()
if line.startswith("#EXTINF:"):
# Get length and title from #EXTINF line
info = line.split("#EXTINF:")[1].split(",", 1)
if len(info) != 2:
_LOGGER.warning("Ignoring invalid extinf %s in playlist %s", line, url)
continue
length = info[0].split(" ", 1)
title = info[1].strip()
elif line.startswith("#EXT-X-VERSION:"):
# HLS stream, supported by cast devices
raise PlaylistSupported("HLS")
elif line.startswith("#EXT-X-STREAM-INF:"):
# HLS stream, supported by cast devices
raise PlaylistSupported("HLS")
elif line.startswith("#"):
# Ignore other extensions
continue
elif len(line) != 0:
# Get song path from all other, non-blank lines
if not _is_url(line):
raise PlaylistError(f"Invalid item {line} in playlist {url}")
playlist.append(PlaylistItem(length=length, title=title, url=line))
# reset the song variables so it doesn't use the same EXTINF more than once
length = None
title = None
return playlist
async def parse_pls(hass, url):
"""Very simple pls parser.
Based on https://github.com/mariob/plsparser/blob/master/src/plsparser.py
"""
pls_data = await _fetch_playlist(hass, url, ())
pls_parser = configparser.ConfigParser()
try:
pls_parser.read_string(pls_data, url)
except configparser.Error as err:
raise PlaylistError(f"Can't parse playlist {url}") from err
if (
_PLS_SECTION_PLAYLIST not in pls_parser
or pls_parser[_PLS_SECTION_PLAYLIST].getint("Version") != 2
):
raise PlaylistError(f"Invalid playlist {url}")
try:
num_entries = pls_parser.getint(_PLS_SECTION_PLAYLIST, "NumberOfEntries")
except (configparser.NoOptionError, ValueError) as err:
raise PlaylistError(f"Invalid NumberOfEntries in playlist {url}") from err
playlist_section = pls_parser[_PLS_SECTION_PLAYLIST]
playlist = []
for entry in range(1, num_entries + 1):
file_option = f"File{entry}"
if file_option not in playlist_section:
_LOGGER.warning("Missing %s in pls from %s", file_option, url)
continue
item_url = playlist_section[file_option]
if not _is_url(item_url):
raise PlaylistError(f"Invalid item {item_url} in playlist {url}")
playlist.append(
PlaylistItem(
length=playlist_section.get(f"Length{entry}"),
title=playlist_section.get(f"Title{entry}"),
url=item_url,
)
)
return playlist
async def parse_playlist(hass, url):
"""Parse an m3u or pls playlist."""
if url.endswith(".m3u") or url.endswith(".m3u8"):
playlist = await parse_m3u(hass, url)
else:
playlist = await parse_pls(hass, url)
if not playlist:
raise PlaylistError(f"Empty playlist {url}")
return playlist
| [
"[email protected]"
]
| |
2d670cf46ab518d12618a5c7cd214f15721b1946 | 1afec7d1d3099138b5afe5fd73dfd3d24ff4eb15 | /test/functional/feature_minchainwork.py | bf9177d0a7c94fd1dfa263477971b818dbe15ed0 | [
"MIT"
]
| permissive | republic-productions/finalcoin | 5c7c6b0734178fe22db63f0946ec555f59e8d0eb | 7c0f335ded1e5c662034c822ca2c474b8e62778f | refs/heads/main | 2023-09-04T17:04:32.683667 | 2021-10-14T17:45:22 | 2021-10-14T17:45:22 | 417,209,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,109 | py | #!/usr/bin/env python3
# Copyright (c) 2017-2020 The Finalcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMinimumChainWork on command line.
Nodes don't consider themselves out of "initial block download" until
their active chain has more work than nMinimumChainWork.
Nodes don't download blocks from a peer unless the peer's best known block
has more work than nMinimumChainWork.
While in initial block download, nodes won't relay blocks to their peers, so
test that this parameter functions as intended by verifying that block relay
only succeeds past a given node once its nMinimumChainWork has been exceeded.
"""
import time
from test_framework.test_framework import FinalcoinTestFramework
from test_framework.util import assert_equal
# 2 hashes required per regtest block (with no difficulty adjustment)
REGTEST_WORK_PER_BLOCK = 2
class MinimumChainWorkTest(FinalcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]]
self.node_min_work = [0, 101, 101]
def setup_network(self):
# This test relies on the chain setup being:
# node0 <- node1 <- node2
# Before leaving IBD, nodes prefer to download blocks from outbound
# peers, so ensure that we're mining on an outbound peer and testing
# block relay to inbound peers.
self.setup_nodes()
for i in range(self.num_nodes-1):
self.connect_nodes(i+1, i)
def run_test(self):
# Start building a chain on node0. node2 shouldn't be able to sync until node1's
# minchainwork is exceeded
starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
self.log.info(f"Testing relay across node 1 (minChainWork = {self.node_min_work[1]})")
starting_blockcount = self.nodes[2].getblockcount()
num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
self.log.info(f"Generating {num_blocks_to_generate} blocks on node0")
hashes = self.generatetoaddress(self.nodes[0], num_blocks_to_generate,
self.nodes[0].get_deterministic_priv_key().address)
self.log.info(f"Node0 current chain work: {self.nodes[0].getblockheader(hashes[-1])['chainwork']}")
# Sleep a few seconds and verify that node2 didn't get any new blocks
# or headers. We sleep, rather than sync_blocks(node0, node1) because
# it's reasonable either way for node1 to get the blocks, or not get
# them (since they're below node1's minchainwork).
time.sleep(3)
self.log.info("Verifying node 2 has no more blocks than before")
self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
# Node2 shouldn't have any new headers yet, because node1 should not
# have relayed anything.
assert_equal(len(self.nodes[2].getchaintips()), 1)
assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash()
assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
self.log.info("Generating one more block")
self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Verifying nodes are all synced")
# Because nodes in regtest are all manual connections (eg using
# addnode), node1 should not have disconnected node0. If not for that,
# we'd expect node1 to have disconnected node0 for serving an
# insufficient work chain, in which case we'd need to reconnect them to
# continue the test.
self.sync_all()
self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
if __name__ == '__main__':
MinimumChainWorkTest().main()
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.